import math


def count_two_square_sums(limit=10001):
    """Count representations n = a*a + b*b with 0 <= a <= b, for each 0 < n < limit.

    Args:
        limit: exclusive upper bound on the sums considered (default 10001,
            matching the original script's hard-coded value).

    Returns:
        dict mapping every n in [1, limit) to the number of unordered pairs
        (a, b) of non-negative integers with a <= b and a*a + b*b == n.
        Keys with no representation map to 0.
    """
    # Every key in [1, limit) is pre-seeded so the caller sees an explicit 0
    # for numbers with no representation (the original script did the same).
    counts = {n: 0 for n in range(1, limit)}
    # Roots larger than isqrt(limit - 1) cannot contribute any sum < limit.
    max_root = math.isqrt(limit - 1) if limit > 1 else 0
    for a in range(max_root + 1):
        for b in range(a, max_root + 1):
            s = a * a + b * b
            if s >= limit:
                # b only grows from here, so the rest of the inner loop is moot.
                break
            if s > 0:  # skip (0, 0); the original deleted key 0 afterwards
                counts[s] += 1
    return counts


def main(limit=10001):
    """Print the mean number of two-square representations over [1, limit)."""
    counts = count_two_square_sums(limit)
    total = sum(counts.values())
    count = len(counts)
    print('total ' + str(total) + ' in ' + str(count)
          + ' gives mean ' + str(1.0 * total / count))


if __name__ == "__main__":
    main()