#!/home/pi/miniconda3/bin/python3.4

# Batch gradient descent for finding the least squares solution to an overdetermined set of equations.
# Note: despite the labelling below, this is plain gradient descent, not the Gauss-Newton method —
# the two are distinct algorithms (Gauss-Newton solves a linearized normal-equation step instead).

from timeit import default_timer as timer

def dot(a, b):
	"""Return the inner product of sequences a and b (truncated to the shorter one)."""
	total = 0
	for x, w in zip(a, b):
		total += x * w
	return total

def half_average_squared_error(lhs, y, weights, m):
	"""Return the least-squares cost: sum of squared residuals over the first m
	equations, divided by 2m (the conventional factor that simplifies the gradient)."""
	total = sum((dot(weights, lhs[i]) - y[i]) ** 2 for i in range(m))
	return total / 2.0 / m

def gradient_descent(lhs, y, iters, weights, alpha, print_freq):
	"""Run full-batch gradient descent on the half-average-squared-error cost.

	lhs        -- m rows, each a list of the n left-hand-side coefficients of one equation
	y          -- the m right-hand-side values
	iters      -- number of full-batch iterations to perform
	weights    -- initial guess for the n weights (the input list is not mutated)
	alpha      -- learning rate (step size)
	print_freq -- print the cost every print_freq iterations (must be > 0)

	Returns the list of n weights after iters iterations.
	"""
	m, n = len(lhs), len(lhs[0])

	for k in range(iters):
		# Hoist the residuals (prediction - target) out of the per-coordinate loop:
		# they depend only on the current weights, but the original code recomputed
		# each dot product n times (once per coordinate i), costing O(n*m*n) per
		# iteration instead of O(m*n). Accumulation order is unchanged, so the
		# floating-point results are identical.
		residuals = [sum(w * c for w, c in zip(weights, row)) - target
					 for row, target in zip(lhs, y)]
		weights_new = []
		for i in range(n):
			# Partial derivative of the cost with respect to weight i.
			deriv = sum(residuals[j] * lhs[j][i] for j in range(m))
			weights_new.append(weights[i] - alpha * deriv / m)
		weights = weights_new
		if (k + 1) % print_freq == 0:
			print('At iteration {0:5d}: Half average squared error = {1:.6f}'.format(k + 1, half_average_squared_error(lhs, y, weights, m)))

	return weights
	
if __name__ == "__main__":
	# Exact model: y = 3.0 + 4.0 * x1 - 2.0 * x2.
	# Each row of lhs holds the left-hand-side coefficients of one of the
	# 5 equations in the 3 unknowns; y holds the matching right-hand sides,
	# generated from the exact weights so the system has a known solution.
	lhs = [[1.0, 0.0, 0.0], [1.0, 1.0, 1.0], [1.0, 2.0, -1.0], [1.0, -1.0, 2.0], [1.0, 2.5, 0.0]]
	weights_exact = [3.0, 4.0, -2.0]
	y = [dot(row, weights_exact) for row in lhs]

	iters = 100
	initial_weights = [0.0, 0.0, 0.0]
	alpha = 0.6
	print_freq = 10

	start = timer()
	weights = gradient_descent(lhs, y, iters, initial_weights, alpha, print_freq)
	end = timer()

	print('''
The gradient-descent (Gauss-Newton) method took {0:.0f} milliseconds to find
the least squares solution to {1:d} linear equations in {2:d} variables.
'''.format(1000 * (end - start), len(lhs), len(weights_exact)))

	for i, (w, w_exact) in enumerate(zip(weights, weights_exact)):
		print('Weights[{0:d}] = {1:+.4f} (Exact = {2:+.4f})'.format(i, w, w_exact))

# End of code

# Sample output:

# At iteration    10: Half average squared error = 0.291260
# At iteration    20: Half average squared error = 0.004268
# At iteration    30: Half average squared error = 0.000277
# At iteration    40: Half average squared error = 0.000029
# At iteration    50: Half average squared error = 0.000003
# At iteration    60: Half average squared error = 0.000000
# At iteration    70: Half average squared error = 0.000000
# At iteration    80: Half average squared error = 0.000000
# At iteration    90: Half average squared error = 0.000000
# At iteration   100: Half average squared error = 0.000000

# The gradient-descent (Gauss-Newton) method took 20 milliseconds to find
# the least squares solution to 5 linear equations in 3 variables.

# Weights[0] = +3.0000 (Exact = +3.0000)
# Weights[1] = +4.0000 (Exact = +4.0000)
# Weights[2] = -2.0000 (Exact = -2.0000)