from numpy import genfromtxt, array

def compute_error_for_line_given_points(b, m, points):
    # Mean squared error: E = (1/N) * sum((y - (m*x + b))^2)
    total_error = 0
    for i in range(len(points)):
        x = points[i, 0]
        y = points[i, 1]
        total_error += (y - (m * x + b)) ** 2
    # Return the average squared error
    return total_error / float(len(points))

def gradient_descent_runner(points, starting_b, starting_m, learning_rate, num_iterations):
    # Start from the initial b and m
    b = starting_b
    m = starting_m
    # Take one gradient step per iteration
    for i in range(num_iterations):
        b, m = step_gradient(b, m, array(points), learning_rate)
    return [b, m]

def step_gradient(b_current, m_current, points, learning_rate):
    # Partial derivatives of the error function give the descent direction:
    #   dE/db = -(2/N) * sum(y - (m*x + b))
    #   dE/dm = -(2/N) * sum(x * (y - (m*x + b)))
    b_gradient = 0
    m_gradient = 0
    N = float(len(points))
    for i in range(len(points)):
        x = points[i, 0]
        y = points[i, 1]
        b_gradient += -(2 / N) * (y - ((m_current * x) + b_current))
        m_gradient += -(2 / N) * x * (y - ((m_current * x) + b_current))
    # Update b and m by stepping against the gradient, scaled by the learning rate
    new_b = b_current - (learning_rate * b_gradient)
    new_m = m_current - (learning_rate * m_gradient)
    return [new_b, new_m]

def run():
    # Step 1: collect data
    points = genfromtxt('data.csv', delimiter=',')
    # Step 2: define hyperparameters
    learning_rate = 0.0001   # step size; controls how quickly the model converges
    initial_b = 0            # initial y-intercept of y = mx + b
    initial_m = 0            # initial slope of y = mx + b
    num_iterations = 1000
    # Step 3: train the model
    print('Starting gradient descent at b = {0}, m = {1}, error = {2}'.format(
        initial_b, initial_m,
        compute_error_for_line_given_points(initial_b, initial_m, points)))
    [b, m] = gradient_descent_runner(points, initial_b, initial_m, learning_rate, num_iterations)
    print('After {0} iterations b = {1}, m = {2}, error = {3}'.format(
        num_iterations, b, m,
        compute_error_for_line_given_points(b, m, points)))

if __name__ == '__main__':
    run()
Running the script against the sample data.csv produces:

Starting gradient descent at b = 0, m = 0, error = 5565.107834483211
After 1000 iterations b = 0.08893651993741346, m = 1.4777440851894448, error = 112.61481011613473
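
The explicit Python loops above mirror the math one point at a time. If you prefer array arithmetic, here is a minimal vectorized sketch of the same two computations (the helper names are hypothetical; it assumes points is the same two-column array loaded from data.csv):

import numpy as np

def compute_error_vectorized(b, m, points):
    # Mean squared error over all points, without an explicit loop
    x, y = points[:, 0], points[:, 1]
    return np.mean((y - (m * x + b)) ** 2)

def step_gradient_vectorized(b_current, m_current, points, learning_rate):
    # Same partial derivatives as step_gradient, computed as array sums
    x, y = points[:, 0], points[:, 1]
    N = float(len(points))
    residual = y - (m_current * x + b_current)
    b_gradient = -(2 / N) * np.sum(residual)
    m_gradient = -(2 / N) * np.sum(x * residual)
    return [b_current - learning_rate * b_gradient,
            m_current - learning_rate * m_gradient]

Both versions take identical steps; the vectorized form is simply faster on large datasets.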
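As a quick sanity check, NumPy's closed-form least-squares fit gives the line the descent is converging toward (again assuming the same data.csv). After only 1000 iterations with a 0.0001 learning rate the descent has not fully converged, so expect the intercept in particular to still differ:

import numpy as np

points = np.genfromtxt('data.csv', delimiter=',')
# np.polyfit returns coefficients highest-degree first: [slope, intercept]
m_exact, b_exact = np.polyfit(points[:, 0], points[:, 1], 1)
print('Closed-form fit: b = {0}, m = {1}'.format(b_exact, m_exact))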