# Python: gradient descent applied to linear regression
import os
import os.path
def lineLogist(alpha=0.0001, max_iter=1000, epsilon=0.0000000001, verbose=True):
    """Fit a 3-parameter linear model to the hard-coded training data
    using stochastic gradient descent.

    The model is y = theta0 * x[0] + theta1 * x[1] + theta2 * x[2],
    where x[0] == 1 acts as the bias feature.

    Args:
        alpha: learning rate for each per-sample SGD update.
        max_iter: maximum number of passes (epochs) over the training set.
        epsilon: convergence threshold on the change of the cost between
            consecutive epochs.
        verbose: when True, print the fitted parameters and final cost
            (preserves the original script's output).

    Returns:
        (theta0, theta1, theta2, error): fitted parameters and the final
        half-sum-of-squared-residuals cost.
    """
    # Training data: each row is (x0, x1, x2); x0 == 1 is the bias term.
    x = [(1, 104, 3), (1, 100, 3), (1, 200, 3), (1, 216, 2), (1, 300, 4)]
    # y[i] is the target for sample i.
    y = [400, 330, 369, 232, 540]
    m = len(x)

    # Initialize the parameters to zero.
    theta0 = 0.0
    theta1 = 0.0
    theta2 = 0.0
    error0 = 0.0   # cost from the previous epoch
    error1 = 0.0   # cost from the current epoch

    for _ in range(max_iter):
        # Stochastic gradient descent: update after every single sample.
        for i in range(m):
            residual = y[i] - (theta0 * x[i][0] + theta1 * x[i][1] + theta2 * x[i][2])
            theta0 += alpha * residual * x[i][0]
            theta1 += alpha * residual * x[i][1]
            theta2 += alpha * residual * x[i][2]
        # Half sum-of-squared-residuals cost over the whole training set.
        error1 = sum(
            (y[j] - (theta0 * x[j][0] + theta1 * x[j][1] + theta2 * x[j][2])) ** 2 / 2.0
            for j in range(m)
        )
        if abs(error1 - error0) < epsilon:
            break  # cost has stopped changing: converged
        error0 = error1

    if verbose:
        # print(...) with a single argument behaves identically on Python 2 and 3.
        print('theta0:%f,theta1:%f,theta2:%f,error1:%f' % (theta0, theta1, theta2, error1))
    return theta0, theta1, theta2, error1
# Run the regression demo only when executed as a script.
if __name__ == "__main__":
    lineLogist()
import os.path
def lineLogist():
    """Fit y = t0*x0 + t1*x1 + t2*x2 to the training set below with
    stochastic gradient descent and print the fitted parameters."""
    # Training samples (x0, x1, x2) and their targets; x0 == 1 is the bias.
    xs = [(1, 104, 3), (1, 100, 3), (1, 200, 3), (1, 216, 2), (1, 300, 4)]
    ys = [400, 330, 369, 232, 540]

    epsilom = 0.0000000001   # convergence tolerance on the cost change
    alpha = 0.0001           # learning rate
    max_iter = 1000          # epoch cap

    theta0, theta1, theta2 = 0, 0, 0
    prev_cost = 0
    cost = 0
    rounds = 0

    while rounds < max_iter:
        rounds += 1
        # One SGD pass: adjust the parameters after every sample.
        for (x0, x1, x2), target in zip(xs, ys):
            delta = target - (theta0 * x0 + theta1 * x1 + theta2 * x2)
            theta0 = theta0 + alpha * delta * x0
            theta1 = theta1 + alpha * delta * x1
            theta2 = theta2 + alpha * delta * x2
        # Half sum-of-squared-residuals over the whole training set.
        cost = 0
        for (x0, x1, x2), target in zip(xs, ys):
            cost += (target - (theta0 * x0 + theta1 * x1 + theta2 * x2)) ** 2 / 2
        # Stop once the cost change between epochs is negligible.
        if abs(cost - prev_cost) < epsilom:
            break
        prev_cost = cost

    print('theta0:%f,theta1:%f,theta2:%f,error1:%f' % (theta0, theta1, theta2, cost))
# Script entry point: run the demo when invoked directly.
if __name__ == "__main__":
    lineLogist()
# --- scraped blog footer (comment-widget text), not part of the program ---
# Post a comment
# All comments
# No comments yet — want to be the first? Type in the comment box above and click Publish.