7.6 Applying a Nonlinear Regression Algorithm

Python implementation:

import numpy as np
import random


# m denotes the number of examples here, not the number of features
def gradientDescent(x, y, theta, alpha, m, numIterations):
    xTrans = x.transpose()
    for i in range(numIterations):
        # prediction for all examples under the current theta
        hypothesis = np.dot(x, theta)
        loss = hypothesis - y
        # squared-error cost; the 1/2 factor cancels when differentiating
        cost = np.sum(loss ** 2) / (2 * m)
        print("iteration %d | cost: %f" % (i, cost))
        # average gradient over all m examples
        gradient = np.dot(xTrans, loss) / m
        # step against the gradient, scaled by the learning rate alpha
        theta -= alpha * gradient
    return theta
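
# Note on the update rule: the cost minimized above is
#     J(theta) = (1 / (2m)) * sum_i (x_i . theta - y_i)^2
# and its gradient with respect to theta is
#     grad J = (1 / m) * X^T (X . theta - y),
# which is exactly the `np.dot(xTrans, loss) / m` term in the loop,
# so each iteration steps theta by -alpha * grad J.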


def getData(numPoint, bias, variance):
    x = np.zeros((numPoint, 2))
    y = np.zeros(shape=numPoint)
    for i in range(numPoint):
        # first column is a constant 1 so theta[0] acts as the intercept
        x[i][0] = 1
        x[i][1] = i
        # target lies on a line, plus uniform noise scaled by variance
        y[i] = (i + bias) + random.uniform(0, 1) * variance
    return x, y

# generate 100 points with a bias of 25 and a variance of 10 as a bit of noise
x, y = getData(100, 25, 10)
m, n = np.shape(x)
numIteration = 100000
alpha = 0.0005
theta = np.ones(n)
print(theta)
theta = gradientDescent(x, y, theta, alpha, m, numIteration)
print(theta)
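
Because each target is generated as y_i = (i + 25) + U(0, 1) * 10, the noise averages 5, so the fit should land near theta ≈ [30, 1]. As a quick sanity check (a minimal sketch, not part of the original script; it reuses the x and y arrays built by getData above), the gradient-descent result can be compared with NumPy's closed-form least-squares solver:

# Closed-form least-squares solution for comparison with gradient descent.
# Assumes x and y are the arrays produced by getData above.
theta_exact, residuals, rank, sv = np.linalg.lstsq(x, y, rcond=None)
print(theta_exact)  # expected to be close to [30, 1]

If the gradient-descent theta differs noticeably from theta_exact, the usual suspects are a learning rate alpha that is too large (divergence) or too few iterations (underconvergence).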

