import numpy as np
import matplotlib.pyplot as plt
# Load the two-column CSV: column 0 holds x values, column 1 holds y values.
points = np.genfromtxt('data.csv', delimiter=',')
x_data = points[:, 0]
y_data = points[:, 1]

# Quick look at the raw samples before fitting anything.
plt.scatter(x_data, y_data)
plt.show()

# Model is the simple line y = k*x + b (k = slope, b = intercept).
k, b = 0, 0
# lr = learning rate; epochs = number of gradient-descent iterations.
lr, epochs = 0.001, 50
def gradient_descent_runner(x_data, y_data, b, k, lr, epochs):
    """Fit the line y = k*x + b to (x_data, y_data) with batch gradient descent.

    Args:
        x_data: sequence of x samples.
        y_data: sequence of y samples, same length as ``x_data``.
        b: initial intercept.
        k: initial slope.
        lr: learning rate (gradient step size).
        epochs: number of full passes over the data.

    Returns:
        Tuple ``(b, k)`` of the fitted intercept and slope.
    """
    m = float(len(x_data))
    for _ in range(epochs):
        # Accumulate the mean gradient over ALL samples, then take one
        # step per epoch (batch — not stochastic — gradient descent).
        b_grad = 0.0
        k_grad = 0.0
        for x, y in zip(x_data, y_data):
            error = (k * x + b) - y
            b_grad += error / m
            k_grad += x * error / m
        # Move against the gradient of the mean-squared-error cost.
        b = b - lr * b_grad
        k = k - lr * k_grad
    return b, k
# Run the fit, then overlay the resulting line on the raw samples.
b, k = gradient_descent_runner(x_data, y_data, b, k, lr, epochs)
fitted = k * x_data + b
plt.plot(x_data, y_data, 'b.')  # raw samples as blue dots
plt.plot(x_data, fitted, 'r')   # fitted regression line in red
plt.show()
# 感謝觀看 期待關注 (Thanks for watching — please follow!)
# 關注我,帶你一起寫bug (Follow me and let's write bugs together.)
# Warning: 未經授權,不得轉載 (Do not repost without permission.)
# 有問題的小夥伴請在下方留言,喜歡就點個贊吧 (Questions? Leave a comment below; a like is appreciated.)