Logistic Regression
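
The script below fits a binary logistic regression in PyTorch: a single linear layer followed by a sigmoid, trained with binary cross-entropy (BCE). As a quick reference for what the code computes, the model and loss are

\[ \hat{y} = \sigma(w^\top x + b) = \frac{1}{1 + e^{-(w^\top x + b)}} \]

\[ \mathcal{L}_{\mathrm{BCE}} = -\frac{1}{N}\sum_{i=1}^{N}\big[\,y_i\log\hat{y}_i + (1-y_i)\log(1-\hat{y}_i)\,\big] \]

and the decision boundary drawn at the end is the line where \(w^\top x + b = 0\).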

# Import the required packages
import torch
import matplotlib.pyplot as plt
import numpy as np
# Load the training data
with open('data.txt','r') as f:
    data = f.read().split('\n')
    data = [row.split(',') for row in data if row]  # skip any blank lines
    label0 = np.array([(float(row[0]),float(row[1])) for row in data if row[2]=='0'])
    label1 = np.array([(float(row[0]),float(row[1])) for row in data if row[2]=='1'])
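# data.txt is assumed to hold one comma-separated sample per line in the form
# "x1,x2,label", where label is 0 or 1 (the values below are made up for illustration):
#   34.6,78.0,0
#   60.2,86.3,1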
plt.plot(label0[:,0],label0[:,1],'ro',label='y==0')
plt.plot(label1[:,0],label1[:,1],'bo',label='y==1')
plt.legend(loc='best')
plt.show()
x_data1 = torch.from_numpy(label0)
y_data1 = torch.zeros(x_data1.size(0), 1)   # targets for class y==0
x_data2 = torch.from_numpy(label1)
y_data2 = torch.ones(x_data2.size(0), 1)    # targets for class y==1
x_data = torch.cat((x_data1,x_data2),0).float()
y_data = torch.cat((y_data1,y_data2),0).float()
# Define the model: a linear layer followed by a sigmoid
class LogisticRegression(torch.nn.Module):
    def __init__(self):
        super(LogisticRegression,self).__init__()
        self.lr = torch.nn.Linear(2,1)
        self.sm = torch.nn.Sigmoid()
    def forward(self, x):
        x = self.lr(x)
        x = self.sm(x)
        return x
# Move the model to the GPU if one is available
logistic_model = LogisticRegression()
if torch.cuda.is_available():
    logistic_model.cuda()
# Define the loss function and the optimizer
loss_func = torch.nn.BCELoss()
optimizer = torch.optim.SGD(logistic_model.parameters(),lr=1e-3,momentum=0.9)
# Train the model (on the GPU when available, otherwise on the CPU)
if torch.cuda.is_available():
    x = x_data.cuda()
    y = y_data.cuda()
else:
    x = x_data
    y = y_data
for epoch in range(50000):
    # forward pass
    output = logistic_model(x)
    loss = loss_func(output,y)
    print_loss = loss.item()
    mask = output.ge(0.5).float()          # threshold the predicted probabilities at 0.5
    correct = (mask == y).sum()
    acc = correct.item() / x.size(0)
    # backward pass
    optimizer.zero_grad()
    loss.backward()
    optimizer.step()
    if (epoch + 1) % 10000 == 0:
        print('*' * 10)
        print('epoch {}'.format(epoch + 1))
        print('loss is {:.4f}'.format(print_loss))
        print('correct rate is {:.4f}'.format(acc))
# Plot the decision boundary: the line where w0*x1 + w1*x2 + b = 0, i.e. x2 = (-w0*x1 - b) / w1
w0,w1 = logistic_model.lr.weight[0]
w0 = w0.item()
w1 = w1.item()
b = logistic_model.lr.bias.item()
plot_x = np.arange(30,100,0.1)
plt.plot(label0[:,0],label0[:,1],'ro',label='y==0')
plt.plot(label1[:,0],label1[:,1],'bo',label='y==1')
plt.legend(loc = 'best')
plot_y = (-w0 * plot_x - b) / w1
plt.plot(plot_x,plot_y)
plt.show()
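
Once training has finished, the fitted model can score a new point directly. This is a minimal usage sketch added for illustration; the test point [60.0, 60.0] is made up and not taken from the original data.

new_point = torch.tensor([[60.0, 60.0]])      # hypothetical sample, for illustration only
if torch.cuda.is_available():
    new_point = new_point.cuda()              # keep the input on the same device as the model
with torch.no_grad():
    prob = logistic_model(new_point)          # predicted probability of class y==1
pred = int(prob.item() >= 0.5)                # threshold at 0.5
print('P(y==1) = {:.4f}, predicted class = {}'.format(prob.item(), pred))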

Training data:

[figure: scatter plot of the two classes in the training set]

The trained function:

[figure: training data with the fitted decision boundary overlaid]
