This one is fairly simple, so let's go straight to the code.
import torch
import torch.nn as nn
import torch.optim
from sklearn import datasets
import numpy as np
# load the iris dataset
iris = datasets.load_iris()
x, y = iris.data, iris.target
# the first 100 rows contain exactly two classes, so they suit binary classification
x = x[0:100, :].astype(np.float32)
y = np.array(y[0:100], dtype=np.float32).reshape(-1, 1)
# convert the numpy arrays to tensors
x_train = torch.from_numpy(x)
y_train = torch.from_numpy(y)
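# optional sanity check (not in the original post): x_train should be
# [100, 4] and y_train [100, 1] so they broadcast correctly in the loss
# print(x_train.shape, y_train.shape)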
# define the model: the input is a 4-dimensional vector, the output is 1-dimensional
class logisticReg(nn.Module):
    def __init__(self):
        super(logisticReg, self).__init__()
        self.conv = nn.Sequential(
            nn.Linear(4, 1),
            nn.Sigmoid()
        )

    def forward(self, x):
        y = self.conv(x)
        return y
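# a forward pass maps a [N, 4] batch to [N, 1] probabilities, e.g. (hypothetical check):
# logisticReg()(torch.randn(3, 4)).shape == torch.Size([3, 1])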
model = logisticReg()
# define a loss function; MSE is used here, though nn.BCELoss() would be the
# more conventional choice for logistic regression
criterion = nn.MSELoss()
# define an optimizer: plain SGD with learning rate 0.01
optimizer = torch.optim.SGD(model.parameters(), lr=0.01)
# start training
for epoch in range(10000):
    y_pred = model(x_train)
    loss = criterion(y_pred, y_train)
    optimizer.zero_grad()
    loss.backward()
    optimizer.step()
    if epoch % 1000 == 0:
        print("loss:{},epoch:{}".format(loss.item(), epoch))
Output:
loss:0.27741584181785583,epoch:0
loss:0.022335145622491837,epoch:1000
loss:0.011672687716782093,epoch:2000
loss:0.00790750328451395,epoch:3000
loss:0.005995686165988445,epoch:4000
loss:0.0048401085659861565,epoch:5000
loss:0.004065848421305418,epoch:6000
loss:0.0035105173010379076,epoch:7000
loss:0.003092461731284857,epoch:8000
loss:0.002766150049865246,epoch:9000
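With the loss this low, the trained model can be checked by thresholding the sigmoid output at 0.5 to get hard class labels. A minimal sketch, assuming the model, x_train, and y_train from above are still in scope (the 0.5 threshold and the accuracy check are illustrative, not part of the original post):

# predicted probabilities on the training set; no gradients needed here
with torch.no_grad():
    probs = model(x_train)
# threshold at 0.5 to obtain hard 0/1 class labels
preds = (probs > 0.5).float()
# fraction of predictions that match the true labels
accuracy = (preds == y_train).float().mean().item()
print("training accuracy: {:.4f}".format(accuracy))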