# 100-Days-Of-ML-Code (Day 4/5/6 -- Logistic Regression)

#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Project: LogisticRegression
Author: Aifu Han
Date: 2018-08-11
"""
# Step 1: Data preprocessing
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt

# Load the dataset: columns 2-3 (Age, EstimatedSalary) are the features,
# column 4 (Purchased) is the binary label.
dataset = pd.read_csv('./Social_Network_Ads.csv')
X = dataset.iloc[:, [2, 3]].values
Y = dataset.iloc[:, 4].values

# Split into training and test sets BEFORE scaling.  Fitting the scaler on
# the full dataset (as the original code did) leaks test-set statistics
# into the training pipeline.
from sklearn.model_selection import train_test_split
X_train, X_test, Y_train, Y_test = train_test_split(X, Y, test_size=0.25, random_state=0)

# Feature scaling: learn mean/std from the training set only, then apply
# the identical transform to the test set.
from sklearn.preprocessing import StandardScaler
sc_X = StandardScaler()
X_train = sc_X.fit_transform(X_train)
X_test = sc_X.transform(X_test)

# Step 2: Fit a logistic-regression classifier on the training split.
from sklearn.linear_model import LogisticRegression
classifier = LogisticRegression().fit(X_train, Y_train)

# Step 3: Predict labels for the held-out test split and show them.
Y_pred = classifier.predict(X_test)
print(Y_pred)

# Step 4: Visualise the decision regions on the TRAINING set.
from matplotlib.colors import ListedColormap

X_set, y_set = X_train, Y_train

# Dense grid over the (scaled) feature plane, padded by 1 on every side.
x_lo, x_hi = X_set[:, 0].min() - 1, X_set[:, 0].max() + 1
y_lo, y_hi = X_set[:, 1].min() - 1, X_set[:, 1].max() + 1
X1, X2 = np.meshgrid(np.arange(x_lo, x_hi, 0.01),
                     np.arange(y_lo, y_hi, 0.01))

# Classify every grid point and paint the two decision regions.
grid_points = np.array([X1.ravel(), X2.ravel()]).T
region = classifier.predict(grid_points).reshape(X1.shape)
plt.contourf(X1, X2, region,
             alpha=0.75, cmap=ListedColormap(('red', 'green')))
plt.xlim(X1.min(), X1.max())
plt.ylim(X2.min(), X2.max())

# Overlay the training samples, coloured by their true class.
for idx, label in enumerate(np.unique(y_set)):
    plt.scatter(X_set[y_set == label, 0], X_set[y_set == label, 1],
                c=ListedColormap(('red', 'green'))(idx), label=label)

plt.title(' LOGISTIC(Training set)')
plt.xlabel(' Age')
plt.ylabel(' Estimated Salary')
plt.legend()
plt.show()

# Same visualisation, this time for the TEST set.
from matplotlib.colors import ListedColormap

X_set, y_set = X_test, Y_test

# Dense grid over the (scaled) feature plane, padded by 1 on every side.
x_lo, x_hi = X_set[:, 0].min() - 1, X_set[:, 0].max() + 1
y_lo, y_hi = X_set[:, 1].min() - 1, X_set[:, 1].max() + 1
X1, X2 = np.meshgrid(np.arange(x_lo, x_hi, 0.01),
                     np.arange(y_lo, y_hi, 0.01))

# Classify every grid point and paint the two decision regions.
grid_points = np.array([X1.ravel(), X2.ravel()]).T
region = classifier.predict(grid_points).reshape(X1.shape)
plt.contourf(X1, X2, region,
             alpha=0.75, cmap=ListedColormap(('red', 'green')))
plt.xlim(X1.min(), X1.max())
plt.ylim(X2.min(), X2.max())

# Overlay the test samples, coloured by their true class.
for idx, label in enumerate(np.unique(y_set)):
    plt.scatter(X_set[y_set == label, 0], X_set[y_set == label, 1],
                c=ListedColormap(('red', 'green'))(idx), label=label)

plt.title(' LOGISTIC(Test set)')
plt.xlabel(' Age')
plt.ylabel(' Estimated Salary')
plt.legend()
plt.show()

# Alternative way to draw the logistic-regression decision boundary:
# scatter the samples, then draw only the P(class 1) = 0.5 contour line.
# First, the training set.
plt.figure(figsize=(10, 6))
plt.scatter(X_train[Y_train == 0][:, 0], X_train[Y_train == 0][:, 1], color='b', label='0')
plt.scatter(X_train[Y_train == 1][:, 0], X_train[Y_train == 1][:, 1], color='r', label='1')
plt.legend()
# Grid spanning the training data (original had a `xl_max` letter-l typo).
x1_min, x1_max = X_train[:, 0].min(), X_train[:, 0].max()
x2_min, x2_max = X_train[:, 1].min(), X_train[:, 1].max()
xx1, xx2 = np.meshgrid(np.linspace(x1_min, x1_max), np.linspace(x2_min, x2_max))
grid = np.c_[xx1.ravel(), xx2.ravel()]
# Use the class-1 probability (not hard 0/1 labels) so the [0.5] contour
# is a smooth boundary; its location is identical to predict()'s boundary.
probs = classifier.predict_proba(grid)[:, 1].reshape(xx1.shape)

plt.contour(xx1, xx2, probs, [0.5], linewidths=1, colors='black')
plt.show()

# Now the test-set scatter plot with its decision boundary.
plt.figure(figsize=(10, 6))
plt.scatter(X_test[Y_pred == 0][:, 0], X_test[Y_pred == 0][:, 1], color='b', label='0')
plt.scatter(X_test[Y_pred == 1][:, 0], X_test[Y_pred == 1][:, 1], color='r', label='1')
plt.legend()
x3_min, x3_max = X_test[:, 0].min(), X_test[:, 0].max()
x4_min, x4_max = X_test[:, 1].min(), X_test[:, 1].max()
xx3, xx4 = np.meshgrid(np.linspace(x3_min, x3_max), np.linspace(x4_min, x4_max))
grid1 = np.c_[xx3.ravel(), xx4.ravel()]
# BUG FIX: the original predicted on `grid` (the TRAINING-figure grid),
# so the boundary was drawn from the wrong coordinates; use `grid1`.
# Class-1 probability gives a smooth boundary at the same location.
probs1 = classifier.predict_proba(grid1)[:, 1].reshape(xx3.shape)

plt.contour(xx3, xx4, probs1, [0.5], linewidths=1, colors='black')
plt.show()

# The predicted results (Y_pred) are printed by step 3 above.
#
# The figures are rendered in two ways: filled decision-region contour
# plots (step 4) and plain decision-boundary contour lines (the
# alternative plots at the end).