最值歸一化 Normalization
# Min-max normalization: rescale values to the [0, 1] range.
import numpy as np

x = np.random.randint(1, 100, size=100)
# Normalize the 1-D vector: (x - min) / (max - min).
x = (x - np.min(x)) / (np.max(x) - np.min(x))

# Min-max normalization of a 2-D array, column by column.
X = np.random.randint(1, 100, (50, 2))
X = np.array(X, dtype=float)
# Vectorized over axis 0: works for any number of columns,
# not just the 2 that the original per-column loop hard-coded.
X = (X - np.min(X, axis=0)) / (np.max(X, axis=0) - np.min(X, axis=0))

import matplotlib.pyplot as plt
plt.scatter(X[:, 0], X[:, 1])
plt.show()
適用於極值有範圍的數據集
均值方差歸一化Standardization
均值爲0 方差爲1
# Mean-variance normalization (standardization): rescale each
# column to zero mean and unit variance.
x2 = np.random.randint(1, 100, (50, 2))
x2 = np.array(x2, dtype=float)
# Vectorized over axis 0: (x - mean) / std per column, for any
# number of columns instead of a loop hard-coded to 2.
x2 = (x2 - np.mean(x2, axis=0)) / np.std(x2, axis=0)
plt.scatter(x2[:, 0], x2[:, 1])
plt.show()
使用sklearn中的StandardScaler進行均值方差歸一化
# Mean-variance normalization with sklearn's StandardScaler.
from sklearn import datasets

iris = datasets.load_iris()
x = iris.data
y = iris.target

from sklearn.model_selection import train_test_split
x_train, x_test, y_train, y_test = train_test_split(
    x, y, test_size=0.2, random_state=666)

# Load StandardScaler from the preprocessing package.
from sklearn.preprocessing import StandardScaler
scaler = StandardScaler()
# Fit the scaler on the training set only, then apply that same
# transform to both the training and the test data.
x_train = scaler.fit_transform(x_train)
x_test_standard = scaler.transform(x_test)

from sklearn.neighbors import KNeighborsClassifier
knn_clf = KNeighborsClassifier(n_neighbors=3)
knn_clf.fit(x_train, y_train)
print(knn_clf.score(x_test_standard, y_test))
1.0