Every blog, every motto:
0. 前言
卷積神經網絡實戰,激活函數selu
1. 代碼部分
1. 導入模塊
import matplotlib as mpl
import matplotlib.pyplot as plt
%matplotlib inline
import numpy as np
import sklearn
import pandas as pd
import os
import sys
import time
import tensorflow as tf
from tensorflow import keras
os.environ['CUDA_VISIBLE_DEVICES'] = '/gpu:0'
print(tf.__version__)
print(sys.version_info)
for module in mpl,np,pd,sklearn,tf,keras:
print(module.__name__,module.__version__)
2. 讀取數據
# Load Fashion-MNIST and carve the first 5000 training images off as a
# held-out validation split.
fashion_mnist = keras.datasets.fashion_mnist
(x_train_all, y_train_all), (x_test, y_test) = fashion_mnist.load_data()

VALID_SIZE = 5000
x_valid, y_valid = x_train_all[:VALID_SIZE], y_train_all[:VALID_SIZE]
x_train, y_train = x_train_all[VALID_SIZE:], y_train_all[VALID_SIZE:]

# Report the shape of each split: validation, training, test.
for images, labels in ((x_valid, y_valid), (x_train, y_train), (x_test, y_test)):
    print(images.shape, labels.shape)
3. 數據歸一化
# Standardize pixel values (zero mean, unit variance). The scaler is fitted
# on the training set only, then applied to the validation and test sets;
# afterwards each array is restored to the (N, 28, 28, 1) layout Conv2D needs.
from sklearn.preprocessing import StandardScaler

scaler = StandardScaler()

def _as_column(images):
    # Flatten to one value per row so StandardScaler sees a single feature.
    return images.astype(np.float32).reshape(-1, 1)

x_train_scaled = scaler.fit_transform(_as_column(x_train)).reshape(-1, 28, 28, 1)
x_valid_scaled = scaler.transform(_as_column(x_valid)).reshape(-1, 28, 28, 1)
x_test_scaled = scaler.transform(_as_column(x_test)).reshape(-1, 28, 28, 1)
4. 構建模型
# Build the CNN: three conv/conv/pool stages with doubling filter counts
# (32 -> 64 -> 128), then a flattened dense head. Hidden layers use the
# self-normalizing SELU activation.
def _conv_pair(filters, **first_kwargs):
    # Two same-padded 3x3 SELU convolutions with the given filter count;
    # extra kwargs (e.g. input_shape) go to the first convolution only.
    return [
        keras.layers.Conv2D(filters=filters, kernel_size=3,
                            padding='same', activation='selu', **first_kwargs),
        keras.layers.Conv2D(filters=filters, kernel_size=3,
                            padding='same', activation='selu'),
    ]

model = keras.models.Sequential(
    _conv_pair(32, input_shape=(28, 28, 1))
    + [keras.layers.MaxPool2D(pool_size=2)]
    + _conv_pair(64)
    + [keras.layers.MaxPool2D(pool_size=2)]
    + _conv_pair(128)
    + [keras.layers.MaxPool2D(pool_size=2)]
    + [
        keras.layers.Flatten(),
        keras.layers.Dense(128, activation='selu'),
        # 10-way softmax output, one unit per Fashion-MNIST class.
        keras.layers.Dense(10, activation='softmax'),
    ]
)

# Labels are plain integers, hence the sparse cross-entropy loss.
model.compile(loss='sparse_categorical_crossentropy',
              optimizer='sgd',
              metrics=['accuracy'])
model.summary()
5. 開始訓練
# Callbacks: TensorBoard (directory), EarlyStopping, ModelCheckpoint (file).
logdir = os.path.join("cnn-selu-callbacks")
print(logdir)
# FIX: os.makedirs(..., exist_ok=True) replaces the check-then-mkdir pair,
# which is race-prone and would fail if logdir were ever a nested path.
os.makedirs(logdir, exist_ok=True)

# Checkpoint file holding the best model seen during training.
output_model_file = os.path.join(logdir, "fashion_mnist_model.h5")
callbacks = [
    keras.callbacks.TensorBoard(logdir),
    keras.callbacks.ModelCheckpoint(output_model_file, save_best_only=True),
    # Stop when val_loss fails to improve by 1e-3 for 5 consecutive epochs.
    keras.callbacks.EarlyStopping(patience=5, min_delta=1e-3),
]

# Train, validating on the held-out split after every epoch.
history = model.fit(x_train_scaled, y_train,
                    epochs=10,
                    validation_data=(x_valid_scaled, y_valid),
                    callbacks=callbacks)
6. 學習曲線
# Plot every metric recorded in the training history on a single chart.
def plot_learning_curves(history):
    frame = pd.DataFrame(history.history)
    frame.plot(figsize=(8, 5))
    plt.grid(True)
    plt.gca().set_ylim(0, 1)
    plt.show()

plot_learning_curves(history)

# Why the loss falls slowly at first:
# 1. Many parameters, so early training is far from converged.
# 2. Vanishing gradients, arising from the chain rule in deep networks.
# Mitigation here: the SELU activation alleviates vanishing gradients.