# Hyperparameter tuning (調參優化)

import tensorflow as tf
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt

# Load Fashion-MNIST (60k train / 10k test, 28x28 grayscale images, labels 0-9)
# and scale pixel intensities from [0, 255] down to [0, 1] floats.
# NOTE(review): "train_lable" is a typo for "train_label"; it is kept as-is
# because later code in this file references the misspelled name.
(train_image, train_lable), (test_image, test_label) = tf.keras.datasets.fashion_mnist.load_data()
train_image, test_image = train_image / 255, test_image / 255

# model = tf.keras.Sequential()
# model.add(tf.keras.layers.Flatten(input_shape=(28, 28)))  # 變成 28*28的向量
# model.add(tf.keras.layers.Dense(128, activation='relu'))
# model.add(tf.keras.layers.Dense(128, activation='relu'))
# model.add(tf.keras.layers.Dense(128, activation='relu'))
# model.add(tf.keras.layers.Dense(10, activation='softmax'))
# # print(model.summary())
# model.compile(optimizer=tf.keras.optimizers.Adam(learning_rate=0.001)
#               , loss='categorical_crossentropy'
#               , metrics=['acc'])
# # model.fit(train_image,train_lable,epochs=10)   #   loss: 0.2371 - acc: 0.9107
#
# train_lable_onehot = tf.keras.utils.to_categorical(train_lable)
# test_label_onehot = tf.keras.utils.to_categorical(test_label)
#
# history = model.fit(train_image, train_lable_onehot
#                     , epochs=10
#                     , validation_data=(test_image, test_label_onehot))
# # print(history.history.keys()) # dict_keys(['loss', 'acc', 'val_loss', 'val_acc'])
# plt.plot(history.epoch, history.history.get('loss'), label='loss')  # train 上面的loss
# plt.plot(history.epoch, history.history.get('val_loss'), label='val_loss')  # test上面的loss
# plt.legend()
# plt.show()

# Overfitting: performs well on the training set but poorly on the test set.
# Underfitting: performs poorly on both the training set and the test set.
# todo   add Dropout layers
# model = tf.keras.Sequential()
# model.add(tf.keras.layers.Flatten(input_shape=(28, 28)))  # 變成 28*28的向量
# model.add(tf.keras.layers.Dense(128, activation='relu'))
# model.add(tf.keras.layers.Dropout(0.5))  # 隨機丟棄50%
# model.add(tf.keras.layers.Dense(128, activation='relu'))
# model.add(tf.keras.layers.Dropout(0.5))  # 隨機丟棄50%
# model.add(tf.keras.layers.Dense(128, activation='relu'))
# model.add(tf.keras.layers.Dropout(0.5))  # 隨機丟棄50%
# model.add(tf.keras.layers.Dense(10, activation='softmax'))
# # print(model.summary())
# model.compile(optimizer=tf.keras.optimizers.Adam(learning_rate=0.001)
#               , loss='categorical_crossentropy'
#               , metrics=['acc'])
# # model.fit(train_image,train_lable,epochs=10)   #   loss: 0.2371 - acc: 0.9107
#
# One-hot encode the integer class labels (0-9) so they match the
# 'categorical_crossentropy' loss used when compiling the model.
train_lable_onehot, test_label_onehot = (
    tf.keras.utils.to_categorical(labels) for labels in (train_lable, test_label)
)
#
# history = model.fit(train_image, train_lable_onehot
#                     , epochs=10
#                     , validation_data=(test_image, test_label_onehot))
# plt.plot(history.epoch, history.history.get('loss'), label='loss')  # train 上面的loss
# plt.plot(history.epoch, history.history.get('val_loss'), label='val_loss')  # test上面的loss
# plt.plot(history.epoch, history.history.get('acc'), label='acc')  # train 上面的acc
# plt.plot(history.epoch, history.history.get('val_acc'), label='val_acc')  # test上面的acc
# plt.legend()
# plt.show()

# todo: reduce model capacity (減小規模) to curb overfitting — a single
# 32-unit hidden layer instead of the earlier three 128-unit layers.
model = tf.keras.Sequential([
    tf.keras.layers.Flatten(input_shape=(28, 28)),   # 28x28 image -> 784-vector
    tf.keras.layers.Dense(32, activation='relu'),    # small hidden layer
    tf.keras.layers.Dense(10, activation='softmax'), # per-class probabilities
])
model.compile(
    optimizer=tf.keras.optimizers.Adam(learning_rate=0.001),
    loss='categorical_crossentropy',
    metrics=['acc'],
)
history = model.fit(
    train_image,
    train_lable_onehot,
    epochs=10,
    validation_data=(test_image, test_label_onehot),
)
# Plot train vs. validation accuracy per epoch to judge over-/underfitting.
plt.plot(history.epoch, history.history.get('acc'), label='acc')         # train accuracy
plt.plot(history.epoch, history.history.get('val_acc'), label='val_acc') # test accuracy
plt.legend()
plt.show()
# (removed: blog page comment-section footer text captured during scraping)