Custom Layers and Models in Keras

 

This post modifies a few details of the code from https://www.cnblogs.com/zdm-code/p/12245906.html; in particular, a softmax layer is added at the end of the model.

 

import tensorflow as tf
from tensorflow import keras
from tensorflow.keras import layers, optimizers, datasets

def preprocess(x, y):
    # Flatten each 28x28 image to a 784-dim float vector in [0, 1] and one-hot encode the label
    x = tf.cast(tf.reshape(x, [-1]), dtype=tf.float32) / 255.
    y = tf.cast(tf.one_hot(y, depth=10), dtype=tf.int32)
    return x, y

# Load MNIST and build the training / validation pipelines
(x_train, y_train), (x_val, y_val) = datasets.mnist.load_data()
print('data: ', x_train.shape, y_train.shape, x_val.shape, y_val.shape)

db = tf.data.Dataset.from_tensor_slices((x_train, y_train))
db = db.map(preprocess).shuffle(60000).batch(128)
db_val = tf.data.Dataset.from_tensor_slices((x_val, y_val))
db_val = db_val.map(preprocess).batch(128)
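
As a quick check (not in the original post), pulling one batch from the pipeline confirms the shapes the model expects: a [128, 784] float batch of flattened images and [128, 10] one-hot labels.

# Optional sanity check: peek at one training batch
sample_x, sample_y = next(iter(db))
print('batch shapes:', sample_x.shape, sample_y.shape)  # (128, 784) (128, 10)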

# Custom layer: inherit from layers.Layer
class MyDense(layers.Layer):
    def __init__(self, input_dim, output_dim):
        super(MyDense, self).__init__()
        # add_weight registers the kernel and bias as trainable variables
        # (add_variable is deprecated in TF2)
        self.kernel = self.add_weight('w', [input_dim, output_dim])
        self.bias = self.add_weight('b', [output_dim])

    def call(self, inputs, training=None):
        # Forward pass: a plain affine transform
        out = inputs @ self.kernel + self.bias
        # Equivalent: out = tf.matmul(inputs, self.kernel) + self.bias
        return out
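
The custom layer can also be exercised on its own before wiring it into the model; the dummy batch below is illustrative and not part of the original code.

# Optional standalone check of MyDense
dummy = tf.zeros([4, 28 * 28])                              # fake batch of 4 flattened images
print('MyDense out:', MyDense(28 * 28, 512)(dummy).shape)   # (4, 512)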

# Custom model: inherit from keras.Model
class MyModel(keras.Model):
    def __init__(self):
        super(MyModel, self).__init__()
        # Four custom dense layers: 784 -> 512 -> 256 -> 64 -> 10
        self.fc1 = MyDense(input_dim=28*28, output_dim=512)
        self.fc2 = MyDense(input_dim=512, output_dim=256)
        self.fc3 = MyDense(input_dim=256, output_dim=64)
        self.fc4 = MyDense(input_dim=64, output_dim=10)

    def call(self, inputs, training=None):
        # Forward pass; inputs.shape = [b, 28*28]
        x = self.fc1(inputs)
        x = tf.nn.relu(x)
        x = self.fc2(x)
        x = tf.nn.relu(x)
        x = self.fc3(x)
        x = tf.nn.relu(x)
        x = self.fc4(x)
        # The softmax added to the original code: the model outputs probabilities,
        # so the loss below keeps its default from_logits=False
        x = tf.nn.softmax(x)
        return x

network = MyModel()
# Build with a known input shape so summary() can report parameter counts
network.build(input_shape=[None, 28*28])
network.summary()
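
Each MyDense layer contributes input_dim*output_dim weights plus output_dim biases, which matches the counts in the summary output below: 784*512+512 = 401,920, 512*256+256 = 131,328, 256*64+64 = 16,448, and 64*10+10 = 650, for a total of 550,346 parameters.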



# Configure optimizer, loss and metrics (learning_rate replaces the deprecated lr argument)
network.compile(optimizer=optimizers.Adam(learning_rate=1e-2),
                loss=tf.keras.losses.CategoricalCrossentropy(),
                metrics=['accuracy'])

# Train for 20 epochs, validating after every epoch
network.fit(db, epochs=20, validation_data=db_val, validation_freq=1)
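
After training, the same tf.data pipelines can be reused for evaluation and prediction. The lines below are a minimal sketch and are not part of the original post.

# Evaluate on the validation pipeline and inspect a few predictions
network.evaluate(db_val)
x_batch, y_batch = next(iter(db_val))
print('predicted classes:', tf.argmax(network(x_batch), axis=1)[:10])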


 

 

data:  (60000, 28, 28) (60000,) (10000, 28, 28) (10000,)
Model: "my_model_11"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
my_dense_54 (MyDense)        multiple                  401920    
_________________________________________________________________
my_dense_55 (MyDense)        multiple                  131328    
_________________________________________________________________
my_dense_56 (MyDense)        multiple                  16448     
_________________________________________________________________
my_dense_57 (MyDense)        multiple                  650       
=================================================================
Total params: 550,346
Trainable params: 550,346
Non-trainable params: 0
_________________________________________________________________
Epoch 1/20
469/469 [==============================] - 14s 29ms/step - loss: 0.2793 - acc: 0.9159 - val_loss: 0.1509 - val_acc: 0.9545
Epoch 2/20
469/469 [==============================] - 6s 12ms/step - loss: 0.1276 - acc: 0.9629 - val_loss: 0.1237 - val_acc: 0.9687
Epoch 3/20
469/469 [==============================] - 6s 12ms/step - loss: 0.1025 - acc: 0.9708 - val_loss: 0.1377 - val_acc: 0.9655
Epoch 4/20
469/469 [==============================] - 6s 12ms/step - loss: 0.0921 - acc: 0.9744 - val_loss: 0.1173 - val_acc: 0.9680
Epoch 5/20
469/469 [==============================] - 6s 12ms/step - loss: 0.0865 - acc: 0.9750 - val_loss: 0.1134 - val_acc: 0.9709
Epoch 6/20
469/469 [==============================] - 6s 12ms/step - loss: 0.0778 - acc: 0.9788 - val_loss: 0.1259 - val_acc: 0.9700
Epoch 7/20
469/469 [==============================] - 6s 12ms/step - loss: 0.0830 - acc: 0.9780 - val_loss: 0.1511 - val_acc: 0.9700
Epoch 8/20
469/469 [==============================] - 6s 12ms/step - loss: 0.0732 - acc: 0.9802 - val_loss: 0.1387 - val_acc: 0.9733
Epoch 9/20
469/469 [==============================] - 5s 12ms/step - loss: 0.0719 - acc: 0.9806 - val_loss: 0.1499 - val_acc: 0.9671
Epoch 10/20
469/469 [==============================] - 6s 12ms/step - loss: 0.0568 - acc: 0.9847 - val_loss: 0.1490 - val_acc: 0.9713
Epoch 11/20
469/469 [==============================] - 6s 12ms/step - loss: 0.0623 - acc: 0.9841 - val_loss: 0.1483 - val_acc: 0.9675
Epoch 12/20
469/469 [==============================] - 6s 12ms/step - loss: 0.0597 - acc: 0.9844 - val_loss: 0.1277 - val_acc: 0.9747
Epoch 13/20
469/469 [==============================] - 5s 11ms/step - loss: 0.0665 - acc: 0.9830 - val_loss: 0.1573 - val_acc: 0.9698
Epoch 14/20
469/469 [==============================] - 6s 12ms/step - loss: 0.0553 - acc: 0.9865 - val_loss: 0.1766 - val_acc: 0.9678
Epoch 15/20
469/469 [==============================] - 5s 12ms/step - loss: 0.0527 - acc: 0.9868 - val_loss: 0.1403 - val_acc: 0.9754
Epoch 16/20
469/469 [==============================] - 6s 12ms/step - loss: 0.0474 - acc: 0.9879 - val_loss: 0.1371 - val_acc: 0.9767
Epoch 17/20
469/469 [==============================] - 6s 13ms/step - loss: 0.0504 - acc: 0.9877 - val_loss: 0.1545 - val_acc: 0.9714
Epoch 18/20
469/469 [==============================] - 6s 12ms/step - loss: 0.0496 - acc: 0.9881 - val_loss: 0.1739 - val_acc: 0.9702
Epoch 19/20
469/469 [==============================] - 5s 11ms/step - loss: 0.0486 - acc: 0.9883 - val_loss: 0.1839 - val_acc: 0.9714
Epoch 20/20
469/469 [==============================] - 6s 12ms/step - loss: 0.0481 - acc: 0.9889 - val_loss: 0.1863 - val_acc: 0.9724
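
Because the model ends with an explicit softmax, the compiled loss keeps its default from_logits=False. A common variant is to output raw logits and let the loss apply the softmax internally, which is usually more numerically stable. The sketch below assumes the same MyDense stack; the class name LogitsModel is made up for illustration and is not part of the original post.

# Hypothetical variant: no final softmax, so the loss uses from_logits=True
class LogitsModel(keras.Model):
    def __init__(self):
        super(LogitsModel, self).__init__()
        self.fc1 = MyDense(28 * 28, 512)
        self.fc2 = MyDense(512, 256)
        self.fc3 = MyDense(256, 64)
        self.fc4 = MyDense(64, 10)

    def call(self, inputs, training=None):
        x = tf.nn.relu(self.fc1(inputs))
        x = tf.nn.relu(self.fc2(x))
        x = tf.nn.relu(self.fc3(x))
        return self.fc4(x)  # raw logits

logits_net = LogitsModel()
logits_net.compile(optimizer=optimizers.Adam(learning_rate=1e-2),
                   loss=tf.keras.losses.CategoricalCrossentropy(from_logits=True),
                   metrics=['accuracy'])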

 
