TF2 Wide & Deep — multi-input model (California housing)

import matplotlib as mpl
import matplotlib.pyplot as plt
# %matplotlib inline   (Jupyter-only magic; not valid Python when run as a plain script)
import numpy as np
import sklearn
import pandas as pd
import os
import sys
import tensorflow as tf
from tensorflow import keras
#Get Data
# Fetch the California-housing regression dataset:
# 20640 samples x 8 numeric features (shapes confirmed by the prints below).
from sklearn.datasets import fetch_california_housing
housing = fetch_california_housing()
data = housing.data      # feature matrix, shape (20640, 8)
target = housing.target  # regression target (median house value), shape (20640,)
print("data.shape = ",housing.data.shape)
print("target.shape = ",housing.target.shape)


#split train_test data
from sklearn.model_selection import train_test_split

# First split: hold out 25% of all data as the test set
# (fixed random_state so the split is reproducible).
x_train_all , x_test ,y_train_all , y_test = train_test_split(
    data,target,random_state=7,test_size=0.25
)
print("x_train_all.shape = ",x_train_all.shape)
# Second split: carve 25% of the remaining data off as a validation set.
x_train , x_valid , y_train,y_valid = train_test_split(
    x_train_all,y_train_all,random_state = 11,test_size=0.25
)
print("x_train.shape = ",x_train.shape)

data.shape =  (20640, 8)
target.shape =  (20640,)
x_train_all.shape =  (15480, 8)
x_train.shape =  (11610, 8)
# Normalization: standardize every feature to zero mean / unit variance.
# The scaler is fit on the training split only, then applied unchanged to
# the validation and test splits, so no statistics leak from held-out data.
from sklearn.preprocessing import StandardScaler

scaler = StandardScaler()
x_train_scaled = scaler.fit_transform(x_train)
x_valid_scaled = scaler.transform(x_valid)
x_test_scaled = scaler.transform(x_test)
# Build the model (multi-input wide & deep architecture).
# Wide path: 5 raw features routed straight to the output layer.
# Deep path: 6 features passed through two 30-unit ReLU layers,
# then concatenated with the wide input before the final regression unit.
wide_in = keras.layers.Input(shape=[5])
deep_in = keras.layers.Input(shape=[6])

deep = keras.layers.Dense(30, activation='relu')(deep_in)
deep = keras.layers.Dense(30, activation='relu')(deep)

merged = keras.layers.concatenate([wide_in, deep])
prediction = keras.layers.Dense(1)(merged)

model = keras.models.Model(
    inputs=[wide_in, deep_in],
    outputs=[prediction],
)
model.summary()

# Plain SGD minimizing mean squared error (regression).
model.compile(loss="mean_squared_error", optimizer="sgd")

Model: "model_1"
__________________________________________________________________________________________________
Layer (type)                    Output Shape         Param #     Connected to                     
==================================================================================================
input_10 (InputLayer)           [(None, 6)]          0                                            
__________________________________________________________________________________________________
dense_11 (Dense)                (None, 30)           210         input_10[0][0]                   
__________________________________________________________________________________________________
input_9 (InputLayer)            [(None, 5)]          0                                            
__________________________________________________________________________________________________
dense_12 (Dense)                (None, 30)           930         dense_11[0][0]                   
__________________________________________________________________________________________________
concatenate_4 (Concatenate)     (None, 35)           0           input_9[0][0]                    
                                                                 dense_12[0][0]                   
__________________________________________________________________________________________________
dense_13 (Dense)                (None, 1)            36          concatenate_4[0][0]              
==================================================================================================
Total params: 1,176
Trainable params: 1,176
Non-trainable params: 0
__________________________________________________________________________________________________
#train

# Split the 8 standardized features into the two model inputs:
#   wide input = columns 0-4 (first 5 features),
#   deep input = columns 2-7 (last 6 features);
# columns 2-4 deliberately feed both paths.
wide_cols = slice(None, 5)
deep_cols = slice(2, None)
x_train_scaled_wide = x_train_scaled[:, wide_cols]
x_train_scaled_deep = x_train_scaled[:, deep_cols]
x_valid_scaled_wide = x_valid_scaled[:, wide_cols]
x_valid_scaled_deep = x_valid_scaled[:, deep_cols]
x_test_scaled_wide = x_test_scaled[:, wide_cols]
x_test_scaled_deep = x_test_scaled[:, deep_cols]

#fit
# Train for 10 epochs, monitoring loss on the validation split each epoch.
history = model.fit(
    [x_train_scaled_wide, x_train_scaled_deep],
    y_train,
    validation_data=(
        [x_valid_scaled_wide, x_valid_scaled_deep],
        y_valid,
    ),
    epochs=10,
)
Train on 11610 samples, validate on 3870 samples
Epoch 1/10
11610/11610 [==============================] - 1s 90us/sample - loss: 2.3776 - val_loss: 1.0541
Epoch 2/10
11610/11610 [==============================] - 1s 69us/sample - loss: 0.7604 - val_loss: 0.6942
Epoch 3/10
11610/11610 [==============================] - 1s 72us/sample - loss: 0.6115 - val_loss: 0.6295
Epoch 4/10
11610/11610 [==============================] - 1s 71us/sample - loss: 0.5738 - val_loss: 0.6052
Epoch 5/10
11610/11610 [==============================] - 1s 71us/sample - loss: 0.5548 - val_loss: 0.5853
Epoch 6/10
11610/11610 [==============================] - 1s 70us/sample - loss: 0.5400 - val_loss: 0.5738
Epoch 7/10
11610/11610 [==============================] - 1s 70us/sample - loss: 0.5277 - val_loss: 0.5579
Epoch 8/10
11610/11610 [==============================] - 1s 69us/sample - loss: 0.5167 - val_loss: 0.5460
Epoch 9/10
11610/11610 [==============================] - 1s 69us/sample - loss: 0.5071 - val_loss: 0.5359
Epoch 10/10
11610/11610 [==============================] - 1s 69us/sample - loss: 0.4982 - val_loss: 0.5254

def plot_learning_curves(history):
    """Plot the per-epoch loss/val_loss curves from a Keras History object."""
    frame = pd.DataFrame(history.history)
    # DataFrame.plot returns the matplotlib Axes it drew on.
    ax = frame.plot(figsize=(10, 6))
    ax.grid(True)
    ax.set_ylim(0, 1)
    plt.show()
plot_learning_curves(history)

(Learning-curve plot is rendered here.)

# Final held-out evaluation: test-set MSE of the multi-input model.
model.evaluate([x_test_scaled_wide,x_test_scaled_deep],y_test)
5160/5160 [==============================] - 0s 37us/sample - loss: 0.5084





0.5084053831507069

發表評論
所有評論
還沒有人評論,想成為第一個評論的人麼? 請在上方評論欄輸入並且點擊發布.
相關文章