吳恩達第四課第二週編程作業

本次作業目的

Keras與殘差網絡的搭建

下載地址

鏈接:https://pan.baidu.com/s/1Gu5RYjnq4b5YSSgOAPYwTA 
提取碼:1hyn 
 

代碼

Keras

import numpy as np
from keras import layers
from keras.layers import Input, Dense, Activation, ZeroPadding2D, BatchNormalization, Flatten, Conv2D
from keras.layers import AveragePooling2D, MaxPooling2D, Dropout, GlobalMaxPooling2D, GlobalAveragePooling2D
from keras.models import Model
from keras.preprocessing import image
from keras.utils import layer_utils
from keras.utils.data_utils import get_file
from keras.applications.imagenet_utils import preprocess_input
import pydot
from IPython.display import SVG
from keras.utils.vis_utils import model_to_dot
from keras.utils import plot_model
import kt_utils
import keras.backend as K
K.set_image_data_format('channels_last')
import matplotlib.pyplot as plt
from matplotlib.pyplot import imshow
import os
os.environ['KERAS_BACKEND']='tensorflow'

# Load the assignment dataset via the course helper module (kt_utils).
X_train_orig,Y_train_orig,X_test_orig,Y_test_orig,classes=kt_utils.load_dataset()
# Scale raw pixel values from [0, 255] down to [0, 1].
X_train=X_train_orig/255
X_test=X_test_orig/255
# Transpose the label arrays so each row corresponds to one example.
Y_train=Y_train_orig.T
Y_test=Y_test_orig.T

def model(input_shape):
    """Build a small CNN binary classifier with the Keras functional API.

    Pipeline: ZeroPad -> CONV -> BN -> RELU -> MaxPool -> Flatten -> sigmoid Dense.

    :param input_shape: shape of one input image, e.g. (64, 64, 3)
    :return: an uncompiled Keras Model named 'HappyModel'
    """
    inputs = Input(input_shape)
    # Pad the image with a 3-pixel border of zeros.
    x = ZeroPadding2D((3, 3))(inputs)
    # CONV -> BN -> RELU block.
    x = Conv2D(32, (7, 7), strides=(1, 1), name='conv0')(x)
    x = BatchNormalization(axis=3, name='bn0')(x)
    x = Activation('relu')(x)
    # Spatial downsampling.
    x = MaxPooling2D((2, 2), name='max_pool')(x)
    # Flatten the feature maps into a vector, then the fully connected output.
    x = Flatten()(x)
    x = Dense(1, activation='sigmoid', name='fc')(x)

    return Model(inputs=inputs, outputs=x, name='HappyModel')
def HappyModel(input_shape):
    """Construct the 'HappyModel' binary classifier.

    Zero-pads the input, applies one CONV -> BN -> RELU block, max-pools,
    flattens, and finishes with a single sigmoid unit.

    :param input_shape: shape of one input image, e.g. (64, 64, 3)
    :return: an uncompiled Keras Model named 'HappyModel'
    """
    img_in = Input(input_shape)
    net = ZeroPadding2D((3, 3))(img_in)                 # 3-pixel zero border
    net = Conv2D(32, (7, 7), strides=(1, 1), name='conv0')(net)
    net = BatchNormalization(axis=3, name='bn0')(net)
    net = Activation('relu')(net)
    net = MaxPooling2D((2, 2), name='max_pool')(net)    # downsample
    net = Flatten()(net)                                # feature maps -> vector
    net = Dense(1, activation='sigmoid', name='fc')(net)
    return Model(inputs=img_in, outputs=net, name='HappyModel')


殘差

import numpy as np
import tensorflow as tf

from keras import layers
from keras.layers import Input, Add, Dense, Activation, ZeroPadding2D, BatchNormalization, Flatten, Conv2D, AveragePooling2D, MaxPooling2D, GlobalMaxPooling2D
from keras.models import Model, load_model
from keras.preprocessing import image
from keras.utils import layer_utils
from keras.utils.data_utils import get_file
from keras.applications.imagenet_utils import preprocess_input
from keras.utils.vis_utils import model_to_dot
from keras.utils import plot_model
from keras.initializers import glorot_uniform

import pydot
from IPython.display import SVG
import scipy.misc
from matplotlib.pyplot import imshow
import keras.backend as K
K.set_image_data_format('channels_last')
K.set_learning_phase(1)

import resnets_utils
import os
os.environ['KERAS_BACKEND']='tensorflow'

#恆等塊
def identity_block(X, f, filters, stage, block):
    """ResNet identity block: three conv layers plus an un-transformed skip.

    The shortcut path is the input itself, so the block's input and output
    tensors must have identical shapes.

    :param X: input tensor
    :param f: kernel size of the middle conv layer
    :param filters: filter counts for the three conv layers
    :param stage: integer, names each layer by its position (used with block)
    :param block: string, names each layer by its position (used with stage)
    :return: output tensor of the block
    """
    conv_name_base = 'res' + str(stage) + block + '_branch'
    bn_name_base = 'bn' + str(stage) + block + '_branch'
    f1, f2, f3 = filters
    shortcut = X

    # First component: 1x1 conv -> BN -> ReLU.
    X = Conv2D(filters=f1, kernel_size=(1, 1), strides=(1, 1), padding='valid',
               name=conv_name_base + '2a', kernel_initializer=glorot_uniform(seed=0))(X)
    X = BatchNormalization(axis=3, name=bn_name_base + '2a')(X)
    X = Activation('relu')(X)

    # Second component: fxf 'same' conv -> BN -> ReLU.
    X = Conv2D(filters=f2, kernel_size=(f, f), strides=(1, 1), padding='same',
               name=conv_name_base + '2b', kernel_initializer=glorot_uniform(seed=0))(X)
    X = BatchNormalization(axis=3, name=bn_name_base + '2b')(X)
    X = Activation('relu')(X)

    # Third component: 1x1 conv -> BN. Deliberately NO activation here —
    # the ReLU comes only after the shortcut is added.
    X = Conv2D(filters=f3, kernel_size=(1, 1), strides=(1, 1), padding='valid',
               name=conv_name_base + '2c', kernel_initializer=glorot_uniform(seed=0))(X)
    X = BatchNormalization(axis=3, name=bn_name_base + '2c')(X)

    # Merge the shortcut with the main path, then apply the final ReLU.
    X = Add()([X, shortcut])
    return Activation('relu')(X)
#卷積塊
def convolutional_block(X, f, filters, stage, block, s=2):
    """ResNet convolutional block: a three-conv main path plus a projection
    shortcut (strided 1x1 conv) so the two paths match before the add.

    :param X: input tensor
    :param f: kernel size of the middle conv layer
    :param filters: filter counts [F1, F2, F3] for the three conv layers
    :param stage: integer, names each layer by its position (used with block)
    :param block: string, names each layer by its position (used with stage)
    :param s: stride of the first conv and of the shortcut conv (downsampling)
    :return: output tensor of the block
    """
    conv_name_base = 'res' + str(stage) + block + '_branch'
    bn_name_base = 'bn' + str(stage) + block + '_branch'
    F1, F2, F3 = filters
    X_shortcut = X

    # First component: the 1x1 conv with stride s performs the downsampling.
    X = Conv2D(filters=F1, kernel_size=(1, 1), strides=(s, s), padding='valid',
               name=conv_name_base + '2a', kernel_initializer=glorot_uniform(seed=0))(X)
    # BUG FIX: the first two BN layers previously used the bare `bn_name_base`
    # with no suffix, producing duplicate layer names — Keras rejects models
    # containing two layers with the same name. Suffixes now match identity_block.
    X = BatchNormalization(axis=3, name=bn_name_base + '2a')(X)
    X = Activation('relu')(X)

    # Second component: fxf 'same' conv -> BN -> ReLU.
    X = Conv2D(filters=F2, kernel_size=(f, f), strides=(1, 1), padding='same',
               name=conv_name_base + '2b', kernel_initializer=glorot_uniform(seed=0))(X)
    X = BatchNormalization(axis=3, name=bn_name_base + '2b')(X)
    X = Activation('relu')(X)

    # Third component — suffix fixed from '3c' to '2c' for consistency with
    # identity_block and the reference naming scheme. No activation here;
    # the ReLU comes only after the shortcut is added.
    X = Conv2D(filters=F3, kernel_size=(1, 1), strides=(1, 1), padding='valid',
               name=conv_name_base + '2c', kernel_initializer=glorot_uniform(seed=0))(X)
    X = BatchNormalization(axis=3, name=bn_name_base + '2c')(X)

    # Shortcut path: project with a strided 1x1 conv so shapes match the main path.
    X_shortcut = Conv2D(filters=F3, kernel_size=(1, 1), strides=(s, s), padding='valid',
                        name=conv_name_base + '1', kernel_initializer=glorot_uniform(seed=0))(X_shortcut)
    X_shortcut = BatchNormalization(axis=3, name=bn_name_base + '1')(X_shortcut)

    # Merge the shortcut with the main path, then apply the final ReLU.
    X = Add()([X, X_shortcut])
    X = Activation('relu')(X)
    return X
#50層殘差網絡
def ResNet50(input_shape=(64, 64, 3), classes=6):
    """Build the 50-layer residual network.

    Architecture: CONV -> BN -> RELU -> MAXPOOL -> CONVBLOCK -> IDBLOCK*2
    -> CONVBLOCK -> IDBLOCK*3 -> CONVBLOCK -> IDBLOCK*5 -> CONVBLOCK
    -> IDBLOCK*2 -> AVGPOOL -> FLATTEN -> DENSE (softmax).

    :param input_shape: shape of one input image
    :param classes: number of output classes
    :return: an uncompiled Keras Model named 'ResNet50'
    """
    X_input = Input(input_shape)
    X = ZeroPadding2D((3, 3))(X_input)

    # Stage 1
    X = Conv2D(filters=64, kernel_size=(7, 7), strides=(2, 2),
               name='conv1', kernel_initializer=glorot_uniform(seed=0))(X)
    X = BatchNormalization(axis=3, name='bn_conv1')(X)
    X = Activation('relu')(X)
    X = MaxPooling2D(pool_size=(3, 3), strides=(2, 2))(X)

    # Stage 2 (s=1 because the max pool above already downsampled)
    X = convolutional_block(X, f=3, filters=[64, 64, 256], stage=2, block='a', s=1)
    X = identity_block(X, f=3, filters=[64, 64, 256], stage=2, block='b')
    X = identity_block(X, f=3, filters=[64, 64, 256], stage=2, block='c')

    # Stage 3 — BUG FIX: the stride was s=1; stages 3-5 of ResNet-50 each
    # halve the spatial resolution, so the conv block must use s=2.
    X = convolutional_block(X, f=3, filters=[128, 128, 512], stage=3, block='a', s=2)
    X = identity_block(X, f=3, filters=[128, 128, 512], stage=3, block='b')
    X = identity_block(X, f=3, filters=[128, 128, 512], stage=3, block='c')
    X = identity_block(X, f=3, filters=[128, 128, 512], stage=3, block='d')

    # Stage 4 — BUG FIX: the conv block passed stage=3, which duplicates the
    # layer names of stage 3's block 'a' (Keras raises on duplicate names).
    # Also added the missing identity block 'e' (stage 4 has five of them).
    X = convolutional_block(X, f=3, filters=[256, 256, 1024], stage=4, block='a', s=2)
    X = identity_block(X, f=3, filters=[256, 256, 1024], stage=4, block='b')
    X = identity_block(X, f=3, filters=[256, 256, 1024], stage=4, block='c')
    X = identity_block(X, f=3, filters=[256, 256, 1024], stage=4, block='d')
    X = identity_block(X, f=3, filters=[256, 256, 1024], stage=4, block='e')
    X = identity_block(X, f=3, filters=[256, 256, 1024], stage=4, block='f')

    # Stage 5 — BUG FIX: the identity blocks used [256, 256, 2048]; they must
    # match the conv block's [512, 512, 2048] per the ResNet-50 architecture.
    X = convolutional_block(X, f=3, filters=[512, 512, 2048], stage=5, block='a', s=2)
    X = identity_block(X, f=3, filters=[512, 512, 2048], stage=5, block='b')
    X = identity_block(X, f=3, filters=[512, 512, 2048], stage=5, block='c')

    # Average pooling before the classifier head.
    X = AveragePooling2D(pool_size=(2, 2), padding='same')(X)

    # Output layer: flatten, then a dense softmax over the classes.
    X = Flatten()(X)
    X = Dense(classes, activation='softmax', name='fc' + str(classes),
              kernel_initializer=glorot_uniform(seed=0))(X)

    model = Model(inputs=X_input, outputs=X, name='ResNet50')
    return model

參考網址:https://blog.csdn.net/u013733326/article/details/80250818

發表評論
所有評論
還沒有人評論,想成為第一個評論的人麼? 請在上方評論欄輸入並且點擊發布.
相關文章