Andrew Ng Deep Learning Course 1, Week 3 Programming Assignment

Link: https://pan.baidu.com/s/1ypHuubawEcuJyAxyRMErYg
Extraction code: a9av

This assignment builds a binary (2-class) classification neural network with a single hidden layer.

Source code:

import numpy as np
import matplotlib.pyplot as plt
from testCases import *
import sklearn
import sklearn.datasets
import sklearn.linear_model
from planar_utils import plot_decision_boundary, sigmoid, load_planar_dataset, load_extra_datasets
np.random.seed(1)  # fix the random seed so the results below are reproducible
X, Y = load_planar_dataset()  # load the dataset
plt.scatter(X[0, :], X[1, :], c=np.squeeze(Y), s=40, cmap=plt.cm.Spectral)  # plot the data points
# plt.show()  # uncomment to display the scatter plot
shape_X=X.shape
shape_Y=Y.shape
m=Y.shape[1]
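
Before moving on, it helps to confirm the data dimensions. A quick check (assuming the standard planar "flower" dataset bundled with the assignment, which has 400 examples with 2 features each):

print("X shape:", shape_X)         # expected (2, 400)
print("Y shape:", shape_Y)         # expected (1, 400)
print("training examples:", m)     # expected 400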

# The general methodology for building a neural network is:
# 1. Define the network structure (number of input units, hidden units, etc.).
# 2. Initialize the model's parameters.
# 3. Loop:
#        - run forward propagation
#        - compute the loss
#        - run backward propagation
#        - update the parameters (gradient descent)
def layer_sizes(X, Y):
    """
    :param X: input data
    :param Y: labels
    :return: sizes of the input, hidden, and output layers
    """
    n_x = X.shape[0]
    n_h = 4  # the hidden layer is hard-coded to 4 units
    n_y = Y.shape[0]
    return n_x, n_h, n_y
def initialize_parameters(n_x, n_h, n_y):
    np.random.seed(2)
    W1 = np.random.randn(n_h, n_x) * 0.01  # small random values break symmetry and keep tanh near its linear region
    b1 = np.zeros(shape=(n_h, 1))
    W2 = np.random.randn(n_y, n_h) * 0.01
    b2 = np.zeros(shape=(n_y, 1))

    assert (W1.shape==(n_h,n_x))
    assert (b1.shape==(n_h,1))
    assert (W2.shape==(n_y,n_h))
    assert (b2.shape==(n_y,1))

    parameters={
        'W1':W1,
        'W2':W2,
        'b1':b1,
        'b2':b2
    }
    return parameters
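
A minimal smoke test for the initializer (a sketch, not part of the assignment): the shapes should match the requested layer sizes.

demo_params = initialize_parameters(2, 4, 1)
for name in ('W1', 'b1', 'W2', 'b2'):
    print(name, demo_params[name].shape)
# prints W1 (4, 2), b1 (4, 1), W2 (1, 4), b2 (1, 1)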
def forward_propagation(X,parameters):
    W1 = parameters['W1']
    b1 = parameters['b1']
    W2 = parameters['W2']
    b2 = parameters['b2']

    Z1=np.dot(W1,X)+b1
    A1=np.tanh(Z1)
    Z2=np.dot(W2,A1)+b2
    A2=sigmoid(Z2)

    assert (A2.shape==(1,X.shape[1]))


    cache={
        'Z1':Z1,
        'Z2':Z2,
        'A1':A1,
        'A2':A2
    }
    return A2,cache
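
To see the forward pass in isolation, one can run it on random inputs (the shapes below are chosen only for illustration); each example should map to a single probability:

X_demo = np.random.randn(2, 5)  # 2 features, 5 examples
A2_demo, _ = forward_propagation(X_demo, initialize_parameters(2, 4, 1))
print(A2_demo.shape)            # (1, 5), every entry strictly between 0 and 1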
def compute_cost(A2, Y):
    m=Y.shape[1]
    logprobs=Y*np.log(A2)+(1-Y)*np.log(1-A2)
    cost=-np.sum(logprobs)/m
    cost=float(np.squeeze(cost))
    assert (isinstance(cost,float))
    return cost
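
This is the cross-entropy cost J = -(1/m) * Σ [y log(a2) + (1 - y) log(1 - a2)]. A quick sanity check (a sketch): predicting an uninformative 0.5 for every example should cost -log(0.5) ≈ 0.6931 regardless of the labels.

Y_demo = np.array([[1, 0, 1]])
A2_half = np.full((1, 3), 0.5)
print(compute_cost(A2_half, Y_demo))  # ≈ 0.6931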
def backward_propagation(parameters,cache,X,Y):
    A1=cache['A1']
    A2=cache['A2']
    m=Y.shape[1]
    W2=parameters['W2']

    dZ2=A2-Y
    dW2=np.dot(dZ2,A1.T)/m
    db2=np.sum(dZ2,axis=1,keepdims=True)/m
    dZ1=np.multiply(np.dot(W2.T,dZ2),1 - np.power(A1, 2))
    dW1=np.dot(dZ1,X.T)/m
    db1=np.sum(dZ1,axis=1,keepdims=True)/m

    grads={
        'dW1':dW1,
        'dW2':dW2,
        'db1':db1,
        'db2':db2
    }
    return grads
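
These gradients can be verified numerically. The sketch below (eps and the data shapes are ad hoc choices, not from the assignment) compares db2 against a centered finite difference of the cost with respect to b2:

eps = 1e-7
X_chk = np.random.randn(2, 5)
Y_chk = (np.random.rand(1, 5) > 0.5).astype(float)
p = initialize_parameters(2, 4, 1)
_, cache_chk = forward_propagation(X_chk, p)
db2_analytic = backward_propagation(p, cache_chk, X_chk, Y_chk)['db2'][0, 0]
p_plus = {k: v.copy() for k, v in p.items()}
p_plus['b2'] += eps
p_minus = {k: v.copy() for k, v in p.items()}
p_minus['b2'] -= eps
db2_numeric = (compute_cost(forward_propagation(X_chk, p_plus)[0], Y_chk)
               - compute_cost(forward_propagation(X_chk, p_minus)[0], Y_chk)) / (2 * eps)
print(db2_analytic, db2_numeric)  # the two values should agree closely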
def update_parameters(parameters,grads,learning_rate=1.2):
    W1=parameters['W1']
    W2=parameters['W2']
    b1=parameters['b1']
    b2=parameters['b2']
    dW1=grads['dW1']
    dW2=grads['dW2']
    db1=grads['db1']
    db2=grads['db2']

    W1=W1-learning_rate*dW1
    b1 = b1 - learning_rate * db1  # fixed: the original dropped the "b1 -" term
    W2=W2-learning_rate*dW2
    b2=b2-learning_rate*db2

    parameters={
        'W1':W1,
        'W2':W2,
        'b1':b1,
        'b2':b2
    }
    return parameters
def nn_model(X,Y,n_h,num_iterations,print_cost=False):
    np.random.seed(3)
    n_x=layer_sizes(X,Y)[0]
    n_y=layer_sizes(X,Y)[2]
    parameters=initialize_parameters(n_x,n_h,n_y)
    for i in range(num_iterations):
        A2,cache=forward_propagation(X,parameters)
        cost=compute_cost(A2,Y)
        grads=backward_propagation(parameters,cache,X,Y)
        parameters=update_parameters(parameters,grads,learning_rate=0.5)
        if print_cost and i % 1000 == 0:
            print("Cost after iteration %i: %f" % (i, cost))
    return parameters
def predict(parameters,X):
    A2 , cache = forward_propagation(X,parameters)
    predictions = np.round(A2)
    return predictions


parameters = nn_model(X, Y, n_h = 4, num_iterations=10000, print_cost=True)
# plot the decision boundary
plot_decision_boundary(lambda x: predict(parameters, x.T), X, Y)
plt.title("Decision Boundary for hidden layer size " + str(4))
predictions = predict(parameters, X)
print('Accuracy: %d' % float((np.dot(Y, predictions.T) + np.dot(1 - Y, 1 - predictions.T)) / float(Y.size) * 100) + '%')
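
As an optional experiment mirroring the extra part of the assignment (a sketch; the iteration count is reduced for speed), one can vary the hidden layer size to see how model capacity affects training accuracy:

for n_h_try in [1, 2, 3, 4, 5, 20, 50]:
    params_h = nn_model(X, Y, n_h_try, num_iterations=5000)
    preds = predict(params_h, X)
    acc = float((np.dot(Y, preds.T) + np.dot(1 - Y, 1 - preds.T)) / float(Y.size) * 100)
    print("n_h = %d: accuracy = %.1f%%" % (n_h_try, acc))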

Reference: https://blog.csdn.net/u013733326/article/details/79827273
