前向傳播與反向傳播代碼

前向傳播與反向傳播代碼:
設 train Set= [(-0.5,12,0),(0.5,13.2,1),(0.8,8,0),(1,9,1),(1.3,6.5,1),(1.5,5,1),(1.7,3,0),(2.0,1.5,0),(2.2,2,1) ,(2.5,1,1),(3,-1.3,0),(3.3,0.5,1),(3.5,1.5,1),(3.8,1.1,0),(4.1,2.8,1),(4.5,5.1,1),(4.9,6.8,0) ,(5.3,11.2,1),(5.7,14.1,0),(6.2,21.2,1)]
testSet=[(0.7,11.1,1),(1.3,5.2,0),(1.7,3.0,0),(2.2,1.5,1),(2.6,0.5,1) ,(3.2,-0.7,0),(3.7,1.3,1),(4.3,3.5,1),(4.9,6.8,0),(5.5,13.2,1)]
說明: 數據集元素格式爲(x, y, label)。(x, y)是輸入, label 是結果(0 或 1)。
網絡結構見下圖。
要求:訓練這個網絡模型,得到 W, b,λ 這三個模型參數(初始參數自調)。(不能使用開 源框架,訓練過程中打出訓練錯誤率和測試錯誤率。是有辦法訓練到 100% 準確率的)
這裏寫圖片描述

# coding=utf-8

import numpy as np

# Training set: each element is (x, y, label) — (x, y) is the 2-D input,
# label is the 0/1 target class.
trainset=[(-0.5,12,0),(0.5,13.2,1),(0.8,8,0),(1,9,1),(1.3,6.5,1),(1.5,5,1),(1.7,3,0),(2.0,1.5,0),(2.2,2,1)\
,(2.5,1,1),(3,-1.3,0),(3.3,0.5,1),(3.5,1.5,1),(3.8,1.1,0),(4.1,2.8,1),(4.5,5.1,1),(4.9,6.8,0)\
,(5.3,11.2,1),(5.7,14.1,0),(6.2,21.2,1)]
# Held-out test set, same (x, y, label) format.
testset=[(0.7,11.1,1),(1.3,5.2,0),(1.7,3.0,0),(2.2,1.5,1),(2.6,0.5,1)\
,(3.2,-0.7,0),(3.7,1.3,1),(4.3,3.5,1),(4.9,6.8,0),(5.5,13.2,1)]

class my_net(object):
    def __init__(self):
        #input
        self.x = 0.
        self.y = 0.
        self.label = 0

        #paras
        self.b = 1.
        self.lamta = 1
        self.w = 0.01

        #forward
        self.u1 = 0.
        self.u2 = 0.
        self.u3 = 0.
        self.u4 = 0.
        self.u5 = 0.

        self.f = 0.
        self.J = 0.
        self.pred = 0

        #backward
        self.dJ_df = 0.
        self.dJ_dw = 0.
        self.dJ_db = 0.
        self.dJ_dlamta = 0.

    def forward(self,inputdata):
        self.x=inputdata[0]
        self.y=inputdata[1]

        if inputdata.__len__()>2:
            self.label = inputdata[2]

        self.u1= self.x * self.w
        self.u2= self.u1 + self.b
        self.u3= self.u2 ** 2
        self.u4= self.u3 * self.lamta
        self.u5= self.y - self.u4
        self.f = 1. / ( 1. + np.exp( -self.u5 ) )


        if self.f > 0.5:
            self.pred = 1
        else:
            self.pred = 0

        return self.pred

    def backward(self,input_data):
        if input_data[2] == 1:
            self.dJ_df = -1.0 / self.f
        else:
            self.dJ_df = 1.0 / (1.0 - self.f)

        self.dJ_dw = self.dJ_df*(self.f*(1-self.f))*(-1)*(self.lamta)*(2*self.u2)*(1)*(self.x)
        self.dJ_dlamta = self.dJ_df*(self.f*(1-self.f))*(-1)*(self.u3)
        self.dJ_db=self.dJ_df*(self.f*(1-self.f))*(-1)*(self.lamta)*(2*self.u2)*(1)

    def update_parameters(self,learning_rate):
        self.w -= self.dJ_dw * learning_rate
        self.lamta -= self.dJ_dlamta * learning_rate
        self.b -=self.dJ_db * learning_rate

if __name__=="__main__":

    net = my_net()
    learning_rate = 0.0001
    for step in range(15000):
        train_loss = 0.
        train_cnt  = 0

        for item in trainset:
            prediction = net.forward(item)
            train_loss = -item[2]*np.log(net.f)+(item[2]-1)*np.log(1.-net.f)
            net.backward(item)
            net.update_parameters(learning_rate)
            if prediction == item[2]:
                train_cnt+=1

        if step % 300 == 0:
            print 'train error:', (trainset.__len__() - train_cnt)/1.0/trainset.__len__()
            cnt = 0
            for i in testset:
                prediction_test = net.forward(i)
                if prediction_test == i[2]:
                    cnt+=1
            print 'test error:',(testset.__len__()-cnt)/1.0/testset.__len__()

        if step % 1000 ==0:
            f = open('my_save.txt','a') 
            f.write('w:'+str(net.w)+'')
            f.write('b:' + str(net.b)+'')
            f.write('lam:' + str(net.lamta)+'\n')
            f.close()

一言以蔽之:前向存儲節點值給反向計算用

發表評論
所有評論
還沒有人評論,想成為第一個評論的人麼? 請在上方評論欄輸入並且點擊發布.
相關文章