tensorflow代碼示例——單隱層神經網絡

#coding: utf-8
import tensorflow as tf
from numpy.random import RandomState

# Number of examples fed in per training step.
batch_size = 8

'''
含有一個隱藏層的神經網絡
輸入層                           隱藏層                              輸出層
                                 A1
                           
                  

(X1)
          
            
                                 A2                                   Y


(X2)

                                 A3
'''

# w1: fully-connected weights from the 2-unit input layer to the 3-unit
# hidden layer; w2: weights from the hidden layer to the single output unit.
# Both are initialized from TensorFlow's built-in normal distribution with a
# fixed seed so runs are reproducible.
w1 = tf.Variable(tf.random_normal([2, 3], stddev=1, seed=1))
w2 = tf.Variable(tf.random_normal([3, 1], stddev=1, seed=1))

# Placeholders for input features and target labels; the batch dimension is
# left as None so any batch size can be fed.
x = tf.placeholder(tf.float32, shape=(None, 2), name="x-input")
y_ = tf.placeholder(tf.float32, shape=(None, 1), name="y-input")

# Forward pass: input -> hidden -> output, sigmoid activation on the output.
hidden = tf.matmul(x, w1)
y = tf.sigmoid(tf.matmul(hidden, w2))

# Binary cross-entropy between the prediction y and the label y_.
# clip_by_value bounds the log arguments away from 0 to avoid NaN/inf.
log_likelihood = (y_ * tf.log(tf.clip_by_value(y, 1e-10, 1.0))
                  + (1 - y_) * tf.log(tf.clip_by_value(1 - y, 1e-10, 1.0)))
cross_entropy = -tf.reduce_mean(log_likelihood)

# Backpropagation: Adam with learning rate 0.001, minimizing the loss.
# (GradientDescentOptimizer and MomentumOptimizer are common alternatives.)
train_step = tf.train.AdamOptimizer(0.001).minimize(cross_entropy)

# Build a synthetic training set: dataset_size random 2-D points drawn
# uniformly from [0, 1)^2 with a fixed seed (reproducible), labeled 1 when
# x1 + x2 < 1 and 0 otherwise.
rng = RandomState(1)
dataset_size = 128
X = rng.rand(dataset_size, 2)   # inputs, shape (128, 2)
Y = [[int(sample[0] + sample[1] < 1)] for sample in X]   # expected outputs

# Run the whole training process.
# Fix: the original mixed Python 2 `print` statements with a `print(...)`
# call; the statement form is a SyntaxError on Python 3. Use the function
# form throughout (single-argument print() behaves the same on Py2 and Py3).
with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())  # initialize all variables

    # Inspect w1 and w2 right after initialization.
    print(sess.run(w1))
    print(sess.run(w2))

    # 5000 training iterations.
    for i in range(5000):
        # Select the next mini-batch of batch_size examples, wrapping
        # around the dataset as i grows.
        start = (i * batch_size) % dataset_size
        end = min(start + batch_size, dataset_size)

        sess.run(train_step, feed_dict={x: X[start:end], y_: Y[start:end]})

        # Every 100 iterations, report w1/w2 progress via the
        # cross-entropy over the full training set.
        if i % 100 == 0:
            total_entropy = sess.run(cross_entropy, feed_dict={x: X, y_: Y})
            print("after %d training step, cross entropy is %g" % (i, total_entropy))

    # Final weights after training.
    print(sess.run(w1))
    print(sess.run(w2))



###
2019-07-13 12:05:25.725879: I tensorflow/core/platform/cpu_feature_guard.cc:141] Your CPU supports instructions that this TensorFlow binary was not compiled to use: AVX2 FMA

[[-0.8113182   1.4845988   0.06532937]
 [-2.4427042   0.0992484   0.5912243 ]]
[[-0.8113182 ]
 [ 1.4845988 ]
 [ 0.06532937]]
after 0 training step, cross entropy is 1.89805
after 100 training step, cross entropy is 1.62943
after 200 training step, cross entropy is 1.40099
after 300 training step, cross entropy is 1.19732
after 400 training step, cross entropy is 1.02375
after 500 training step, cross entropy is 0.887612
after 600 training step, cross entropy is 0.790222
after 700 training step, cross entropy is 0.727325
after 800 training step, cross entropy is 0.689437
after 900 training step, cross entropy is 0.667623
after 1000 training step, cross entropy is 0.655075
after 1100 training step, cross entropy is 0.647813
after 1200 training step, cross entropy is 0.643196
after 1300 training step, cross entropy is 0.639896
after 1400 training step, cross entropy is 0.637246
after 1500 training step, cross entropy is 0.635031
after 1600 training step, cross entropy is 0.633027
after 1700 training step, cross entropy is 0.631151
after 1800 training step, cross entropy is 0.629368
after 1900 training step, cross entropy is 0.627724
after 2000 training step, cross entropy is 0.626172
after 2100 training step, cross entropy is 0.624696
after 2200 training step, cross entropy is 0.623293
after 2300 training step, cross entropy is 0.622006
after 2400 training step, cross entropy is 0.620801
after 2500 training step, cross entropy is 0.619664
after 2600 training step, cross entropy is 0.618592
after 2700 training step, cross entropy is 0.617622
after 2800 training step, cross entropy is 0.616723
after 2900 training step, cross entropy is 0.615883
after 3000 training step, cross entropy is 0.615096
after 3100 training step, cross entropy is 0.614397
after 3200 training step, cross entropy is 0.613756
after 3300 training step, cross entropy is 0.61316
after 3400 training step, cross entropy is 0.612608
after 3500 training step, cross entropy is 0.612126
after 3600 training step, cross entropy is 0.611689
after 3700 training step, cross entropy is 0.611285
after 3800 training step, cross entropy is 0.610913
after 3900 training step, cross entropy is 0.610594
after 4000 training step, cross entropy is 0.610309
after 4100 training step, cross entropy is 0.610046
after 4200 training step, cross entropy is 0.609804
after 4300 training step, cross entropy is 0.609603
after 4400 training step, cross entropy is 0.609423
after 4500 training step, cross entropy is 0.609258
after 4600 training step, cross entropy is 0.609106
after 4700 training step, cross entropy is 0.608983
after 4800 training step, cross entropy is 0.608874
after 4900 training step, cross entropy is 0.608772
[[ 0.02476983  0.56948674  1.6921941 ]
 [-2.1977348  -0.23668921  1.1143895 ]]
[[-0.45544702]
 [ 0.49110925]
 [-0.98110336]]



發表評論
所有評論
還沒有人評論,想成為第一個評論的人麼? 請在上方評論欄輸入並且點擊發布.
相關文章