TensorFlow-Based RBF Neural Network for Stock Prediction

"""
Created on Mon Mar 30 2020
@author:LC
"""
import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plt
date_axis = np.linspace(0,30,31)
close_price = np.array([911.6,917,932.01,975,976.3,987.1,982.98,979.86,996.35,984,1031.86,1025,987.9,981.2,999.82,984.86,976.06,982.52,981.5,986.09,975.93,968,963.5,947.5,955.87,957.02,953.98,946.36,963,965.03])
open_price = np.array([907,917,940,932.5,969.97,978.3,992,985,985,985.99,1004.52,1025,1020,985.8,986,995.05,979.3,980.93,989.96,981.3,967.8,975.45,968,961.5,955,960,962.3,958.31,945.97,961.97])

def draw_figure():
    # plot each day's open-to-close bar: red when the close is above the open, green otherwise (Chinese market convention)
    plt.figure()
    for i in range(30):
        date = np.zeros([2])
        date[0] = i+1
        date[1] = i+1
        gain = np.zeros([2])
        gain[0] = open_price[i]
        gain[1] = close_price[i]
        if open_price[i] < close_price[i]:
            plt.plot(date,gain,'r',lw=8)
        else:
            plt.plot(date,gain,'g',lw=8)

datenormal = np.zeros([30,1])
pricenormal = np.zeros([30,1])
max_price = max(close_price)
min_price = min(close_price)

for i in range(30):   # min-max normalization of dates and closing prices to [0, 1]
    datenormal[i] = (i+1-1) / (30-1)
    pricenormal[i] = (close_price[i]-min_price) / (max_price-min_price)

x=tf.placeholder(tf.float32,shape=[None,1])
y=tf.placeholder(tf.float32,shape=[None,1])

w=tf.Variable(tf.random_uniform([20,1],seed=1))  # hidden-to-output layer weights
b = tf.Variable(tf.constant(0.1))     # output-layer bias

c=tf.Variable(tf.random_normal([20,1],seed=1))     # RBF centers
delta=tf.Variable(tf.random_normal([1,20],seed=1))   # RBF widths (one sigma per hidden unit)
dist=tf.reshape(tf.reduce_sum(tf.square(tf.subtract(tf.tile(x,[20,1]),c)),1),[1,20])  # squared Euclidean distance to each center (shape 1x20)
delta_2 = tf.square(delta)
  
rbf_out = tf.exp(tf.multiply(-1.0,tf.divide(dist,tf.multiply(2.0,delta_2))))    # hidden-layer output via the Gaussian radial basis function

y_pred=tf.nn.relu(tf.matmul(rbf_out,w)+b)   # predicted output

mse = tf.reduce_mean(tf.square(y-y_pred))  # loss: mean squared error

train_step = tf.train.GradientDescentOptimizer(0.01).minimize(mse)

with tf.Session() as sess:
    tf.global_variables_initializer().run()
    pred=np.zeros([30,1])
    for step in range(1000):
        for i in range(len(datenormal)):
            sess.run(train_step,feed_dict={x:np.mat(datenormal)[i],y:np.mat(pricenormal)[i]})
        if step % 100 == 0:
            for j in range(len(datenormal)):
                loss = sess.run(mse,feed_dict={x:np.mat(datenormal)[j],y:np.mat(pricenormal)[j]})  # per-sample loss; the value printed below is for the last sample
                y_ = sess.run(y_pred,feed_dict={x:np.mat(datenormal)[j]})
                pred[j,0] = y_[0,0]*(max_price-min_price)+min_price  # de-normalize the prediction back to a price
            draw_figure()
            plt.plot(date_axis[1:],pred.tolist(),'b',linewidth=2)
            plt.grid()
            plt.show()
            print("After %d training steps,mse on all data is %g"%(step,loss))

[Figure: open/close price bars with the RBF prediction curve (blue), redrawn every 100 training steps]

After 0 training steps,mse on all data is 0.119859
After 100 training steps,mse on all data is 8.58006e-07
After 200 training steps,mse on all data is 4.60739e-07
After 300 training steps,mse on all data is 2.34644e-05
After 400 training steps,mse on all data is 3.86041e-05
After 500 training steps,mse on all data is 4.16582e-05
After 600 training steps,mse on all data is 4.10633e-05
After 700 training steps,mse on all data is 3.91262e-05
After 800 training steps,mse on all data is 3.70629e-05
After 900 training steps,mse on all data is 3.54646e-05
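The script uses the TensorFlow 1.x graph API (tf.placeholder, tf.Session, tf.train.GradientDescentOptimizer). As a minimal sketch, assuming a machine with only TensorFlow 2.x installed, the run above can be reproduced by swapping the import for the v1 compatibility layer and leaving the rest of the code unchanged:

# Assumption: TensorFlow 2.x is installed; run the TF1-style graph code above via the compat layer
import tensorflow.compat.v1 as tf
tf.disable_v2_behavior()   # re-enables placeholders, sessions and graph-mode execution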
