import tensorflow as tf
import numpy as np
import os

# Suppress TensorFlow C++ INFO/WARNING log spam.
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'

# Fit a linear model y = w * x + b to synthetic data using TF1 graph-mode
# gradient descent. The training targets are generated from w = 0.2, b = 0.5,
# so the learned parameters should converge to those values.
# NOTE(review): this uses the TF 1.x API (tf.Session, tf.train.*); it will not
# run unmodified on TF 2.x without tf.compat.v1.

# Generate training data: 100 random x values in [0, 1) and exact targets.
x_data = np.random.rand(100).astype(np.float32)
y_data = x_data * 0.2 + 0.5

# Trainable parameters: w initialized uniformly in [-1, 1), b at zero.
w = tf.Variable(tf.random_uniform([1], -1, 1))
b = tf.Variable(tf.zeros([1]))

# Linear model prediction.
y = x_data * w + b

# Mean-squared-error loss and plain gradient-descent training op.
loss = tf.reduce_mean(tf.square(y - y_data))
optimizer = tf.train.GradientDescentOptimizer(0.5)  # learning rate 0.5
train_op = optimizer.minimize(loss)

# Initialize variables and train. The context manager guarantees the session
# is closed when training finishes (the original leaked the session).
init = tf.global_variables_initializer()
with tf.Session() as sess:
    sess.run(init)
    for step in range(201):
        sess.run(train_op)
        # Report progress every 20 steps: loss should shrink toward 0,
        # w toward 0.2, b toward 0.5.
        if step % 20 == 0:
            print(step, sess.run(loss), sess.run(w), sess.run(b))