TensorFlow 2.0 Linear Regression

import tensorflow as tf
import matplotlib.pyplot as plt

print(tf.keras.__version__)  # version of the Keras API bundled with this TensorFlow build


class Model:
    def __init__(self):
        # Start from deliberately wrong parameters (the true values below are
        # W=3, b=2) so that training has something to learn.
        self.W = tf.Variable(5.0)
        self.b = tf.Variable(0.0)

    def __call__(self, x):
        # A linear model: y = W * x + b.
        return self.W * x + self.b


model = Model()

# Sanity check: with W=5.0 and b=0.0, model(3.0) = 5 * 3 + 0 = 15.
assert model(3.0).numpy() == 15.0


@tf.function
def loss(predicted_y, desired_y):
    # Mean squared error between predictions and targets.
    return tf.losses.MSE(desired_y, predicted_y)
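
For reference, tf.losses.MSE (an alias of tf.keras.losses.MSE) averages the squared differences over the last axis, so for 1-D tensors it matches the plain formula mean((y_true - y_pred)**2). A minimal sanity check, with made-up values for illustration:

# Illustrative check: MSE should equal the explicit formula for 1-D inputs.
y_true = tf.constant([1.0, 2.0, 3.0])
y_pred = tf.constant([1.5, 2.0, 2.5])
manual_mse = tf.reduce_mean(tf.square(y_true - y_pred))
assert abs(loss(y_pred, y_true).numpy() - manual_mse.numpy()) < 1e-6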


TRUE_W = 3.0
TRUE_b = 2.0
NUM_EXAMPLES = 1000

# Synthesize training data: y = TRUE_W * x + TRUE_b plus unit Gaussian noise.
inputs = tf.random.normal(shape=[NUM_EXAMPLES])
noise = tf.random.normal(shape=[NUM_EXAMPLES])
outputs = inputs * TRUE_W + TRUE_b + noise


# Blue: noisy data; red: predictions from the untrained model.
plt.scatter(inputs, outputs, c='b')
plt.scatter(inputs, model(inputs), c='r')
plt.show()

print('Current loss: %.6f' % loss(model(inputs), outputs).numpy())

# Plain SGD; the learning rate (0.1) is fixed here on the optimizer.
opt = tf.optimizers.SGD(0.1)


@tf.function
def train(models, input_data, output_data):
    # Record the forward pass so the tape can compute dL/dW and dL/db.
    with tf.GradientTape() as t:
        _loss = loss(models(input_data), output_data)
    dw, db = t.gradient(_loss, [models.W, models.b])
    opt.apply_gradients(zip([dw, db], [models.W, models.b]))
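
If you prefer not to go through an optimizer object, the same step can apply plain gradient descent by hand with assign_sub. A sketch of that variant (train_manual and its learning_rate argument are names introduced here, not part of the code above):

@tf.function
def train_manual(models, input_data, output_data, learning_rate):
    # Identical tape, but update the variables directly:
    # W <- W - lr * dL/dW and b <- b - lr * dL/db.
    with tf.GradientTape() as t:
        _loss = loss(models(input_data), output_data)
    dw, db = t.gradient(_loss, [models.W, models.b])
    models.W.assign_sub(learning_rate * dw)
    models.b.assign_sub(learning_rate * db)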


# Collect the history of W and b for plotting.
Ws, bs = [], []
epochs = range(10)
for epoch in epochs:
    Ws.append(model.W.numpy())
    bs.append(model.b.numpy())
    current_loss = loss(model(inputs), outputs)

    train(model, inputs, outputs)
    print('Epoch %2d: W=%1.2f b=%1.2f, loss=%2.5f' %
          (epoch, Ws[-1], bs[-1], current_loss.numpy()))

# Plot how W and b converge toward the true values.
plt.plot(epochs, Ws, 'r', epochs, bs, 'b')
plt.plot([TRUE_W] * len(epochs), 'r--', [TRUE_b] * len(epochs), 'b--')
plt.legend(['W', 'b', 'true W', 'true b'])
plt.show()
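
Because this is one-dimensional ordinary least squares, the trained values can also be cross-checked against the closed-form estimate W = cov(x, y) / var(x), b = mean(y) - W * mean(x). A minimal sketch:

# Closed-form least-squares estimate, for comparison with the trained W and b.
x_mean = tf.reduce_mean(inputs)
y_mean = tf.reduce_mean(outputs)
W_hat = tf.reduce_sum((inputs - x_mean) * (outputs - y_mean)) / tf.reduce_sum(tf.square(inputs - x_mean))
b_hat = y_mean - W_hat * x_mean
print('Closed-form: W=%1.2f b=%1.2f' % (W_hat.numpy(), b_hat.numpy()))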

 
