Commonly Used TensorFlow Functions

tf.stack(values, axis=0, name='stack')

Stacks a list of tensors into a single tensor along a new axis (recombines data).

'x' is [1, 4]
'y' is [2, 5]
'z' is [3, 6]
stack([x, y, z], axis=0) => [[1, 4], [2, 5], [3, 6]]
stack([x, y, z], axis=1) => [[1, 2, 3], [4, 5, 6]]
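
A minimal runnable sketch of the same example, assuming the TensorFlow 1.x API:

import tensorflow as tf

x = tf.constant([1, 4])
y = tf.constant([2, 5])
z = tf.constant([3, 6])

stacked_0 = tf.stack([x, y, z], axis=0)  # shape [3, 2]
stacked_1 = tf.stack([x, y, z], axis=1)  # shape [2, 3]

with tf.Session() as sess:
    print(sess.run(stacked_0))  # [[1 4] [2 5] [3 6]]
    print(sess.run(stacked_1))  # [[1 2 3] [4 5 6]]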


tf.range(start, limit, delta=1, dtype=None, name='range')

Generates an evenly spaced sequence of numbers.

'start' is 3
'limit' is 18
'delta' is 3
tf.range(start, limit, delta) ==> [3, 6, 9, 12, 15]

'limit' is 5
tf.range(limit) ==> [0, 1, 2, 3, 4]
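
The same examples as a runnable TensorFlow 1.x sketch:

import tensorflow as tf

with tf.Session() as sess:
    print(sess.run(tf.range(3, 18, 3)))  # [ 3  6  9 12 15]
    print(sess.run(tf.range(5)))         # [0 1 2 3 4]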


tf.shape(input)

Returns the shape of input as a 1-D tensor.

't' is [[[1, 1, 1], [2, 2, 2]], [[3, 3, 3], [4, 4, 4]]]
shape(t) ==> [2, 2, 3]
Intuitively, [2, 2, 3] means: 2 outermost blocks, 2 rows per block, 3 columns per row.
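
A runnable sketch of this example (TensorFlow 1.x assumed):

import tensorflow as tf

t = tf.constant([[[1, 1, 1], [2, 2, 2]],
                 [[3, 3, 3], [4, 4, 4]]])

with tf.Session() as sess:
    print(sess.run(tf.shape(t)))  # [2 2 3]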


tf.reduce_sum(input_tensor, reduction_indices=None, keep_dims=False)

Sums a tensor's elements along the given dimensions.

'x' is [[1, 1, 1], [1, 1, 1]]
tf.reduce_sum(x) ==> 6
tf.reduce_sum(x, 0) ==> [2, 2, 2]  (sums down each column)
tf.reduce_sum(x, 1) ==> [3, 3]  (sums across each row)
tf.reduce_sum(x, 1, keep_dims=True) ==> [[3], [3]]
tf.reduce_sum(x, [0, 1]) ==> 6
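
A runnable sketch of the examples above (TensorFlow 1.x; keep_dims follows the older argument name):

import tensorflow as tf

x = tf.constant([[1, 1, 1], [1, 1, 1]])

with tf.Session() as sess:
    print(sess.run(tf.reduce_sum(x)))                     # 6
    print(sess.run(tf.reduce_sum(x, 0)))                  # [2 2 2]
    print(sess.run(tf.reduce_sum(x, 1)))                  # [3 3]
    print(sess.run(tf.reduce_sum(x, 1, keep_dims=True)))  # [[3] [3]]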


collection

A named collection that groups graph objects such as variables.

It is used through the following two operations:

tf.add_to_collection(name, value)
tf.get_collection(key, scope=None)

v1 = tf.get_variable(name='v1', shape=[1], initializer=tf.constant_initializer(0))
tf.add_to_collection('loss', v1)
v2 = tf.get_variable(name='v2', shape=[1], initializer=tf.constant_initializer(2))
tf.add_to_collection('loss', v2)

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    get_var = tf.get_collection('loss')  # get_var = [v1, v2]
    one = get_var[0]  # one = v1
    two = get_var[1]  # two = v2

TensorFlow also maintains built-in collections. For example, every trainable variable is added to tf.GraphKeys.TRAINABLE_VARIABLES automatically, so subsets of variables can later be retrieved by scope:
def weight_variable(shape):
    return tf.get_variable(name="weights", shape=shape,
                           initializer=tf.zeros_initializer(dtype=tf.float32))

def bias_variable(shape):
    return tf.get_variable(name="biases", shape=shape,
                           initializer=tf.zeros_initializer(dtype=tf.float32))

def fc_layer(input, in_dim, out_dim, layer_name):
    with tf.variable_scope(layer_name):
        W = weight_variable([in_dim, out_dim])
        b = bias_variable([out_dim])
        linear = tf.matmul(input, W) + b
        output = tf.sigmoid(linear)
        return output

with tf.variable_scope("MLP"):
    x = tf.placeholder(dtype=tf.float32, shape=[None, 1], name="x")
    y = tf.placeholder(dtype=tf.float32, shape=[None, 1], name="y")
    fc1 = fc_layer(x, 1, 8, "fc1")
    fc2 = fc_layer(fc1, 8, 1, "fc2")

mse_loss = tf.reduce_mean(tf.reduce_sum(tf.square(fc2 - y), axis=1))

trainable_var_key = tf.GraphKeys.TRAINABLE_VARIABLES  # a collection key built into TensorFlow
all_vars = tf.get_collection(key=trainable_var_key, scope="MLP")
fc1_vars = tf.get_collection(key=trainable_var_key, scope="MLP/fc1")
fc2_vars = tf.get_collection(key=trainable_var_key, scope="MLP/fc2")
fc1_weight_vars = tf.get_collection(key=trainable_var_key, scope="MLP/fc1/weights")
fc1_bias_vars = tf.get_collection(key=trainable_var_key, scope="MLP/fc1/biases")
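
As a quick check, the retrieved variables can be printed by name, or passed to an optimizer's var_list so that only part of the network is trained. A sketch, assuming the scopes defined above:

for var in all_vars:
    print(var.name)  # e.g. MLP/fc1/weights:0, MLP/fc1/biases:0, ...

# Update only the fc1 variables; fc2 stays fixed.
train_fc1_only = tf.train.GradientDescentOptimizer(0.1).minimize(
    mse_loss, var_list=fc1_vars)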

