发表于 | 更新于 | 阅读量:
1.创建图
1 2 3 4 5
| m1 = tf.constant([[3,3]]) m2 = tf.constant([[2],[3]])
product = tf.matmul(m1,m2) print(product)
|
Tensor("MatMul_1:0", shape=(1, 1), dtype=int32)
1 2 3 4
| with tf.Session() as sess: result = sess.run(product) print(result)
|
[[15]]
2.变量
1 2 3 4 5 6 7 8 9 10 11 12 13
| x = tf.Variable([1,2]) a = tf.constant([3,3])
sub = tf.subtract(x,a)
add = tf.add(x,a)
init = tf.global_variables_initializer()
with tf.Session() as sess: sess.run(init) print(sess.run(sub)) print(sess.run(add))
|
[[-2 -1]]
[[4 5]]
1 2 3 4 5 6 7 8 9 10 11 12 13
| state = tf.Variable(0,name='counter') new_value = tf.add(state,1) update = tf.assign(state,new_value)
init = tf.global_variables_initializer()
with tf.Session() as sess: sess.run(init) print(sess.run(state)) for _ in range(5): sess.run(update) print(sess.run(state))
|
0
1
2
3
4
5
3.Fetch & Feed
1 2 3 4 5 6 7 8 9 10 11 12
| input1 = tf.constant(3.0) input2 = tf.constant(2.0) input3 = tf.constant(5.0)
add = tf.add(input2,input3)
mul = tf.multiply(input1,add)
with tf.Session() as sess: result = sess.run([mul,add]) print(result)
|
[21.0, 7.0]
1 2 3 4 5 6 7 8 9
|
input1 = tf.placeholder(tf.float32) input2 = tf.placeholder(tf.float32) output = tf.multiply(input1,input2)
with tf.Session() as sess: print(sess.run(output,feed_dict={input1:[7.],input2:[2.]}))
|
[14.]
4.梯度下降优化器
1 2
| import tensorflow as tf import numpy as np
|
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25
| x_data = np.random.rand(100) y_data = x_data*0.5 + 0.2
b = tf.Variable(0.) k = tf.Variable(0.) y = k*x_data + b
loss = tf.reduce_mean(tf.square(y-y_data))
optimizer = tf.train.GradientDescentOptimizer(0.2)
train = optimizer.minimize(loss)
init = tf.global_variables_initializer()
with tf.Session() as sess: sess.run(init) for step in range(200): sess.run(train) if step%20==0: print(step,sess.run([k,b]))
|
0 [0.112656556, 0.18330559]
20 [0.33603606, 0.2910507]
40 [0.40696397, 0.25166383]
60 [0.44720966, 0.229315]
80 [0.47004583, 0.21663386]
100 [0.48300347, 0.20943834]
120 [0.4903559, 0.20535547]
140 [0.4945278, 0.20303877]
160 [0.496895, 0.20172425]
180 [0.49823818, 0.20097837]