对于 snippet 1:
with tf.Graph().as_default():
a = tf.Variable(1, name="a_var")
assign_op = tf.assign(a, tf.add(a,1,name='ADD'))
b = tf.Variable(112)
b = b.assign(a)
print(a)
print(b)
print(assign_op)
with tf.Session() as sess:
sess.run(tf.global_variables_initializer())
print (sess.run(a))
print ("assign_op : ",sess.run(assign_op))
print(" b :- ",b.eval())
print (sess.run(a))
print (sess.run(a))
print ("assign_op : ",sess.run(assign_op))
print (sess.run(a))
print (sess.run(a))
writer = tf.summary.FileWriter("/tmp/log", sess.graph)
writer.close()
这个 snippet 1 的输出 (o/p):
<tf.Variable 'a_var:0' shape=() dtype=int32_ref>
Tensor("Assign_1:0", shape=(), dtype=int32_ref)
Tensor("Assign:0", shape=(), dtype=int32_ref)
1
assign_op : 2
b :- 2
2
2
assign_op : 3
3
3
have a look at tensorboard's computational graph
注意点:
- 第一个变量 'a' 被评估,所以你得到 o/p : 1
- 接着 sess.run(assign_op) 执行 => assign_op = tf.assign(a, tf.add(a,1,name='ADD')),其作用是更新变量 'a'(=2),并创建 'assign_op',它是一个张量 (Tensor) 类型的对象。
对于片段 2:see computational graph, you'll get the idea
(注意这里没有赋值操作的节点)
with tf.Graph().as_default():
a = tf.Variable(1, name="Var_a")
just_a = a + 1
print(a)
print(just_a)
with tf.Session() as sess:
sess.run(tf.global_variables_initializer())
print (sess.run(a))
print (sess.run(a))
print ("just_a : ",sess.run(just_a))
print (sess.run(a))
print (sess.run(a))
print ("just_a : ",sess.run(just_a))
print (sess.run(a))
print (sess.run(a))
writer = tf.summary.FileWriter("/tmp/log", sess.graph)
writer.close()
snippet 2 的输出 (o/p):
<tf.Variable 'Var_a:0' shape=() dtype=int32_ref>
Tensor("add:0", shape=(), dtype=int32)
1
1
just_a : 2
1
1
just_a : 2
1
1
对于片段 3:Computational graph
with tf.Graph().as_default():
a = tf.Variable(1, name="Var_name_a")
a = tf.assign(a, tf.add(a,5))
with tf.Session() as sess:
sess.run(tf.global_variables_initializer())
print (sess.run(a))
print (sess.run(a))
print (" a : ",sess.run(a))
print (sess.run(a))
print (sess.run(a))
print (" a : ",sess.run(a))
print (sess.run(a))
print (sess.run(a))
writer = tf.summary.FileWriter("/tmp/log", sess.graph)
writer.close()
snippet 3 的输出 (o/p):
6
11
a : 16
21
26
a : 31
36
41
现在,如果您查看此 snippet 的计算图,它看起来与 snippet 1 的计算图相似/完全相同。但这里的问题是代码 a = tf.assign(a, tf.add(a,5)) 不仅更新变量 'a',而且又创建了另一个张量 'a'。
现在,后续的代码使用的就是这个刚刚创建的 "a":
print (sess.run(a))
这个“a”将是 a = tf.assign(a, tf.add(a,5))
来自 tf.add(a,5) 的 'a' 只不过是 'a'(=1) => a = tf.Variable(1, name="Var_name_a")... 所以 5+1= 6 被分配给原始“a”,而这个原始“a”被分配给新的“a”。
我还有一个例子可以一次性解释这个概念
check the graph here
with tf.Graph().as_default():
w = tf.Variable(10,name="VAR_W") #initial val = 2
init_op = tf.global_variables_initializer()
# Launch the graph in a session.
with tf.Session() as sess:
# Run the variable initializer.
sess.run(init_op)
print(w.eval())
print(w) #type of 'w' before assign operation
#CASE:1
w = w.assign(w + 50)#adding 100 to var w
print(w.eval())
print(w) #type of 'w' after assign operation
# now if u try => w = w.assign(w + 50), u will get error bcoz newly
created 'w' is considered here which don't have assign attribute
#CASE:2
w = tf.assign(w, w + 100) #adding 100 to var w
print(w.eval())
#CASE:3
w = tf.assign(w, w + 300) #adding 100 to var w
print(w.eval())
writer = tf.summary.FileWriter("/tmp/log", sess.graph)
writer.close()
上面 snippet 的输出 (o/p):
10
<tf.Variable 'VAR_W:0' shape=() dtype=int32_ref>
60
Tensor("Assign:0", shape=(), dtype=int32_ref)
210
660