import tensorflow as tf  # TF 1.x API (tf.compat.v1.* under TF 2)

tf.reset_default_graph()
optim = tf.train.AdamOptimizer(0.01)

# Toy graph: y = a + 2a = 3a, so dy/da = 3 everywhere.
a = tf.Variable(initial_value=0., name='a')
b = 2 * a
y = a + b
tvars = tf.trainable_variables()

# Equivalent manual route via tf.gradients:
# grads = tf.gradients(y, tvars)
# app = optim.apply_gradients(zip(grads, tvars))

# compute_gradients() returns a list of (gradient, variable) pairs,
# which apply_gradients() turns into a single update op.
grads_and_vars = optim.compute_gradients(y, tvars)
app = optim.apply_gradients(grads_and_vars)

init = tf.global_variables_initializer()
with tf.Session() as sess:
    sess.run(init)
    for i in range(3):
        sess.run(app)
        print('a={:.2f}, b={:.2f}'.format(a.eval(), b.eval()))
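# The point of splitting compute_gradients() from apply_gradients() (instead
# of calling optim.minimize(y)) is that the gradients can be transformed in
# between. A minimal sketch, assuming TF 1.x; the clip_norm value is an
# arbitrary illustrative choice, and these lines would replace the
# compute/apply pair above:
# grads, variables = zip(*optim.compute_gradients(y, tvars))
# clipped_grads, _ = tf.clip_by_global_norm(grads, clip_norm=1.0)
# app = optim.apply_gradients(zip(clipped_grads, variables))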