tf.cond() # if
tf.while_loop() # while
tf.tile() # multiply (repeat a tensor along given axes)
tf.gather()
# 矩阵拼接
a : [16, 256]
b: [16, 256]
c = tf.stack([a, b], 1)
c: [16, 2, 256]
d= tf.concat([a, b], 1)
d:[16, 512]
# ---------------------------------
加载ckpt Key lstm/Attention_Model/basic_lstm_cell/kernel not found in checkpoint,解决方法(ubuntu):
# Restore the latest checkpoint into a fresh session.
# Root cause of "Key lstm/Attention_Model/basic_lstm_cell/kernel not found in
# checkpoint": tf.train.latest_checkpoint() expects the checkpoint *directory*
# (the folder containing the 'checkpoint' index file), not the '.ckpt' prefix.
# Passing 'data/ckpt/nova.ckpt' makes it return None or a bogus prefix, so the
# restore reads the wrong data.
saver = tf.train.Saver()
# Attach gpu_options to the config — previously it was built but never used.
gpu_options = tf.GPUOptions(per_process_gpu_memory_fraction=0.8)
config = tf.ConfigProto(allow_soft_placement=True, gpu_options=gpu_options)
config.gpu_options.allow_growth = True
sess = tf.Session(config=config)
# Initialize first so any variables absent from the checkpoint still have values.
sess.run(tf.global_variables_initializer())
# Directory, not file prefix (see note above).
ckpt_file = tf.train.latest_checkpoint('data/ckpt')
if ckpt_file is not None:
    # restore() itself takes the checkpoint *prefix* returned above.
    saver.restore(sess, ckpt_file)