Tensorflow: RNN/LSTM gradient clipping

import tensorflow as tf

lr = 0.01
max_grad_norm = 5   # clip gradients so their global norm is at most 5

# `loss` is assumed to be defined earlier in the graph.
tvars = tf.trainable_variables()
# Compute gradients of the loss w.r.t. all trainable variables, then
# rescale them jointly so that their global norm <= max_grad_norm.
grads, _ = tf.clip_by_global_norm(tf.gradients(loss, tvars), max_grad_norm)
opt = tf.train.GradientDescentOptimizer(lr)
# opt = tf.train.AdamOptimizer(lr)
train_op = opt.apply_gradients(zip(grads, tvars))
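For intuition: tf.clip_by_global_norm rescales the whole gradient list by a single factor rather than clipping each tensor independently. Every gradient is multiplied by clip_norm / max(global_norm, clip_norm), where global_norm is the L2 norm of all gradients concatenated. A minimal NumPy sketch of that rule (the toy gradient values are made up for illustration):

import numpy as np

# Toy gradients standing in for tf.gradients(loss, tvars).
grads = [np.array([3.0, 4.0]), np.array([12.0])]
clip_norm = 5.0

# global_norm = sqrt(3^2 + 4^2 + 12^2) = 13
global_norm = np.sqrt(sum(np.sum(g ** 2) for g in grads))
# Scale factor: a no-op when global_norm <= clip_norm, shrinks otherwise.
scale = clip_norm / max(global_norm, clip_norm)
clipped = [g * scale for g in grads]   # joint norm is now exactly 5

Because one shared factor is applied, the relative directions of the gradients are preserved; only their overall magnitude is capped.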
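A minimal TF1 training-loop sketch showing how the train_op above would be run, assuming a feed-dict input pipeline; the placeholders x_ph/y_ph and the batch arrays are hypothetical names, not from the original:

# x_ph, y_ph, batch_x, batch_y come from an input pipeline not shown here.
with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    for step in range(1000):
        # Running train_op applies the clipped gradients to tvars.
        _, loss_val = sess.run(
            [train_op, loss],
            feed_dict={x_ph: batch_x, y_ph: batch_y})
        if step % 100 == 0:
            print("step %d, loss %.4f" % (step, loss_val))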