Optimizer
torch.optim
每个 optimizer
中有一个 param_groups
维护一组参数及其更新所需的超参数(例如学习率)。通过 pprint(opt.param_groups)
可以查看或者修改这些超参数
[
{'dampening': 0,
'lr': 0.01,
'momentum': 0,
'nesterov': False,
'params': [Parameter containing:
tensor([[-0.4239, 0.2810, 0.3866],
[ 0.1081, -0.3685, 0.4922],
[ 0.1043, 0.5353, -0.1368],
[ 0.5171, 0.3946, -0.3541],
[ 0.2255, 0.4731, -0.4114]], requires_grad=True),
Parameter containing:
tensor([ 0.3145, -0.5053, -0.1401, -0.1902, -0.5681], requires_grad=True)],
'weight_decay': 0},
{'dampening': 0,
'lr': 0.01,
'momentum': 0,
'nesterov': False,
'params': [Parameter containing:
tensor([[[[ 0.0476, 0.2790],
[ 0.0285, -0.1737]],
[[-0.0268, 0.2334],
[-0.0095, -0.1972]],
[[-0.1588, -0.1018],
[ 0.2712, 0.2416]]]], requires_grad=True),
Parameter containing:
tensor([ 0.0