Model Parameters
model.parameters() / model.named_parameters()
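The snippets in this section use the MyLinear module defined earlier. For reference, here is a minimal sketch that is consistent with the shapes printed below; the (in_features, out_features) weight layout and the variable name m are assumptions inferred from the output:

import torch
import torch.nn as nn

class MyLinear(nn.Module):
    def __init__(self, in_features, out_features):
        super().__init__()
        # Weight stored as (in_features, out_features) -- an assumption that
        # matches the (4, 3) weight and 3-element bias printed below.
        self.weight = nn.Parameter(torch.randn(in_features, out_features))
        self.bias = nn.Parameter(torch.randn(out_features))

    def forward(self, x):
        return x @ self.weight + self.bias

m = MyLinear(4, 3)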
for parameter in m.named_parameters():
    print(parameter)
: ('weight', Parameter containing:
tensor([[ 1.0597,  1.1796,  0.8247],
        [-0.5080, -1.2635, -1.1045],
        [ 0.0593,  0.2469, -1.4299],
        [-0.4926, -0.5457,  0.4793]], requires_grad=True))
('bias', Parameter containing:
tensor([ 0.3634,  0.2015, -0.8525], requires_grad=True))
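parameters() yields the same tensors but without names; that bare iterator is what optimizers expect. A small usage sketch:

# Pass the parameter iterator to an optimizer; use named_parameters()
# when you also need the names (e.g. for logging or per-layer filtering).
optimizer = torch.optim.SGD(m.parameters(), lr=0.01)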
Building Blocks
net = nn.Sequential(
    MyLinear(4, 3),
    nn.ReLU(),
    MyLinear(3, 1)
)
sample_input = torch.randn(4)
net(sample_input)
: tensor([-0.6749], grad_fn=<AddBackward0>)
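nn.Sequential names its submodules by position, so they can be indexed, and their parameter names carry the layer index as a prefix:

print(net[0])              # submodules are reachable by index -> MyLinear()
for name, p in net.named_parameters():
    print(name, p.shape)   # '0.weight', '0.bias', '2.weight', '2.bias' (nn.ReLU at index 1 has no parameters)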
children() or named_children()
Immediate submodules (direct children only)
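Net here is the two-layer module built in an earlier section. A plausible sketch consistent with the children printed below; only the names l0 and l1 are confirmed by the output, the layer sizes are assumptions:

class Net(nn.Module):
    def __init__(self):
        super().__init__()
        self.l0 = MyLinear(4, 3)
        self.l1 = MyLinear(3, 1)

    def forward(self, x):
        return self.l1(self.l0(x))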
net = Net()
for child in net.named_children():
    print(child)
: ('l0', MyLinear())
('l1', MyLinear())
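Note that children() stops at the first level and never descends into nested submodules, whereas modules() also yields the module itself and then recurses:

len(list(net.children()))   # 2 -- just l0 and l1
len(list(net.modules()))    # 3 -- the Net itself, then l0 and l1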
modules() and named_modules()
Recursively yields all submodules (including the submodules' own children)
class BigNet(nn.Module):
    def __init__(self):
        super().__init__()
        self.l1 = MyLinear(5, 4)
        self.net = Net()

    def forward(self, x):
        return self.net(self.l1(x))
big_net = BigNet()
for module in big_net.named_modules():
    print(module)
: ('', BigNet(
  (l1): MyLinear()
  (net): Net(
    (l0): MyLinear()
    (l1): MyLinear()
  )
))
('l1', MyLinear())
('net', Net(
  (l0): MyLinear()
  (l1): MyLinear()
))
('net.l0', MyLinear())
('net.l1', MyLinear())
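A common use of this recursive traversal is bulk initialization. nn.Module.apply() walks the same tree as modules(); a sketch, where init_weights is a hypothetical helper:

def init_weights(module):
    # Reinitialize every MyLinear found anywhere in the tree.
    if isinstance(module, MyLinear):
        nn.init.xavier_uniform_(module.weight)

big_net.apply(init_weights)   # apply() visits submodules recursively, like modules()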