# Backbone: three convolutional stages followed by two fully-connected
# stages. Each stage is registered as a named sub-module via an OrderedDict
# so it can be addressed as layers.conv1, layers.fc4, etc.
_stages = OrderedDict()
_stages['conv1'] = nn.Sequential(
    nn.Conv2d(3, 96, kernel_size=7, stride=2),
    nn.ReLU(inplace=True),    # in-place activation: overwrites its input tensor
    nn.LocalResponseNorm(2),  # local response normalization over 2 channels
    nn.MaxPool2d(kernel_size=3, stride=2),
)
_stages['conv2'] = nn.Sequential(
    nn.Conv2d(96, 256, kernel_size=5, stride=2),
    nn.ReLU(inplace=True),
    nn.LocalResponseNorm(2),
    nn.MaxPool2d(kernel_size=3, stride=2),
)
_stages['conv3'] = nn.Sequential(
    nn.Conv2d(256, 512, kernel_size=3, stride=1),
    nn.ReLU(inplace=True),
)
_stages['fc4'] = nn.Sequential(
    nn.Linear(512 * 3 * 3, 512),  # flattened 512x3x3 conv3 feature map as input
    nn.ReLU(inplace=True),
)
_stages['fc5'] = nn.Sequential(
    nn.Dropout(0.5),
    nn.Linear(512, 512),
    nn.ReLU(inplace=True),
)
layers = nn.Sequential(_stages)

# Print each stage for inspection.
for stage in layers:
    print(stage)
The printed output is:
Sequential(
(0): Conv2d(3, 96, kernel_size=(7, 7), stride=(2, 2))
(1): ReLU(inplace=True)
(2): LocalResponseNorm(2, alpha=0.0001, beta=0.75, k=1.0)
(3): MaxPool2d(kernel_size=3, stride=2, padding=0, dilation=1, ceil_mode=False)
)
Sequential(
(0): Conv2d(96, 256, kernel_size=(5, 5), stride=(2, 2))
(1): ReLU(inplace=True)
(2): LocalResponseNorm(2, alpha=0.0001, beta=0.75, k=1.0)
(3): MaxPool2d(kernel_size=3, stride=2, padding=0, dilation=1, ceil_mode=False)
)
Sequential(
(0): Conv2d(256, 512, kernel_size=(3, 3), stride=(1, 1))
(1): ReLU(inplace=True)
)
Sequential(
(0): Linear(in_features=4608, out_features=512, bias=True)
(1): ReLU(inplace=True)
)
Sequential(
(0): Dropout(p=0.5, inplace=False)
(1): Linear(in_features=512, out_features=512, bias=True)
(2): ReLU(inplace=True)
)