# 参考: https://blog.youkuaiyun.com/rocking_struggling/article/details/108357089
class MyNetk(t.nn.Module):
    """Demo CNN: three conv+ReLU+pool stages, then a conv4 stage that
    forward() deliberately applies four times to illustrate that reusing
    a module shares one parameter set rather than creating new ones.
    """

    def __init__(self):
        super(MyNetk, self).__init__()
        # Stage 1: (N, 3, 64, 64) -> conv -> (N, 16, 64, 64) -> pool -> (N, 16, 32, 32)
        self.conv1 = nn.Sequential(
            nn.Conv2d(
                in_channels=3,
                out_channels=16,
                kernel_size=3,
                stride=1,
                padding=1,
            ),
            nn.ReLU(),
            nn.MaxPool2d(kernel_size=2),
        )
        # Stage 2: (N, 16, 32, 32) -> (N, 32, 16, 16)
        self.conv2 = nn.Sequential(
            nn.Conv2d(16, 32, 3, 1, 1),
            nn.ReLU(),
            nn.MaxPool2d(2),
        )
        # Stage 3: (N, 32, 16, 16) -> (N, 64, 8, 8)
        self.conv3 = nn.Sequential(
            nn.Conv2d(32, 64, 3, 1, 1),
            nn.ReLU(),
            nn.MaxPool2d(2),
        )
        # Stage 4: spatial size unchanged (no pooling). The two BatchNorm2d
        # layers are intentional: each registers its own parameters, which
        # show up as conv4.1.* and conv4.2.* in named_parameters().
        self.conv4 = nn.Sequential(
            nn.Conv2d(64, 64, 3, 1, 1),
            nn.BatchNorm2d(64),
            nn.BatchNorm2d(64),
            nn.ReLU(),
        )

    def forward(self, x):
        x = self.conv1(x)
        x = self.conv2(x)
        x = self.conv3(x)
        # conv4 is applied four times on purpose: the same weights are
        # reused each pass, so the repeated calls add no new parameters.
        for _ in range(4):
            x = self.conv4(x)
        return x
if __name__ == '__main__':
    # Build the demo network, show its module structure, and list the
    # name of every registered parameter tensor.
    net = MyNetk()
    print(net)
    for name, _param in net.named_parameters():
        print(name)
# 说明:torch 网络中的模块可以重复调用,但重复调用共享同一组参数——
# 网络的参数数目在初始化时已经固定。如上面只定义了一个 conv4,
# forward 中调用四次也不会新增参数。
# 但同一个 Sequential 内部重复添加的模块各自拥有独立参数,
# 如 conv4 中的两个 nn.BatchNorm2d 分别对应参数 conv4.1.* 和 conv4.2.*。