Show me your code
1. Model: model.py
import torch
from torch import nn
class ConvBlock(nn.Module):
    """
    A single convolutional block:
    - convolution layer
    - batch normalization layer
    - activation layer
    """
    def __init__(self, in_channels, out_channels,
                 kernel_size=3, stride=1, padding=1):
        super().__init__()
        self.conv = nn.Conv2d(in_channels=in_channels, out_channels=out_channels,
                              kernel_size=kernel_size, stride=stride, padding=padding)
        self.bn = nn.BatchNorm2d(num_features=out_channels)
        self.relu = nn.ReLU()

    def forward(self, x):
        x = self.conv(x)
        x = self.bn(x)
        x = self.relu(x)
        return x
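A minimal shape check for ConvBlock (my own sketch, not part of the original file): with the default kernel_size=3, stride=1, padding=1, the spatial size is preserved and only the channel count changes.

# Sketch only: assumed usage of ConvBlock, not from the original repository.
block = ConvBlock(in_channels=3, out_channels=16)   # defaults: 3x3 conv, stride 1, padding 1
dummy = torch.randn(1, 3, 32, 32)                   # (N, C, H, W)
print(block(dummy).shape)                           # torch.Size([1, 16, 32, 32])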
class LeNet(nn.Module):
    def __init__(self, num_classes=10):
        super().__init__()
        self.feature_extractor = nn.Sequential(
            ConvBlock(in_channels=3,
                      out_channels=6,
                      kernel_size=5,
                      stride=1,
                      padding=0),
            nn.MaxPool2d(kernel_size=2, stride=2, padding=0),
            ConvBlock(in_channels=6,
                      out_channels=16,
                      kernel_size=5,
                      stride=1,
                      padding=0),
            nn.MaxPool2d(kernel_size=2, stride=2, padding=0),
        )
        self.classifier = nn.Sequential(
            nn.Flatten(),
            nn.Linear(in_features=400, out_features=120),
            nn.ReLU(),
            nn.Linear(in_features=120, out_features=84),
            nn.ReLU(),
            nn.Linear(in_features=84, out_features=num_classes)
        )

    def forward(self, x):
        x = self.feature_extractor(x)
        x = self.classifier(x)
        return x
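A usage sketch for LeNet (an assumption on my part, not from the original file): the first conv takes 3 channels and the classifier expects 400 = 16 * 5 * 5 features, which corresponds to CIFAR-10-style 3 x 32 x 32 inputs.

# Assumed usage: CIFAR-10-style input (3 x 32 x 32), which is what makes the
# flattened feature size come out to 16 * 5 * 5 = 400.
model = LeNet(num_classes=10)
x = torch.randn(4, 3, 32, 32)   # batch of 4 RGB images
logits = model(x)
print(logits.shape)             # torch.Size([4, 10])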
class Vgg16(nn.Module):
    def __init__(self, n_classes=1000):
        super().__init__()
        self.feature_extractor = nn.Sequential(
            # stage 1: two 3x3 convs, 64 channels
            ConvBlock(in_channels=3,
                      out_channels=64,
                      kernel_size=3,
                      stride=1,
                      padding=1),
            ConvBlock(in_channels=64,
                      out_channels=64,
                      kernel_size=3,
                      stride=1,
                      padding=1),
            nn.MaxPool2d(kernel_size=2, stride=2, padding=0),
            # stage 2: two 3x3 convs, 128 channels
            ConvBlock(in_channels=64,
                      out_channels=128,
                      kernel_size=3,
                      stride=1,
                      padding=1),
            ConvBlock(in_channels=128,
                      out_channels=128,
                      kernel_size=3,
                      stride=1,
                      padding=1),
            nn.MaxPool2d(kernel_size=2, stride=2, padding=0),
            # stage 3: three 3x3 convs, 256 channels
            ConvBlock(in_channels=128,
                      out_channels=256,
                      kernel_size=3,
                      stride=1,
                      padding=1),
            ConvBlock(in_channels=256,
                      out_channels=256,
                      kernel_size=3,
                      stride=1,
                      padding=1),
            ConvBlock(in_channels=256,
                      out_channels=256,
                      kernel_size=3,
                      stride=1,
                      padding=1),
            nn.MaxPool2d(kernel_size=2, stride=2, padding=0),
            # stage 4: three 3x3 convs, 512 channels
            ConvBlock(in_channels=256,
                      out_channels=512,
                      kernel_size=3,
                      stride=1,
                      padding=1),
            ConvBlock(in_channels=512,
                      out_channels=512,
                      kernel_size=3,
                      stride=1,
                      padding=1),
            ConvBlock(in_channels=512,
                      out_channels=512,
                      kernel_size=3,
                      stride=1,
                      padding=1),
            nn.MaxPool2d(kernel_size=2, stride=2, padding=0),
            # stage 5: three 3x3 convs, 512 channels
            ConvBlock(in_channels=512,
                      out_channels=512,
                      kernel_size=3,
                      stride=1,
                      padding=1),
            ConvBlock(in_channels=512,
                      out_channels=512,
                      kernel_size=3,
                      stride=1,
                      padding=1),
            ConvBlock(in_channels=512,
                      out_channels=512,
                      kernel_size=3,
                      stride=1,
                      padding=1),
            nn.MaxPool2d(kernel_size=2, stride=2, padding=0),
        )
        self.classifier = nn.Sequential(
            nn.Flatten(),
            nn.Linear(in_features=7 * 7 * 512, out_features=4096),
            nn.ReLU(),
            nn.Linear(in_features=4096, out_features=4096),
            nn.ReLU(),
            nn.Linear(in_features=4096, out_features=n_classes)
        )

    def forward(self, x):
        x = self.feature_extractor(x)
        x = self.classifier(x)
        return x
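A usage sketch for Vgg16 (my assumption, not from the original file): the classifier's in_features of 7 * 7 * 512 implies ImageNet-style 3 x 224 x 224 inputs, since five 2x2 max-pools reduce 224 to 7. Because ConvBlock adds BatchNorm and the classifier omits dropout, this variant is closer to a vgg16_bn-style network than to the original VGG-16 paper.

# Assumed usage: ImageNet-style input (3 x 224 x 224); after five 2x2 max-pools
# the feature map is 512 x 7 x 7, matching the 7 * 7 * 512 in_features above.
model = Vgg16(n_classes=1000)
x = torch.randn(2, 3, 224, 224)
logits = model(x)
print(logits.shape)             # torch.Size([2, 1000])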
"""
ResBlock
"""
class ResConvBlock(nn.Module):
    """
    The "dashed-line" (projection) shortcut block: the first shortcut block at
    the start of each large repeated stage is this one.
    Implementation logic:
        y = F(x) + Conv(x)
    """
    def __init__(self, in_channels, out_channels, stride):
        super().__init__()
        self.stage = nn.Sequential(
            nn.Conv2d(in_channels=in_channels,
                      out_channels=out_channels[0],
                      kernel_size=1,
                      stride=stride,
                      padding=0,
                      bias=False),
            nn.BatchNorm2d(num_features=out_channels[0]),
            nn.ReLU(),
            nn.Conv2d(in_channels=out_channels[0],
                      out_channels=out_channels[1],
                      kernel_size=3,
                      padding=1,
                      stride