# Tail of the residual block's constructor (the `def __init__`/class header
# sits above this chunk). Builds the main path (3x3 conv -> BN -> ReLU ->
# 3x3 conv -> BN) and the shortcut branch.
# FIX: mis-encoded smart quotes ("‘same’") replaced with ASCII quotes —
# the original lines were a syntax error.
self.conv1 = layers.Conv2D(filter_num, (3, 3), strides=stride, padding='same')
self.bn1 = layers.BatchNormalization()
self.relu = layers.Activation('relu')
self.conv2 = layers.Conv2D(filter_num, (3, 3), strides=1, padding='same')
self.bn2 = layers.BatchNormalization()
if stride != 1:
    # Projection shortcut: 1x1 conv matches the identity branch's
    # spatial size and channel count to the downsampled main path.
    self.downsample = Sequential()
    self.downsample.add(layers.Conv2D(filter_num, (1, 1), strides=stride))
else:
    # Shapes already match — plain identity shortcut.
    self.downsample = lambda x: x
def call(self, input, training=None):
    """Forward pass of the residual block: F(x) + shortcut(x), then ReLU.

    Args:
        input: 4-D feature-map tensor (presumably NHWC — confirm with caller).
            NOTE(review): the name shadows the `input` builtin; kept for
            interface compatibility.
        training: train/inference mode flag, forwarded to BatchNormalization.

    Returns:
        Activated tensor with `filter_num` channels; spatially downsampled
        when the block was built with stride != 1.
    """
    out = self.conv1(input)
    # FIX: forward `training` explicitly — BN must use batch statistics
    # while training and moving averages at inference; relying on implicit
    # propagation is fragile.
    out = self.bn1(out, training=training)
    out = self.relu(out)
    out = self.conv2(out)
    out = self.bn2(out, training=training)
    # Shortcut branch: 1x1 projection or identity, chosen in __init__.
    identity = self.downsample(input)
    output = layers.add([out, identity])
    # Use the same ReLU layer as the main path (was tf.nn.relu — identical
    # behavior, now consistent).
    return self.relu(output)
# --- Second residual module ---
class Block(layers.Layer):
    """Second residual module: 1x1 conv followed by a 3x3 conv path.

    NOTE(review): this chunk is truncated below — the remainder of
    __init__ (and presumably call) continues past this view.
    """

    def __init__(self, filters, downsample=False, stride=1):
        # BUG FIX: was `def init(...)` and `super(Block, self).init()` —
        # the dunder underscores were stripped (web copy-paste), so the
        # constructor would never run as __init__ and the base Layer was
        # never initialized. Also replaced mis-encoded smart quotes.
        super(Block, self).__init__()
        self.downsample = downsample
        self.conv1 = layers.Conv2D(filters, (1, 1), strides=stride, padding='same')
        self.bn1 = layers.BatchNormalization()
        self.relu = layers.Activation('relu')
        self.conv2 = layers.Conv2D(filters, (3, 3), strides=1, padding='same')
se