TensorFlow 2.0 in Action: CIFAR10 with ResNet18
import tensorflow as tf
from tensorflow import keras
from tensorflow.keras import layers, Sequential
from tensorflow.compat.v1 import ConfigProto
from tensorflow.compat.v1 import InteractiveSession

# Let the GPU allocate memory on demand instead of grabbing all of it up front
config = ConfigProto()
config.gpu_options.allow_growth = True
session = InteractiveSession(config=config)
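For reference, the same on-demand memory behavior can also be enabled with the TF2-native configuration API instead of the compat.v1 session above; a minimal sketch, assuming at least one visible GPU:

# TF2-native alternative to the compat.v1 InteractiveSession config;
# must be called before any GPU is initialized.
for gpu in tf.config.experimental.list_physical_devices('GPU'):
    tf.config.experimental.set_memory_growth(gpu, True)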
class BasicBlock(layers.Layer):
    # Residual block
    def __init__(self, filter_num, stride=1):
        super(BasicBlock, self).__init__()
        # First convolution unit
        self.conv1 = layers.Conv2D(filter_num, (3, 3), strides=stride, padding='same')
        self.bn1 = layers.BatchNormalization()
        self.relu = layers.Activation('relu')
        # Second convolution unit
        self.conv2 = layers.Conv2D(filter_num, (3, 3), strides=1, padding='same')
        self.bn2 = layers.BatchNormalization()
        if stride != 1:
            # Match the shortcut shape with a 1x1 convolution
            self.downsample = Sequential()
            self.downsample.add(layers.Conv2D(filter_num, (1, 1), strides=stride))
        else:
            # Shapes already match: identity shortcut
            self.downsample = lambda x: x

    def call(self, inputs, training=None):
        # Forward pass
        # [b, h, w, c] through the first convolution unit
        out = self.conv1(inputs)
        out = self.bn1(out, training=training)
        out = self.relu(out)
        # Through the second convolution unit
        out = self.conv2(out)
        out = self.bn2(out, training=training)
        # Shortcut branch
        identity = self.downsample(inputs)
        # Add the two paths and apply the final activation
        output = tf.nn.relu(layers.add([out, identity]))
        return output
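As a quick sanity check (not part of the original post), the snippet below instantiates a stride-2 BasicBlock and runs a random CIFAR10-sized feature map through it; the shapes are illustrative only:

# A stride-2 BasicBlock halves the spatial size and changes the channel count,
# e.g. [4, 32, 32, 64] -> [4, 16, 16, 128].
block = BasicBlock(filter_num=128, stride=2)
x = tf.random.normal([4, 32, 32, 64])   # dummy CIFAR10-like feature map
y = block(x, training=False)
print(y.shape)                           # expected: (4, 16, 16, 128)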

This post focuses on TensorFlow 2.0 and works through a hands-on CIFAR10 and ResNet18 example, touching on deep learning, convolution, and neural-network fundamentals while using TensorFlow for the practical implementation.
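The remainder of the original walkthrough is not reproduced here. As a rough sketch of where BasicBlock fits, a ResNet18-style network for CIFAR10 is commonly assembled from four stacks of such blocks; the helper names below (build_resblock, ResNet18) are illustrative assumptions, not the post's code:

# Illustrative sketch only: stacking BasicBlock into a ResNet18-style model.
def build_resblock(filter_num, blocks, stride=1):
    res_blocks = Sequential()
    # Only the first block in a stack may downsample
    res_blocks.add(BasicBlock(filter_num, stride))
    for _ in range(1, blocks):
        res_blocks.add(BasicBlock(filter_num, stride=1))
    return res_blocks

def ResNet18(num_classes=10):
    return Sequential([
        # Stem: one 3x3 convolution
        layers.Conv2D(64, (3, 3), strides=1, padding='same'),
        layers.BatchNormalization(),
        layers.Activation('relu'),
        # Four stacks of two BasicBlocks each:
        # 8 blocks * 2 convs + stem conv + final dense = 18 layers
        build_resblock(64, 2),
        build_resblock(128, 2, stride=2),
        build_resblock(256, 2, stride=2),
        build_resblock(512, 2, stride=2),
        # Global pooling and the classification head
        layers.GlobalAveragePooling2D(),
        layers.Dense(num_classes),
    ])

# CIFAR10 ships with keras; labels come as shape [b, 1] and are squeezed to [b]
(x_train, y_train), (x_test, y_test) = keras.datasets.cifar10.load_data()
x_train = tf.cast(x_train, tf.float32) / 255.0
y_train = tf.squeeze(y_train, axis=1)

model = ResNet18()
model.build(input_shape=(None, 32, 32, 3))
model.summary()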