import tensorflow as tf
from tensorflow.keras import layers
# Build the AlexNet model.
class AlexNet(layers.Layer):
    """AlexNet convolutional network with a 10-way dense output.

    Expects NHWC image tensors; with 224x224 inputs the conv/pool stack
    reproduces the classic AlexNet layout. NOTE(review): the smoke test
    below feeds a single-channel image — confirm the intended channel
    count (original AlexNet used RGB).
    """

    def __init__(self):
        super(AlexNet, self).__init__()
        # Conv: 96 kernels, 11x11, stride 4, ReLU ('valid' padding by default).
        self.c1 = layers.Conv2D(96, kernel_size=11, strides=4, activation='relu')
        # Max pool: 3x3 window, stride 2.
        self.s1 = layers.MaxPool2D(pool_size=3, strides=2)
        # Conv: 256 kernels, 5x5, stride 1, 'same' padding, ReLU.
        self.c2 = layers.Conv2D(256, kernel_size=5, padding='same', activation='relu')
        # Max pool: 3x3 window, stride 2.
        self.s2 = layers.MaxPool2D(pool_size=3, strides=2)
        # Conv: 384 kernels, 3x3, stride 1, 'same' padding, ReLU.
        self.c3 = layers.Conv2D(384, kernel_size=3, padding="same", activation="relu")
        # Conv: 384 kernels, 3x3, stride 1, 'same' padding, ReLU.
        self.c4 = layers.Conv2D(384, kernel_size=3, padding="same", activation="relu")
        # Conv: 256 kernels, 3x3, stride 1, 'same' padding, ReLU.
        self.c5 = layers.Conv2D(256, kernel_size=3, padding="same", activation="relu")
        # Max pool: 3x3 window, stride 2.
        self.s3 = layers.MaxPool2D(pool_size=3, strides=2)
        # Flatten feature maps to a 1-D vector per sample.
        self.f1 = layers.Flatten()
        # Fully connected: 4096 units, ReLU applied as a separate layer.
        self.d1 = layers.Dense(4096)
        self.r6 = layers.Activation('relu')
        # Dropout with rate 0.5.
        self.dro1 = layers.Dropout(0.5)
        # Fully connected: 4096 units, ReLU applied as a separate layer.
        self.d2 = layers.Dense(4096)
        self.r7 = layers.Activation('relu')
        # Dropout with rate 0.5.
        self.dro2 = layers.Dropout(0.5)
        # Output layer: 10 units (logits, no activation).
        self.d3 = layers.Dense(10)

    def call(self, x, training=None):
        """Forward pass.

        Args:
            x: input image batch (NHWC).
            training: optional bool; gates the dropout layers. Defaults to
                None so Keras propagates the framework's training phase —
                backward-compatible with callers that omit it.

        Returns:
            Logits tensor of shape (batch, 10).
        """
        x = self.c1(x)
        x = self.s1(x)
        x = self.c2(x)
        x = self.s2(x)
        x = self.c3(x)
        x = self.c4(x)
        x = self.c5(x)
        x = self.s3(x)
        x = self.f1(x)
        x = self.d1(x)
        x = self.r6(x)
        # Dropout must know the phase: active in training, identity in inference.
        x = self.dro1(x, training=training)
        x = self.d2(x)
        x = self.r7(x)
        x = self.dro2(x, training=training)
        x = self.d3(x)
        return x
if __name__ == "__main__":
    model = AlexNet()
    # Smoke test: check the network accepts the expected input shape
    # and produces an output (shape should be (1, 10)).
    a = tf.ones((1, 224, 224, 1))
    b = model(a)
    print(b)
# 基于tensorflow实现AlexNet — AlexNet implemented with TensorFlow.
# 最新推荐文章于 2025-05-05 10:24:21 发布  (blog-scrape metadata, not code)