First, you need to understand the network architecture diagram.

Let's start by implementing the MobileNet part.
'''
This is the feature-extraction backbone of MobileNet-SSD,
i.e. the MobileNet portion, but not the complete MobileNet:
the final fully connected layer and the classification layer are removed,
keeping only the feature-extraction layers.
MobileNet's key contribution is splitting a standard convolution layer into
a depthwise convolution followed by a pointwise (1x1) convolution:
conv -> depthwise + pointwise conv
'''
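# A minimal back-of-the-envelope illustration (not part of the original code)
# of why this split saves parameters. For a 3x3 kernel with c_in input
# channels and c_out output channels (ignoring bias):
#   standard convolution:    3 * 3 * c_in * c_out
#   depthwise + pointwise:   3 * 3 * c_in  +  1 * 1 * c_in * c_out
# Example with c_in = 32, c_out = 64:
#   standard:  3*3*32*64          = 18432 parameters
#   separable: 3*3*32 + 1*32*64   = 288 + 2048 = 2336 parameters (~8x fewer)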
import sys
sys.path.append("/home/mooc/桌面/keras_project/Mobilenet-ssd-keras/models")
import keras
import numpy as np
import cv2
import keras.backend as K
import keras.layers as KL
from depthwise_conv2d import DepthwiseConvolution2D  # custom layer defined in this repo
from keras.models import Model
from keras.layers import Input, Lambda, Activation,Conv2D, Convolution2D, MaxPooling2D, ZeroPadding2D, Reshape, Concatenate,BatchNormalization, Add, Conv2DTranspose
from keras.regularizers import l2
def mobilenet(input_tensor):
    if input_tensor is None:
        input_tensor = Input(shape=(300, 300, 3))  # default input size for SSD300
    print("input_tensor", input_tensor.shape)

    # conv0: a standard convolution, no depthwise separation yet
    x = ZeroPadding2D(padding=((1, 1), (1, 1)), name='conv1_padding')(input_tensor)  # zero-pad the input
    x = Convolution2D(32, (3, 3), strides=(2, 2), padding='valid', use_bias=False, name="conv0")(x)
    x = BatchNormalization(momentum=0.99, epsilon=0.00001, name="conv0/bn")(x)  # batch normalization
    x = Activation('relu')(x)

    # conv1: depthwise + pointwise. The pointwise conv is a 1x1 convolution
    # across all channels; its role here is to expand the number of channels.
    x = DepthwiseConvolution2D(32, (3, 3), strides=(1, 1), padding='same', use_bias=False, name="conv1/dw")(x)
    x = BatchNormalization(momentum=0.99, epsilon=0.00001, name="conv1/dw/bn")(x)
    x = Activation('relu')(x)
    x = Convolution2D(64, (1, 1), strides=(1, 1), padding='same', use_bias=False, name="conv1")(x)
    x = BatchNormalization(momentum=0.99, epsilon=0.00001, name="conv1/bn")(x)
    x = Activation('relu')(x)
    print("conv1 shape: ", x.shape)

    # conv2
    # Explicit zero-padding before each strided convolution keeps the spatial
    # size consistent with the expected output shape.
    x = ZeroPadding2D(padding=((1, 1), (1, 1)), name='conv2_padding')(x)
    x = DepthwiseConvolution2D(64, (3, 3), strides=(2, 2), padding='valid', use_bias=False, name="conv2/dw")(x)
    x = BatchNormalization(momentum=0.99, epsilon=0.00001, name="conv2/dw/bn")(x)
    x = Activation('relu')(x)
    x = Convolution2D(128, (1, 1), strides=(1, 1), padding='same', use_bias=False, name="conv2")(x)
    x = BatchNormalization(momentum=0.99, epsilon=0.00001, name="conv2/bn")(x)
    x = Activation('relu')(x)

    # conv3
    x = DepthwiseConvolution2D(128, (3, 3), strides=(1, 1), padding='same', use_bias=False, name="conv3/dw")(x)
    x = BatchNormalization(momentum=0.99, epsilon=0.00001, name="conv3/dw/bn")(x)
    x = Activation('relu')(x)
