import tensorflow as tf
from tensorflow.keras import layers, models
import math
def Bottleneck(x, growthRate):
    """DenseNet-style bottleneck layer for 1-D feature maps.

    Applies BN -> ReLU -> 1x1 Conv (to 4*growthRate channels) followed by
    BN -> ReLU -> 3-wide Conv (to growthRate channels), then concatenates
    the new features with the input along the channel axis (dense connectivity).

    Args:
        x: input tensor from the Keras functional API.
        growthRate: number of feature channels produced by this layer.

    Returns:
        Tensor with `growthRate` more channels than `x`.
    """
    # The 1x1 "bottleneck" conv expands to 4x the growth rate before the 3-wide conv.
    bottleneck_width = 4 * growthRate

    h = layers.BatchNormalization()(x)
    h = tf.nn.relu(h)
    h = layers.Conv1D(bottleneck_width, kernel_size=1, use_bias=False)(h)

    h = layers.BatchNormalization()(h)
    h = tf.nn.relu(h)
    h = layers.Conv1D(growthRate, kernel_size=3, padding='same', use_bias=False)(h)

    # Dense connection: stack the new features onto the incoming ones.
    return layers.Concatenate()([x, h])
def SingleLayer(x, growthRate):
    """Plain (non-bottleneck) DenseNet layer for 1-D feature maps.

    Applies BN -> ReLU -> 3-wide Conv producing `growthRate` channels and
    concatenates the result with the input (dense connectivity).

    Args:
        x: input tensor from the Keras functional API.
        growthRate: number of feature channels added by this layer.

    Returns:
        Tensor with `growthRate` more channels than `x`.
    """
    h = layers.BatchNormalization()(x)
    h = tf.nn.relu(h)
    new_features = layers.Conv1D(growthRate, kernel_size=3, padding='same',
                                 use_bias=False)(h)
    # Dense connection: append the new features to the incoming ones.
    return layers.Concatenate()([x, new_features])
def Transition(x, nOutChannels, down=False):
    """Transition layer between dense blocks.

    Applies BN -> ReLU -> 1x1 Conv to compress the channel count to
    `nOutChannels`, optionally halving the temporal resolution with
    average pooling.

    Args:
        x: input tensor from the Keras functional API.
        nOutChannels: channel count after the 1x1 convolution.
        down: when True, downsample by a factor of 2 via average pooling.

    Returns:
        Compressed (and optionally downsampled) tensor.
    """
    h = tf.nn.relu(layers.BatchNormalization()(x))
    h = layers.Conv1D(nOutChannels, kernel_size=1, use_bias=False)(h)
    if not down:
        return h
    # Halve the sequence length between dense blocks.
    return layers.AveragePooling1D(pool_size=2)(h)
def ResidualUBlock(inputs, out_ch, mid_ch, layers_num, downsampling=True):
K = 9 # Kernel size
x = layers.Conv1D(out_ch, kernel_size=K, padding="same", use_bias=False)(inputs)
x = layers.BatchNormalization()(x)
x = tf.nn.leaky_relu(x)
encoders_out = []
for idx in range(layers_num):
encoder_x = layers.Conv1D(mid_ch, kernel_size=K, strides=2, padding="same", use_bias=False)(x)
encoder_x = layers.BatchNormalization()(encoder_x)
encoder_x = tf.nn.leaky_relu(encoder_x)
# print('encoder_x',e
# ResNet-DenseNet architecture with Residual-U blocks (TensorFlow version).
# First published 2024-08-13 23:39:16.