DCNN: 1-D dilated convolutional network (dilations 1/2/4/8 with a GLU-style gate and residual connection)

import os
import torch
import torch.nn.functional as F
import torch.nn as nn

class dilated_cnn(nn.Module):
    """1-D dilated CNN block with a GLU-style gate and a residual connection.

    Four stacked Conv1d layers with dilations 1, 2, 4, 8, each padded with
    (kernel_size - 1) * dilation // 2 so the temporal length is preserved,
    followed by a tanh * sigmoid gate, a 1x1 projection back to
    ``in_channel`` channels, and a residual add with the input.

    Input and output shape: (batch, in_channel, length).

    Args:
        in_channel: number of channels of the input (and output) tensor.
    """

    def __init__(self, in_channel):
        super(dilated_cnn, self).__init__()

        # padding = (kernel_size - 1) * dilation // 2 keeps the sequence
        # length unchanged for odd kernel sizes (here kernel_size=5).
        self.fconv_d1 = nn.Conv1d(in_channels=in_channel, out_channels=64, kernel_size=5, dilation=1, padding=2)
        self.fconv_d2 = nn.Conv1d(in_channels=64, out_channels=64, kernel_size=5, dilation=2, padding=(5 - 1) * 2 // 2)
        self.fconv_d3 = nn.Conv1d(in_channels=64, out_channels=64, kernel_size=5, dilation=4, padding=(5 - 1) * 4 // 2)
        self.fconv_d4 = nn.Conv1d(in_channels=64, out_channels=64, kernel_size=5, dilation=8, padding=(5 - 1) * 8 // 2)
        # 1x1 convolution projects back to in_channel so the residual add works.
        self.fconv1x1 = nn.Conv1d(in_channels=64, out_channels=in_channel, kernel_size=1, dilation=1)
        self.bn1 = nn.BatchNorm1d(num_features=64)
        self.bn2 = nn.BatchNorm1d(num_features=64)
        self.bn3 = nn.BatchNorm1d(num_features=64)
        self.bn4 = nn.BatchNorm1d(num_features=64)

    def forward(self, x1):
        """Apply the dilated conv stack, gate, projection, and residual add.

        Args:
            x1: tensor of shape (batch, in_channel, length).

        Returns:
            Tensor of the same shape as ``x1``.
        """
        x = F.relu(self.bn1(self.fconv_d1(x1)))
        x = F.relu(self.bn2(self.fconv_d2(x)))
        x = F.relu(self.bn3(self.fconv_d3(x)))
        x = F.relu(self.bn4(self.fconv_d4(x)))
        # GLU-style gate. Fix: F.tanh is deprecated — use torch.tanh.
        res_glu_1 = torch.tanh(x) * torch.sigmoid(x)
        x = self.fconv1x1(res_glu_1)
        # Residual connection: output shape matches the input shape.
        x = x + x1

        return x
        
if __name__ == "__main__":
    # Quick smoke test: the residual design keeps input and output shapes equal.
    model = dilated_cnn(in_channel=16)
    sample = torch.randn(20, 16, 100)
    out = model(sample)
    print("x:", out.shape)
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值