Parameters of a Deep Learning Model

Computing a model's parameter count and related statistics.

First, install torchstat:

!pip install torchstat

from torchstat import stat
# Import the network architectures
# These are models I have implemented
from model.wideresnet import WideResNet
from model.wideresnet_CDD import WideResNet_CDD
from model.resnet import Resnet
from model.resnet_CDD import Resnet_CDD
from model.se_resnet import Se_net
from model.se_resnet_CDD import Se_net_CDD
from model.densenet3 import DenseNet
from model.densenet3_CDD import DenseNet_CDD
from model.shake_shake import Shake_shake
from model.shake_shake_CDD import Shake_shake_CDD
from model.pyramid import Pyramid

# model = WideResNet(40, 100, widen_factor=12, dropRate=0.0, nc=3, nd=1)
model = Resnet_CDD(layers=32, num_classes=100, nc=3, nd=1, width=4)
stat(model, (3, 32, 32))

torchstat only reports the computational complexity and related statistics for a single image of a given size. To estimate the cost of processing multiple images, simply multiply the per-image result by the number of images, as sketched below.
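A minimal sketch of that scaling, using the per-image totals reported in the output below; the batch size here is a hypothetical value:

# Scale torchstat's per-image totals to a batch by multiplying by the image count.
# The per-image figures are taken from the "Total Flops" / "Total MAdd" lines of
# the torchstat output for Resnet_CDD below; the batch size is hypothetical.
flops_per_image = 1.18e9   # Total Flops for one 3x32x32 image
madd_per_image = 2.36e9    # Total MAdd for one 3x32x32 image
batch_size = 64            # hypothetical number of images

print(f"FLOPs for {batch_size} images: {flops_per_image * batch_size:.2e}")
print(f"MAdd for {batch_size} images: {madd_per_image * batch_size:.2e}")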
The output is as follows:

               module name  input shape output shape     params memory(MB)             MAdd            Flops  MemRead(B)  MemWrite(B) duration[%]   MemR+W(B)
0                    conv1    3  32  32   64  32  32     1728.0       0.25      3,473,408.0      1,769,472.0     19200.0     262144.0       1.57%    281344.0
1                      bn1   64  32  32   64  32  32      128.0       0.25        262,144.0        131,072.0    262656.0     262144.0       0.88%    524800.0
2           layer1.0.conv1   64  32  32   64  32  32    36864.0       0.25     75,431,936.0     37,748,736.0    409600.0     262144.0       2.41%    671744.0
3             layer1.0.bn1   64  32  32   64  32  32      128.0       0.25        262,144.0        131,072.0    262656.0     262144.0       0.79%    524800.0
4           layer1.0.conv2   64  32  32   64  32  32    36864.0       0.25     75,431,936.0     37,748,736.0    409600.0     262144.0       1.75%    671744.0
5             layer1.0.bn2   64  32  32   64  32  32      128.0       0.25        262,144.0        131,072.0    262656.0     262144.0       0.77%    524800.0
6        layer1.0.shortcut   64  32  32   64  32  32        0.0       0.25              0.0              0.0         0.0          0.0       0.01%         0.0
7           layer1.1.conv1   64  32  32   64  32  32    36864.0       0.25     75,431,936.0     37,748,736.0    409600.0     262144.0       1.77%    671744.0
8             layer1.1.bn1   64  32  32   64  32  32      128.0       0.25        262,144.0        131,072.0    262656.0     262144.0       0.78%    524800.0
9           layer1.1.conv2   64  32  32   64  32  32    36864.0       0.25     75,431,936.0     37,748,736.0    409600.0     262144.0       1.73%    671744.0
10            layer1.1.bn2   64  32  32   64  32  32      128.0       0.25        262,144.0        131,072.0    262656.0     262144.0       0.74%    524800.0
11       layer1.1.shortcut   64  32  32   64  32  32        0.0       0.25              0.0              0.0         0.0          0.0       0.01%         0.0
12          layer1.2.conv1   64  32  32   64  32  32    36864.0       0.25     75,431,936.0     37,748,736.0    409600.0     262144.0       1.77%    671744.0
13            layer1.2.bn1   64  32  32   64  32  32      128.0       0.25        262,144.0        131,072.0    262656.0     262144.0       0.74%    524800.0
14          layer1.2.conv2   64  32  32   64  32  32    36864.0       0.25     75,431,936.0     37,748,736.0    409600.0     262144.0       1.87%    671744.0
15            layer1.2.bn2   64  32  32   64  32  32      128.0       0.25        262,144.0        131,072.0    262656.0     262144.0       0.77%    524800.0
16       layer1.2.shortcut   64  32  32   64  32  32        0.0       0.25              0.0              0.0         0.0          0.0       0.01%         0.0
17          layer1.3.conv1   64  32  32   64  32  32    36864.0       0.25     75,431,936.0     37,748,736.0    409600.0     262144.0       2.18%    671744.0
18            layer1.3.bn1   64  32  32   64  32  32      128.0       0.25        262,144.0        131,072.0    262656.0     262144.0       1.20%    524800.0
19          layer1.3.conv2   64  32  32   64  32  32    36864.0       0.25     75,431,936.0     37,748,736.0    409600.0     262144.0       1.89%    671744.0
20            layer1.3.bn2   64  32  32   64  32  32      128.0       0.25        262,144.0        131,072.0    262656.0     262144.0       0.75%    524800.0
21       layer1.3.shortcut   64  32  32   64  32  32        0.0       0.25              0.0              0.0         0.0          0.0       0.01%         0.0
22          layer1.4.conv1   64  32  32   64  32  32    36864.0       0.25     75,431,936.0     37,748,736.0    409600.0     262144.0       1.71%    671744.0
23            layer1.4.bn1   64  32  32   64  32  32      128.0       0.25        262,144.0        131,072.0    262656.0     262144.0       0.76%    524800.0
24          layer1.4.conv2   64  32  32   64  32  32    36864.0       0.25     75,431,936.0     37,748,736.0    409600.0     262144.0       1.72%    671744.0
25            layer1.4.bn2   64  32  32   64  32  32      128.0       0.25        262,144.0        131,072.0    262656.0     262144.0       0.76%    524800.0
26       layer1.4.shortcut   64  32  32   64  32  32        0.0       0.25              0.0              0.0         0.0          0.0       0.01%         0.0
27               CDD1.drop   64  32  32   64  32  32        0.0       0.25              0.0              0.0         0.0          0.0       0.71%         0.0
28               CDD1.norm   64  32  32   64  32  32      128.0       0.25        262,144.0        131,072.0    262656.0     262144.0       0.82%    524800.0
29                CDD1.CDD   64  32  32   64  32  32    36928.0       0.25     75,497,472.0     37,814,272.0    409856.0     262144.0       2.43%    672000.0
30          layer2.0.conv1   64  32  32  128  16  16    73728.0       0.12     37,715,968.0     18,874,368.0    557056.0     131072.0       1.95%    688128.0
31            layer2.0.bn1  128  16  16  128  16  16      256.0       0.12        131,072.0         65,536.0    132096.0     131072.0       0.69%    263168.0
32          layer2.0.conv2  128  16  16  128  16  16   147456.0       0.12     75,464,704.0     37,748,736.0    720896.0     131072.0       2.17%    851968.0
33            layer2.0.bn2  128  16  16  128  16  16      256.0       0.12        131,072.0         65,536.0    132096.0     131072.0       0.76%    263168.0
34     layer2.0.shortcut.0   64  32  32  128  16  16     8192.0       0.12      4,161,536.0      2,097,152.0    294912.0     131072.0       1.64%    425984.0
35     layer2.0.shortcut.1  128  16  16  128  16  16      256.0       0.12        131,072.0         65,536.0    132096.0     131072.0       0.70%    263168.0
36          layer2.1.conv1  128  16  16  128  16  16   147456.0       0.12     75,464,704.0     37,748,736.0    720896.0     131072.0       1.72%    851968.0
37            layer2.1.bn1  128  16  16  128  16  16      256.0       0.12        131,072.0         65,536.0    132096.0     131072.0       0.74%    263168.0
38          layer2.1.conv2  128  16  16  128  16  16   147456.0       0.12     75,464,704.0     37,748,736.0    720896.0     131072.0       1.65%    851968.0
39            layer2.1.bn2  128  16  16  128  16  16      256.0       0.12        131,072.0         65,536.0    132096.0     131072.0       0.71%    263168.0
40       layer2.1.shortcut  128  16  16  128  16  16        0.0       0.12              0.0              0.0         0.0          0.0       0.01%         0.0
41          layer2.2.conv1  128  16  16  128  16  16   147456.0       0.12     75,464,704.0     37,748,736.0    720896.0     131072.0       1.68%    851968.0
42            layer2.2.bn1  128  16  16  128  16  16      256.0       0.12        131,072.0         65,536.0    132096.0     131072.0       0.75%    263168.0
43          layer2.2.conv2  128  16  16  128  16  16   147456.0       0.12     75,464,704.0     37,748,736.0    720896.0     131072.0       1.77%    851968.0
44            layer2.2.bn2  128  16  16  128  16  16      256.0       0.12        131,072.0         65,536.0    132096.0     131072.0       0.70%    263168.0
45       layer2.2.shortcut  128  16  16  128  16  16        0.0       0.12              0.0              0.0         0.0          0.0       0.01%         0.0
46          layer2.3.conv1  128  16  16  128  16  16   147456.0       0.12     75,464,704.0     37,748,736.0    720896.0     131072.0       1.68%    851968.0
47            layer2.3.bn1  128  16  16  128  16  16      256.0       0.12        131,072.0         65,536.0    132096.0     131072.0       0.74%    263168.0
48          layer2.3.conv2  128  16  16  128  16  16   147456.0       0.12     75,464,704.0     37,748,736.0    720896.0     131072.0       1.66%    851968.0
49            layer2.3.bn2  128  16  16  128  16  16      256.0       0.12        131,072.0         65,536.0    132096.0     131072.0       0.72%    263168.0
50       layer2.3.shortcut  128  16  16  128  16  16        0.0       0.12              0.0              0.0         0.0          0.0       0.01%         0.0
51          layer2.4.conv1  128  16  16  128  16  16   147456.0       0.12     75,464,704.0     37,748,736.0    720896.0     131072.0       1.70%    851968.0
52            layer2.4.bn1  128  16  16  128  16  16      256.0       0.12        131,072.0         65,536.0    132096.0     131072.0       0.71%    263168.0
53          layer2.4.conv2  128  16  16  128  16  16   147456.0       0.12     75,464,704.0     37,748,736.0    720896.0     131072.0       1.66%    851968.0
54            layer2.4.bn2  128  16  16  128  16  16      256.0       0.12        131,072.0         65,536.0    132096.0     131072.0       0.71%    263168.0
55       layer2.4.shortcut  128  16  16  128  16  16        0.0       0.12              0.0              0.0         0.0          0.0       0.01%         0.0
56               CDD2.drop  128  16  16  128  16  16        0.0       0.12              0.0              0.0         0.0          0.0       0.70%         0.0
57               CDD2.norm  128  16  16  128  16  16      256.0       0.12        131,072.0         65,536.0    132096.0     131072.0       0.76%    263168.0
58                CDD2.CDD   64  32  32  128  16  16    73856.0       0.12     37,748,736.0     18,907,136.0    557568.0     131072.0       1.92%    688640.0
59          layer3.0.conv1  128  16  16  256   8   8   294912.0       0.06     37,732,352.0     18,874,368.0   1310720.0      65536.0       2.11%   1376256.0
60            layer3.0.bn1  256   8   8  256   8   8      512.0       0.06         65,536.0         32,768.0     67584.0      65536.0       0.69%    133120.0
61          layer3.0.conv2  256   8   8  256   8   8   589824.0       0.06     75,481,088.0     37,748,736.0   2424832.0      65536.0       2.85%   2490368.0
62            layer3.0.bn2  256   8   8  256   8   8      512.0       0.06         65,536.0         32,768.0     67584.0      65536.0       0.67%    133120.0
63     layer3.0.shortcut.0  128  16  16  256   8   8    32768.0       0.06      4,177,920.0      2,097,152.0    262144.0      65536.0       1.62%    327680.0
64     layer3.0.shortcut.1  256   8   8  256   8   8      512.0       0.06         65,536.0         32,768.0     67584.0      65536.0       0.67%    133120.0
65          layer3.1.conv1  256   8   8  256   8   8   589824.0       0.06     75,481,088.0     37,748,736.0   2424832.0      65536.0       2.16%   2490368.0
66            layer3.1.bn1  256   8   8  256   8   8      512.0       0.06         65,536.0         32,768.0     67584.0      65536.0       0.69%    133120.0
67          layer3.1.conv2  256   8   8  256   8   8   589824.0       0.06     75,481,088.0     37,748,736.0   2424832.0      65536.0       2.05%   2490368.0
68            layer3.1.bn2  256   8   8  256   8   8      512.0       0.06         65,536.0         32,768.0     67584.0      65536.0       0.70%    133120.0
69       layer3.1.shortcut  256   8   8  256   8   8        0.0       0.06              0.0              0.0         0.0          0.0       0.01%         0.0
70          layer3.2.conv1  256   8   8  256   8   8   589824.0       0.06     75,481,088.0     37,748,736.0   2424832.0      65536.0       2.07%   2490368.0
71            layer3.2.bn1  256   8   8  256   8   8      512.0       0.06         65,536.0         32,768.0     67584.0      65536.0       0.69%    133120.0
72          layer3.2.conv2  256   8   8  256   8   8   589824.0       0.06     75,481,088.0     37,748,736.0   2424832.0      65536.0       2.05%   2490368.0
73            layer3.2.bn2  256   8   8  256   8   8      512.0       0.06         65,536.0         32,768.0     67584.0      65536.0       0.71%    133120.0
74       layer3.2.shortcut  256   8   8  256   8   8        0.0       0.06              0.0              0.0         0.0          0.0       0.01%         0.0
75          layer3.3.conv1  256   8   8  256   8   8   589824.0       0.06     75,481,088.0     37,748,736.0   2424832.0      65536.0       2.10%   2490368.0
76            layer3.3.bn1  256   8   8  256   8   8      512.0       0.06         65,536.0         32,768.0     67584.0      65536.0       0.70%    133120.0
77          layer3.3.conv2  256   8   8  256   8   8   589824.0       0.06     75,481,088.0     37,748,736.0   2424832.0      65536.0       1.99%   2490368.0
78            layer3.3.bn2  256   8   8  256   8   8      512.0       0.06         65,536.0         32,768.0     67584.0      65536.0       0.68%    133120.0
79       layer3.3.shortcut  256   8   8  256   8   8        0.0       0.06              0.0              0.0         0.0          0.0       0.01%         0.0
80          layer3.4.conv1  256   8   8  256   8   8   589824.0       0.06     75,481,088.0     37,748,736.0   2424832.0      65536.0       2.05%   2490368.0
81            layer3.4.bn1  256   8   8  256   8   8      512.0       0.06         65,536.0         32,768.0     67584.0      65536.0       0.66%    133120.0
82          layer3.4.conv2  256   8   8  256   8   8   589824.0       0.06     75,481,088.0     37,748,736.0   2424832.0      65536.0       2.02%   2490368.0
83            layer3.4.bn2  256   8   8  256   8   8      512.0       0.06         65,536.0         32,768.0     67584.0      65536.0       0.73%    133120.0
84       layer3.4.shortcut  256   8   8  256   8   8        0.0       0.06              0.0              0.0         0.0          0.0       0.01%         0.0
85               CDD3.drop  256   8   8  256   8   8        0.0       0.06              0.0              0.0         0.0          0.0       0.65%         0.0
86               CDD3.norm  256   8   8  256   8   8      512.0       0.06         65,536.0         32,768.0     67584.0      65536.0       1.93%    133120.0
87                CDD3.CDD  128  16  16  256   8   8   295168.0       0.06     37,748,736.0     18,890,752.0   1311744.0      65536.0       2.11%   1377280.0
88                  linear          256          100    25700.0       0.00         51,100.0         25,600.0    103824.0        400.0       0.67%    104224.0
total                                                 7857892.0      12.94  2,356,643,740.0  1,179,067,392.0    103824.0        400.0     100.00%  53793056.0
=============================================================================================================================================================
Total params: 7,857,892
-------------------------------------------------------------------------------------------------------------------------------------------------------------
Total memory: 12.94MB
Total MAdd: 2.36GMAdd
Total Flops: 1.18GFlops
Total MemR+W: 51.3MB
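As a sanity check, the "Total params" figure can be reproduced directly in PyTorch. A minimal sketch, assuming the `model` object created in the snippet above:

# Cross-check torchstat's "Total params" by counting parameters with plain PyTorch.
# Assumes `model` is the Resnet_CDD instance created earlier.
total_params = sum(p.numel() for p in model.parameters())
trainable_params = sum(p.numel() for p in model.parameters() if p.requires_grad)
print(f"Total params:     {total_params:,}")
print(f"Trainable params: {trainable_params:,}")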