-
声明
以下代码均基于导入以下包
import torch
import numpy as np
from torch.autograd import Variable
import torchvision
import torch.nn as nn
from torchvision import transforms
-
变量Variable
# Variable demo — note: torch.autograd.Variable has been deprecated since
# PyTorch 0.4; a plain tensor with requires_grad=True does the same job.
x = torch.FloatTensor([1.0, 2.0, 3.0])
# x = torch.randn(3)
print(x)
x_var = Variable(x, requires_grad=True)
y = x_var * 2
print(y)
# Each element's gradient of y = 2*x is 2; the tensor handed to backward()
# scales those gradients element-wise by [1, 0.1, 0.01].
y.backward(torch.FloatTensor([1, 0.1, 0.01]))
# A Variable's three key attributes: gradient, underlying data, producing op.
print(x_var.grad, x_var.data, y.grad_fn)
# Expected output:
# tensor([1., 2., 3.])
# tensor([2., 4., 6.], grad_fn=<MulBackward>)
# tensor([2.0000, 0.2000, 0.0200]) tensor([1., 2., 3.]) <MulBackward object at 0x7f7441fcb940>
-
数据集载入
# Load a test set from a folder-per-class directory layout on disk.
# NOTE(review): `test_input_transform` is not defined in this snippet — it must
# exist (presumably a torchvision.transforms pipeline) before this line runs.
test_set = torchvision.datasets.ImageFolder('dataset/test', transform=test_input_transform)
# Batch the dataset: 8 samples per batch, 2 worker processes, shuffled order.
data_loader = torch.utils.data.DataLoader(test_set, batch_size=8, num_workers=2, shuffle=True)
-
自定义一个网络
class mynet(nn.Module):
    """Residual block: two 3x3 conv + InstanceNorm stages with a skip connection.

    NOTE(review): the skip connection ``x + self.main(x)`` only adds up when
    dim_in == dim_out — confirm callers never pass differing channel counts.
    """

    def __init__(self, dim_in, dim_out):
        super(mynet, self).__init__()
        stages = [
            nn.Conv2d(dim_in, dim_out, kernel_size=3, stride=1, padding=1, bias=False),
            nn.InstanceNorm2d(dim_out, affine=True, track_running_stats=True),
            nn.ReLU(inplace=True),
            nn.Conv2d(dim_out, dim_out, kernel_size=3, stride=1, padding=1, bias=False),
            nn.InstanceNorm2d(dim_out, affine=True, track_running_stats=True),
        ]
        self.main = nn.Sequential(*stages)

    def forward(self, x):
        # Identity shortcut around the conv stack.
        return x + self.main(x)
Sequential 实现网络层的组合;还有一种逐步添加模块的方式定义网络,这种方法可以给每一层命名。
layer1=nn.Sequential()
laye