1. Quick construction
- net2 can be used directly as the network; Sequential stacks the layers in the order they are listed.
import torch

net2 = torch.nn.Sequential(
    torch.nn.Linear(2, 10),   # input layer: 2 features -> 10 hidden units
    torch.nn.ReLU(),          # activation
    torch.nn.Linear(10, 2),   # output layer: 10 hidden units -> 2 outputs
)
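A quick sanity check that the stacked model runs end to end (a minimal sketch; the random 4x2 batch is just an illustrative assumption, not from the original):

x = torch.randn(4, 2)   # hypothetical batch: 4 samples, 2 features each
out = net2(x)           # forward pass: Linear -> ReLU -> Linear
print(out.shape)        # torch.Size([4, 2])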
2. Passing sizes as constructor parameters
import torch
import torch.nn.functional as F

class Net(torch.nn.Module):
    def __init__(self, n_features, n_hidden, n_output):
        super(Net, self).__init__()
        self.hidden = torch.nn.Linear(n_features, n_hidden)  # hidden layer
        self.out = torch.nn.Linear(n_hidden, n_output)       # output layer

    def forward(self, x):
        x = F.relu(self.hidden(x))  # hidden layer + ReLU activation
        x = self.out(x)             # linear output, no activation
        return x
* You must pass the layer sizes (the number of neurons per layer) in yourself:
net = Net(2, 10, 2)  # 2 input features, 10 hidden units, 2 outputs
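Printing the module lists the registered layers, which is a handy sanity check (a minimal sketch; the exact output format varies slightly across PyTorch versions):

print(net)
# Net(
#   (hidden): Linear(in_features=2, out_features=10, bias=True)
#   (out): Linear(in_features=10, out_features=2, bias=True)
# )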
3. Hard-coding the sizes inside the class
class Net(torch.nn.Module):
    def __init__(self):
        super(Net, self).__init__()
        self.hidden = torch.nn.Linear(1, 20)   # layer sizes fixed in the class
        self.predict = torch.nn.Linear(20, 1)

    def forward(self, x):
        x = F.relu(self.hidden(x))  # hidden layer + ReLU activation
        x = self.predict(x)         # linear output
        return x
# one identical copy of the net for each optimizer to be compared
net_SGD = Net()
net_Momentum = Net()
net_RMSprop = Net()
net_Adam = Net()
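These four copies are typically paired with four different optimizers, which also produces the nets, optimizers, and losses_his lists used in the training loop of section 4. A minimal sketch of that pairing (the learning rate LR and the momentum/alpha/betas values are illustrative assumptions, not from the original):

LR = 0.01  # assumed learning rate
opt_SGD = torch.optim.SGD(net_SGD.parameters(), lr=LR)
opt_Momentum = torch.optim.SGD(net_Momentum.parameters(), lr=LR, momentum=0.8)
opt_RMSprop = torch.optim.RMSprop(net_RMSprop.parameters(), lr=LR, alpha=0.9)
opt_Adam = torch.optim.Adam(net_Adam.parameters(), lr=LR, betas=(0.9, 0.99))

nets = [net_SGD, net_Momentum, net_RMSprop, net_Adam]
optimizers = [opt_SGD, opt_Momentum, opt_RMSprop, opt_Adam]
losses_his = [[], [], [], []]  # one loss-history list per net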
4. Training data
import torch.utils.data as Data

# x is the training data, y the labels
torch_dataset = Data.TensorDataset(x, y)
# DataLoader batches the dataset and shuffles it each epoch
loader = Data.DataLoader(dataset=torch_dataset, batch_size=BATCH_SIZE, shuffle=True)
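For this to run, x, y, and BATCH_SIZE must already be defined. A minimal toy setup (all values here are illustrative assumptions):

import torch

BATCH_SIZE = 5
x = torch.unsqueeze(torch.linspace(-1, 1, 10), dim=1)  # shape (10, 1): 10 samples, 1 feature
y = x.pow(2)                                           # toy regression labels, same shape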
- Iterative training
Note on enumerate in the for loop below: step is the index of the current batch, and batch_x, batch_y are that batch's training data and labels.
# run through the whole dataset 3 times
for epoch in range(3):
    for step, (batch_x, batch_y) in enumerate(loader):  # each step, loader yields one batch of BATCH_SIZE samples
        print('Epoch: ', epoch, '| Step: ', step, '| batch x: ',
              batch_x.numpy(), '| batch y: ', batch_y.numpy())
- Training several nets in parallel with a zip for loop: net, opt, and l_his iterate over the entries of the lists nets, optimizers, and losses_his respectively (the full nesting is sketched after the loop).
for net, opt, l_his in zip(nets, optimizers, losses_his):
    output = net(b_x)               # get output for every net
    loss = loss_func(output, b_y)   # compute loss for every net
    opt.zero_grad()                 # clear gradients for next train
    loss.backward()                 # backpropagation, compute gradients
    opt.step()                      # apply gradients
    l_his.append(loss.data.numpy()) # loss recorder
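Here b_x, b_y, and loss_func come from surrounding code the excerpt omits. A sketch of the full nesting, assuming the loader from section 4, the nets/optimizers/losses_his lists from section 3, and an MSE loss (the loss choice is an assumption):

loss_func = torch.nn.MSELoss()  # assumed regression loss

for epoch in range(3):
    for step, (b_x, b_y) in enumerate(loader):   # b_x shape (BATCH_SIZE, 1) matches Linear(1, 20)
        for net, opt, l_his in zip(nets, optimizers, losses_his):
            output = net(b_x)
            loss = loss_func(output, b_y)
            opt.zero_grad()
            loss.backward()
            opt.step()
            l_his.append(loss.data.numpy())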
See Python's built-in zip for further reading.
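A stand-alone illustration of how zip pairs items up position by position (the lists here are hypothetical):

labels = ['SGD', 'Momentum', 'RMSprop', 'Adam']
numbers = [0, 1, 2, 3]
for label, number in zip(labels, numbers):
    print(label, number)   # prints 'SGD 0', then 'Momentum 1', and so on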