#自学中,有误欢迎交流!!!(记录我的学习过程)
#该篇是上篇的续集
# Linear regression trained with PyTorch's high-level nn APIs on a synthetic
# dataset: y = X @ true_w + true_b + Gaussian noise. Trains a single Linear
# layer with SGD + MSELoss and prints the per-epoch loss.
import torch
import numpy as np
import torch.utils.data as Data
import torch.nn as nn
from torch import optim
from torch.nn import init

num_inputs = 2       # feature dimension (was `len`, which shadowed the builtin)
num_examples = 1000  # number of synthetic samples

# Ground-truth parameters used to generate the data.
true_w = torch.tensor([2.0, -3.4])
true_b = 4.2

# Features drawn with std 1. The original used std 0.01 — the same magnitude
# as the observation noise — so the signal was drowned out and true_w could
# not be recovered in a few epochs.
features = torch.normal(0, 1, (num_examples, num_inputs))
labels = true_w[0] * features[:, 0] + true_w[1] * features[:, 1] + true_b
# Additive observation noise. Cast to labels.dtype: np.random.normal returns
# float64, and an in-place float64 += into a float32 tensor raises in modern
# PyTorch ("result type Double can't be cast to ... Float").
labels += torch.tensor(np.random.normal(0, 0.01, size=labels.size()),
                       dtype=labels.dtype)

# Mini-batch iterator over the synthetic dataset.
batch_size = 10
dataset = Data.TensorDataset(features, labels)
data_iter = Data.DataLoader(dataset, batch_size, shuffle=True)

# Peek at a single mini-batch.
for X, y in data_iter:
    print(X, '\n', y)
    break

# Model: one linear layer mapping num_inputs features to a scalar output.
# (The original created nn.Sequential() and immediately overwrote it — dead code.)
net = nn.Linear(num_inputs, 1)

# Initialize the *model's* parameters. The original mistakenly re-initialized
# true_w (destroying the ground truth) and called init.constant_ on a freshly
# created throwaway tensor, which had no effect on the network at all.
init.normal_(net.weight, mean=0, std=0.01)
init.constant_(net.bias, val=0.0)

# Loss function and optimizer.
loss = nn.MSELoss()
optimizer = optim.SGD(net.parameters(), lr=0.03)

# Training loop: standard zero-grad / backward / step per mini-batch.
num_epochs = 3
for epoch in range(num_epochs):
    for X, y in data_iter:
        l = loss(net(X), y.view(-1, 1))  # reshape targets to (batch, 1) to match output
        optimizer.zero_grad()
        l.backward()
        optimizer.step()
    print('epoch %d,loss: %f' % (epoch + 1, l))