Bilibili video: 《PyTorch深度学习实践》 (complete series on bilibili), Lecture 4 homework.
Here I train on the entire dataset at every step (batch gradient descent); if the training set were large, mini-batch training would be used instead. A sketch of a mini-batch variant is included after the full script below.
import torch
import matplotlib.pyplot as plt
import numpy as np
# dataset
x_data = [1.0, 2.0, 3.0]
y_data = [2.0, 4.0, 6.0]
# set initial values and enable gradient tracking
w1 = torch.Tensor([1.0])
w2 = torch.Tensor([1.0])
b = torch.Tensor([1.0])
w1.requires_grad = True
w2.requires_grad = True
b.requires_grad = True
def forward(x):
    return w1 * (x ** 2) + w2 * x + b

def loss(x, y):
    y_pred = forward(x)
    return (y - y_pred) ** 2
# list of per-epoch loss values, for plotting
loss_list = []
for epoch in range(100):
    loss_val = torch.Tensor([0.0])
    for x, y in zip(x_data, y_data):
        loss_val += loss(x, y)
    loss_val = loss_val / len(x_data)  # mean squared error over the whole dataset
    loss_list.append(loss_val.item())
    loss_val.backward()  # backpropagation
    w1.data = w1.data - 0.01 * w1.grad.data  # update via .data: we only want to change the value, not record this operation in the computation graph
    w2.data = w2.data - 0.01 * w2.grad.data
    b.data = b.data - 0.01 * b.grad.data
    w1.grad.data.zero_()  # clear gradients so they do not accumulate across epochs
    w2.grad.data.zero_()
    b.grad.data.zero_()
    print(f'epoch={epoch}, loss_val={loss_val.item()}, w1={w1.data}, w2={w2.data}, b={b.data}')
# print the prediction for x = 4
print(forward(4).item())
# plot the loss curve
plt.plot(np.arange(100), loss_list)
plt.xlabel('epoch')
plt.ylabel('loss')
plt.show()
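The update step above writes to w1.data, w2.data, and b.data so that the parameter update itself is not recorded in the computation graph. The same effect is more commonly written with torch.no_grad(); the snippet below is a minimal sketch of that alternative, reusing the w1, w2, b, and loss_val defined in the script above (it is not part of the original homework).

# Alternative update step using torch.no_grad() instead of .data (sketch only)
loss_val.backward()
with torch.no_grad():  # operations inside this block are not tracked by autograd
    w1 -= 0.01 * w1.grad
    w2 -= 0.01 * w2.grad
    b -= 0.01 * b.grad
w1.grad.zero_()  # clear gradients before the next backward pass
w2.grad.zero_()
b.grad.zero_()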
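As noted at the top, the script trains on all three samples at once. Below is a hedged sketch of how a mini-batch version could look; the batch size of 2, the use of torch.optim.SGD, and the tensor-valued x_data/y_data are my own illustrative choices, not something prescribed by the lecture.

# Mini-batch training sketch (illustrative; assumptions noted above)
import torch

x_data = torch.tensor([1.0, 2.0, 3.0])
y_data = torch.tensor([2.0, 4.0, 6.0])

w1 = torch.tensor([1.0], requires_grad=True)
w2 = torch.tensor([1.0], requires_grad=True)
b = torch.tensor([1.0], requires_grad=True)

def forward(x):
    return w1 * (x ** 2) + w2 * x + b

optimizer = torch.optim.SGD([w1, w2, b], lr=0.01)
batch_size = 2  # arbitrary choice for this tiny dataset

for epoch in range(100):
    perm = torch.randperm(len(x_data))  # shuffle sample order each epoch
    for start in range(0, len(x_data), batch_size):
        idx = perm[start:start + batch_size]  # indices of the current mini-batch
        x_batch, y_batch = x_data[idx], y_data[idx]
        loss = ((y_batch - forward(x_batch)) ** 2).mean()  # MSE over the mini-batch
        optimizer.zero_grad()
        loss.backward()
        optimizer.step()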
If there are any mistakes, please point them out. Thanks!