This is an implementation of a two-layer fully-connected network, trained for 500 iterations. It defines a custom MyReLU function; by implementing the forward and backward methods, the function can be called directly in the forward pass.
# PyTorch: Defining new autograd functions
# -*- coding: utf-8 -*-
import torch
class MyReLU(torch.autograd.Function):
    """Custom ReLU implemented by subclassing torch.autograd.Function
    and defining its forward and backward passes."""

    @staticmethod
    def forward(ctx, input):
        # Save the input tensor so it can be reused in backward.
        ctx.save_for_backward(input)
        return input.clamp(min=0)

    @staticmethod
    def backward(ctx, grad_output):
        # ReLU gradient: pass the upstream gradient through where the
        # input was positive, and zero it where the input was negative.
        input, = ctx.saved_tensors
        grad_input = grad_output.clone()
        grad_input[input < 0] = 0
        return grad_input
dtype = torch.float
device = torch.device('cpu')
# N is batch size; D_in is input dimension;
# H is hidden dimension; D_out is output dimension.
N, D_in, H, D_out = 64, 1000, 100, 10
# Create random Tensors to hold input and outputs.
x = torch.randn(N, D_in, device=device, dtype=dtype)
y = torch.randn(N, D_out, device=device, dtype=dtype)
# Create random Tensors for weights.
w1 = torch.randn(D_in, H, device=device, dtype=dtype, requires_grad=True)
w2 = torch.randn(H, D_out, device=device, dtype=dtype, requires_grad=True)
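# What follows is a minimal sketch (not part of the original listing) of how
# the 500 training iterations mentioned above would use MyReLU: the custom op
# is invoked through MyReLU.apply, and the learning rate of 1e-6 is an
# assumed value chosen for illustration.
learning_rate = 1e-6
for t in range(500):
    # Forward pass: apply the custom ReLU between the two linear layers.
    relu = MyReLU.apply
    y_pred = relu(x.mm(w1)).mm(w2)

    # Compute and print the scalar loss (sum of squared errors).
    loss = (y_pred - y).pow(2).sum()
    print(t, loss.item())

    # Backward pass: autograd calls MyReLU.backward automatically.
    loss.backward()

    # Manually update the weights with gradient descent, then zero the
    # gradients for the next iteration.
    with torch.no_grad():
        w1 -= learning_rate * w1.grad
        w2 -= learning_rate * w2.grad
        w1.grad.zero_()
        w2.grad.zero_()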