Building a fully connected layer
1) The class inherits from nn.Module.
2) Call the parent class constructor nn.Module.__init__(self).
3) Wrap the learnable parameters in nn.Parameter().
4) Define the instance method forward.
5) The input is a Variable.
import torch as t
import torch.nn as nn
from torch.autograd import Variable as V

class Linear(nn.Module):
    def __init__(self, in_feature, out_feature):
        nn.Module.__init__(self)
        # Learnable weight and bias, registered as parameters of the module
        self.w = nn.Parameter(t.randn(in_feature, out_feature))
        self.b = nn.Parameter(t.randn(out_feature))
    def forward(self, x):
        x = x.mm(self.w)                # matrix multiply: (N, in) x (in, out)
        return x + self.b.expand_as(x)  # broadcast the bias to every row

input = V(t.randn(2, 4))
layer = Linear(4, 3)
output = layer(input)  # calling the module invokes forward via __call__
for name, parameter in layer.named_parameters():
    print(name, parameter)  # w and b
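For comparison only (not part of the notes above), PyTorch already ships this layer as the built-in nn.Linear; a minimal sketch of the equivalent usage is below. Note that nn.Linear stores the weight transposed, with shape (out_features, in_features), and registers its parameters under the names weight and bias.

import torch as t
import torch.nn as nn
from torch.autograd import Variable as V

# Built-in equivalent of the hand-written Linear above (illustrative sketch)
layer = nn.Linear(4, 3)   # weight: (3, 4), bias: (3,)
input = V(t.randn(2, 4))
output = layer(input)     # same (2, 3) output shape as Linear(4, 3)
for name, parameter in layer.named_parameters():
    print(name, parameter.size())  # weight and bias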
Multilayer perceptron with a sigmoid activation function
1) Build the fully connected Linear class as above.
2) Build the multilayer perceptron class.
3) Call nn.Module.__init__(self) in __init__.
4) Implement the computation in the forward method.
import torch as t
import torch.nn as nn
from torch.autograd import Variable as V

class Linear(nn.Module):
    def __init__(self, in_feature, out_feature):
        nn.Module.__init__(self)
        self.w = nn.Parameter(t.randn(in_feature, out_feature))
        self.b = nn.Parameter(t.randn(out_feature))
    def forward(self, x):
        x = x.mm(self.w)
        return x + self.b.expand_as(x)

# Multilayer perceptron: two Linear layers with a sigmoid in between
class Perceptron(nn.Module):
    def __init__(self, in_feature, hide_feature, out_feature):
        nn.Module.__init__(self)
        # Sub-modules assigned as attributes are registered automatically,
        # so their parameters appear in named_parameters()
        self.layer1 = Linear(in_feature, hide_feature)
        self.layer2 = Linear(hide_feature, out_feature)
    def forward(self, x):
        x = self.layer1(x)
        x = t.sigmoid(x)   # sigmoid activation between the two layers
        x = self.layer2(x)
        return x

Percet = Perceptron(4, 3, 1)
input = V(t.linspace(1, 4, 4).view(1, 4))  # shape (1, 4): a single sample
output = Percet(input)
print(output)
for name, parameter in Percet.named_parameters():
    print(name, parameter)  # layer1.w, layer1.b, layer2.w, layer2.b
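As a side note (a sketch, not the original code), the same network can be written more compactly with the built-in nn.Linear, nn.Sigmoid, and nn.Sequential; in PyTorch 0.4 and later, Variable has been merged into Tensor, so plain tensors can be passed directly.

import torch as t
import torch.nn as nn

# Equivalent perceptron built from standard modules (illustrative sketch)
model = nn.Sequential(
    nn.Linear(4, 3),   # in_feature -> hide_feature
    nn.Sigmoid(),      # sigmoid activation
    nn.Linear(3, 1),   # hide_feature -> out_feature
)
input = t.linspace(1, 4, 4).view(1, 4)
output = model(input)
print(output)
for name, parameter in model.named_parameters():
    print(name, parameter.size())  # 0.weight, 0.bias, 2.weight, 2.bias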