2*2*2简单神经网络的实现

chengchaowei — 反向传播的通俗易懂实现

import numpy
def sigmoid(Z):
    """Logistic activation, applied element-wise: 1 / (1 + e^-Z)."""
    return 1 / (1 + numpy.exp(-Z))
def sigmoid_backward(A):
    """Sigmoid derivative expressed via its output A: sigma'(Z) = A * (1 - A)."""
    slope = A * (1 - A)
    return slope
def forward(W, A, b):
    """One fully-connected layer: returns sigmoid(W @ A + b)."""
    # Pre-activation, then the logistic function applied element-wise
    # (sigmoid inlined so the layer reads as a single expression chain).
    Z = numpy.dot(W, A) + b
    return 1 / (1 + numpy.exp(-Z))
# 形参通过加上'_'来避免与实参名称发生冲突
# Trailing '_' on parameter names avoids clashing with the module-level
# variables of the same name (original file convention).
def cost_compute(output_, Y_):
    """Per-unit squared-error cost (output - target)^2 / 2; prints and returns it."""
    diff = output_ - Y_
    cost_ = 0.5 * numpy.square(diff)
    print(f'cost = {cost_}')
    return cost_
def backward_out(W, A, output_, Y_, learningRate_):
    """Gradient-descent step for the output layer.

    dZ = dCost/dZ = (output - target) * sigma'(Z), with the sigmoid
    derivative written in terms of the activation: output * (1 - output).
    W is updated in place; returns the updated W plus dZ so the previous
    layer can continue the backward pass.
    """
    error = output_ - Y_
    dZ = error * output_ * (1 - output_)
    W -= learningRate_ * numpy.dot(dZ, A.T)
    return W, dZ
def backward_hide(W_prev, W_current, dZ_, A_current, X, learningRate_):
    """Gradient-descent step for the hidden layer.

    Propagates the next layer's dZ_ back through that layer's weights:
        dZ = (W_prev.T @ dZ_) * sigma'(Z_hidden)
    then updates W_current in place with dw = dZ @ X.T.

    BUG FIX: the error must flow backwards through W_prev TRANSPOSED
    (dA_hidden = W_prev.T @ dZ_next). The original computed
    numpy.dot(W_prev, dZ_) without the transpose, which mixes up each
    hidden unit's error contribution whenever W_prev is not symmetric.

    Returns (W_current, dZ).
    """
    # W_prev: (2, 2), dZ_: (2, 1), A_current * (1 - A_current): (2, 1) -> dZ: (2, 1)
    dZ = numpy.dot(W_prev.T, dZ_) * A_current * (1 - A_current)
    dw = numpy.dot(dZ, X.T)
    W_current -= learningRate_ * dw
    return W_current, dZ
# --- Parameter initialization ---
# Constants follow Matt Mazur's worked backprop example:
# input X, target Y, and the initial weights/biases of both layers.
X = numpy.array([[0.05], [0.1]])
Y = numpy.array([[0.01], [0.99]])
learningRate = 0.5
W1 = numpy.array([[0.15, 0.2],
                  [0.25, 0.3]])   # input -> hidden weights
W2 = numpy.array([[0.4, 0.45],
                  [0.5, 0.55]])   # hidden -> output weights
b1 = numpy.array([[0.35], [0.35]])
b2 = numpy.array([[0.6], [0.6]])

# --- Training loop ---
for i in range(100000):
    A1 = forward(W1, X, b1)
    output = forward(W2, A1, b2)
    cost = cost_compute(output, Y)
    # BUG FIX: the hidden-layer gradient must use the output-layer weights
    # as they were BEFORE this step's update. backward_out mutates W2 in
    # place (W -= ...), so snapshot it first and pass the snapshot on.
    W2_old = W2.copy()
    W2, dZ = backward_out(W2, A1, output, Y, learningRate)
    W1, dZ = backward_hide(W2_old, W1, dZ, A1, X, learningRate)

print("最终输出:", output)
print("目标输出:", Y)
print("偏差值为:", Y - output)
训练结果:

训练结果

评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值