Learning objectives:
Use non-linear units such as ReLU to improve model performance, build a deep neural network, and implement an easy-to-use neural network class.
Functions to define:
import numpy as np

def initialize_parameters_deep(layer_dims):  # layer_dims is an array of the number of units in each layer
    np.random.seed(1)
    parameters = {}
    L = len(layer_dims)  # number of layers, including the input layer
    for l in range(1, L):
        parameters['W' + str(l)] = np.random.randn(layer_dims[l], layer_dims[l-1]) * 0.01
        parameters['b' + str(l)] = np.zeros((layer_dims[l], 1))
    return parameters
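
A quick sanity check of the returned shapes, using hypothetical layer sizes [5, 4, 3] (5 input features, one hidden layer of 4 units, 3 output units):

# W1 should be (4, 5), b1 (4, 1), W2 (3, 4), b2 (3, 1).
parameters = initialize_parameters_deep([5, 4, 3])
for key, value in parameters.items():
    print(key, value.shape)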
-------------------------------------------------------
def linear_forward(A, W, b):
    Z = np.dot(W, A) + b  # linear part of the layer: Z = W·A + b
    cache = (A, W, b)  # cached for the backward pass
    return Z, cache
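
A minimal usage sketch with hypothetical shapes: if W is (n_l, n_prev) and A is (n_prev, m), then Z comes out as (n_l, m), with b broadcast across the m examples:

# Hypothetical example: 3 units in this layer, 5 in the previous, batch of 2.
A = np.random.randn(5, 2)
W = np.random.randn(3, 5)
b = np.zeros((3, 1))  # broadcasts across the batch dimension
Z, cache = linear_forward(A, W, b)
print(Z.shape)  # (3, 2)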
-----------------------------------------------------
def linear_activation_forward(A_prev, W, b, activation):
    if activation == 'sigmoid':
        Z, linear_cache = linear_forward(A_prev, W, b)
        A, activation_cache = sigmoid(Z)
    elif activation == 'relu':
        Z, linear_cache = linear_forward(A_prev, W, b)
        A, activation_cache = relu(Z)
    cache = (linear_cache, activation_cache)  # for backward propagation
    return A, cache
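
linear_activation_forward assumes sigmoid and relu helpers that return both the activation and a cache of Z. A minimal sketch of what those helpers might look like (not necessarily the course-provided versions):

def sigmoid(Z):
    # Element-wise sigmoid; Z is cached for the backward pass.
    A = 1 / (1 + np.exp(-Z))
    return A, Z

def relu(Z):
    # Element-wise ReLU; Z is cached for the backward pass.
    A = np.maximum(0, Z)
    return A, Z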