import numpy as np

# GRADED FUNCTION: forward_propagation
def forward_propagation(x, theta):
    """
    Implement the linear forward propagation (compute J) presented in Figure 1 (J(theta) = theta * x).
    """
    J = theta * x
    return J
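
# Illustrative sanity check (the values below are assumptions, not part of the
# graded code): with x = 2 and theta = 4, J = theta * x = 8.
x, theta = 2, 4
J = forward_propagation(x, theta)
print("J = " + str(J))  # prints: J = 8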

# GRADED FUNCTION: backward_propagation
def backward_propagation(x, theta):
    """
    Computes the derivative of J with respect to theta (see Figure 1).
    """
    dtheta = x
    return dtheta
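
# Since J(theta) = theta * x is linear in theta, its derivative is simply x.
# Illustrative check (values are assumptions, not part of the graded code):
dtheta = backward_propagation(2, 4)
print("dtheta = " + str(dtheta))  # prints: dtheta = 2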

# GRADED FUNCTION: gradient_check
def gradient_check(x, theta, epsilon=1e-7):
    """
    Implement gradient checking: compare the analytic gradient from
    backward_propagation() with a numerical approximation of the gradient.
    """
    # Compute gradapprox using the two-sided difference formula.
    thetaplus = theta + epsilon                        # Step 1
    thetaminus = theta - epsilon                       # Step 2
    J_plus = forward_propagation(x, thetaplus)         # Step 3
    J_minus = forward_propagation(x, thetaminus)       # Step 4
    gradapprox = (J_plus - J_minus) / (2 * epsilon)    # Step 5

    # Check if gradapprox is close enough to the output of backward_propagation()
    grad = backward_propagation(x, theta)

    numerator = np.linalg.norm(grad - gradapprox)                     # Step 1': compute the numerator
    denominator = np.linalg.norm(grad) + np.linalg.norm(gradapprox)   # Step 2': compute the denominator
    difference = numerator / denominator                              # Step 3': divide them

    if difference < 1e-7:
        print("The gradient is correct!")
    else:
        print("The gradient is wrong!")

    return difference
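
# Illustrative usage (values are assumptions, not part of the graded code).
# Because backward_propagation() returns the exact derivative of J = theta * x,
# the relative difference should come out far below the 1e-7 threshold.
x, theta = 2, 4
difference = gradient_check(x, theta)
print("difference = " + str(difference))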