Core code 1:
# Gradient descent: mean squared error cost for simple linear regression
def cost(theta0, theta1, x, y):
    J = 0
    m = len(x)
    for i in range(m):
        # Model prediction for the i-th sample
        h = theta1 * x[i] + theta0
        J += (h - y[i]) ** 2
    J /= (2 * m)
    return J
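For reference, the loop above computes the standard least-squares objective; written out (a minimal rendering of what the code does, where m is the number of samples):

J(\theta_0, \theta_1) = \frac{1}{2m} \sum_{i=1}^{m} \left(\theta_1 x_i + \theta_0 - y_i\right)^2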
Core code 2:
# Partial derivatives used to update the parameters (x, y are NumPy arrays)
def partial_cost_theta1(theta0, theta1, x, y):
    h = theta0 + theta1 * x
    # Partial derivative of the cost with respect to theta1
    diff = (h - y) * x
    partial = diff.sum() / (x.shape[0])
    return partial

def partial_cost_theta0(theta0, theta1, x, y):
    h = theta0 + theta1 * x
    # Partial derivative of the cost with respect to theta0
    diff = (h - y)
    partial = diff.sum() / (x.shape[0])
    return partial
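As a quick sanity check, both partials should vanish at parameters that fit the data exactly (the toy arrays below are illustrative, not from the original):

import numpy as np

x = np.array([1.0, 2.0, 3.0])
y = 1.0 + 2.0 * x  # Points lying exactly on y = 1 + 2x

# At theta0 = 1, theta1 = 2 the residuals are zero, so both gradients are zero
print(partial_cost_theta0(1.0, 2.0, x, y))  # 0.0
print(partial_cost_theta1(1.0, 2.0, x, y))  # 0.0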
Core code 3:
def gradient_descent(x, y, alpha=0.1, theta0=0, theta1=0):
    max_epochs = 1000  # Maximum number of iterations
    counter = 0        # Iteration counter
    # Initial value of the objective (loss) function; the arguments are passed in
    # the order cost(theta0, theta1, x, y), and the function's own x, y parameters
    # are used rather than the global pga.distance / pga.accuracy dataset
    c = cost(theta0, theta1, x, y)
    costs = [c]  # Cost history, starting from the initial cost
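The section breaks off mid-definition, so the rest of the loop is missing. Below is a minimal sketch of how gradient_descent plausibly continues, assuming a simultaneous update of both parameters and a small stopping tolerance; convergence_thres, the cprev initialization trick, and the returned dict are assumptions, not from the original:

import numpy as np

def gradient_descent(x, y, alpha=0.1, theta0=0, theta1=0):
    max_epochs = 1000          # Maximum number of iterations
    counter = 0                # Iteration counter
    c = cost(theta0, theta1, x, y)
    costs = [c]                # Cost history
    convergence_thres = 1e-9   # Assumed stopping tolerance
    cprev = c + 10             # Guarantees the loop body runs at least once
    # Stop when the cost change is tiny or the iteration budget is spent
    while np.abs(cprev - c) > convergence_thres and counter < max_epochs:
        cprev = c
        # Compute both gradient steps before applying them (simultaneous update)
        update0 = alpha * partial_cost_theta0(theta0, theta1, x, y)
        update1 = alpha * partial_cost_theta1(theta0, theta1, x, y)
        theta0 -= update0
        theta1 -= update1
        c = cost(theta0, theta1, x, y)
        costs.append(c)
        counter += 1
    return {'theta0': theta0, 'theta1': theta1, 'costs': costs}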