Problem link: Deep-ML
The key to this problem is keeping the matrix dimensions consistent in every multiplication; once the shapes line up, the final theta falls out directly.
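For reference, the loop below implements gradient descent on the mean squared error cost $J(\theta) = \frac{1}{2m}\lVert X\theta - y\rVert^2$, and writing the update out makes the dimension bookkeeping explicit:

$$
\nabla_\theta J(\theta) = \frac{1}{m} X^{\top}(X\theta - y), \qquad
\theta \leftarrow \theta - \frac{\alpha}{m}\, X^{\top}(X\theta - y)
$$

Here $X$ is $m \times n$, $\theta$ is $n \times 1$, so $X\theta - y$ is $m \times 1$ and $X^{\top}(X\theta - y)$ is $n \times 1$, matching the shape of $\theta$.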
import numpy as np


def linear_regression_gradient_descent(X: np.ndarray, y: np.ndarray, alpha: float, iterations: int) -> np.ndarray:
    """
    Linear regression via batch gradient descent.
    :param X: design matrix of shape (m, n)
    :param y: target vector of shape (m,) or (m, 1)
    :param alpha: learning rate
    :param iterations: number of gradient descent steps
    :return: parameter vector theta of shape (n, 1)
    """
    m, n = X.shape
    theta = np.zeros((n, 1))
    y = y.reshape(m, 1)  # ensure y is a column vector
    for i in range(iterations):
        prediction = np.dot(X, theta)   # m * 1
        error = prediction - y          # m * 1
        gradient = np.dot(X.T, error)   # n * 1
        theta = theta - alpha * (1 / m) * gradient
    return theta
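As a quick sanity check (my own example, not one of the Deep-ML test cases, and assuming the function above is in scope): on data that lies exactly on the line y = x, theta should drift toward [0, 1] as the number of iterations grows.

```python
X = np.array([[1.0, 1.0],
              [1.0, 2.0],
              [1.0, 3.0]])   # first column is the bias term
y = np.array([1.0, 2.0, 3.0])

theta = linear_regression_gradient_descent(X, y, alpha=0.01, iterations=1000)
print(theta.ravel())  # approaches [0, 1] with more iterations, since y = x exactly
```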