"""
随机梯度下降
描述:每次选取一个样本参与梯度计算
场景:一元线性回归
"""
import numpy as np
np.random.seed(1)
X = 2 * np.random.rand(100, 1)
Y = 4 + 3 * np.random.randn(100, 1)
X = np.c_[np.ones((100, 1)), X]
m = 100
theta = np.random.randn(2, 1)
learning_rate = 0.001
n_epochs = 10000
if __name__ == "__main__":
for epoch in range(n_epochs):
for i in range(m):
random_index = np.random.randint(m)
xi = X[random_index:random_index+1]
yi = Y[random_index:random_index+1]
gradients = xi.T.dot(xi.dot(theta) - yi)
theta -= learning_rate * gradients
print(theta)
# Sample output:
# [[ 4.65856506]
#  [-0.47926501]]
# Even without any hyperparameter tuning, this is genuinely unstable...