1. 算法推导
2. 梯度下降实现
def batchGradientDescent(X, y, theta, alpha, repeat):
    """Fit linear regression by batch gradient descent.

    Minimizes J(theta) = (1 / 2m) * ||X.theta - y||^2.

    Parameters
    ----------
    X : ndarray, shape (m, n) -- design matrix (include a ones column for bias).
    y : ndarray, shape (m,) -- target values.
    theta : ndarray, shape (n,) -- initial parameters (not mutated in place).
    alpha : float -- learning rate.
    repeat : int -- number of iterations.

    Returns
    -------
    (theta, cost) : final parameters and the per-iteration cost history.
    """
    m = len(X)
    cost = []
    for _ in range(repeat):
        # Gradient of J: (1/m) * X^T (X.theta - y)
        loss = np.dot(X, theta) - y
        grad = np.dot(X.T, loss) / m
        theta = theta - alpha * grad
        # Bug fixes vs. original: the cost multiplied by m instead of
        # dividing (0.5 * m * sum), which is inconsistent with the /m in
        # the gradient; and it used theta.T, which breaks when theta is
        # a 2-D column vector.
        cost.append(0.5 / m * np.sum(np.square(np.dot(X, theta) - y)))
    return theta, cost
3. 矩阵实现
def linear_regression_mat(data, target):
    """Solve ordinary least squares via the normal equation.

    theta = (X^T X)^{-1} X^T y, falling back to the Moore-Penrose
    pseudo-inverse (minimum-norm least-squares solution) when X^T X is
    rank-deficient.

    Parameters
    ----------
    data : array-like, shape (m, n) -- design matrix X.
    target : array-like, shape (m,) -- response vector y.

    Returns
    -------
    theta : ndarray, shape (n,) -- fitted coefficients.
    """
    # np.asarray instead of the deprecated np.mat.
    X = np.asarray(data, dtype=float)
    y = np.asarray(target, dtype=float)
    gram = X.T.dot(X)
    # An exact `det == 0` test is unreliable in floating point; use the
    # matrix rank to detect a singular Gram matrix instead.
    if np.linalg.matrix_rank(gram) < gram.shape[0]:
        # Original code called np.linalg.solve(X.T, X) here, which solves
        # the wrong system (and requires X to be square); the pseudo-inverse
        # gives the minimum-norm least-squares solution.
        print('Warning: X^T X is singular; using pseudo-inverse.')
        return np.linalg.pinv(X).dot(y)
    return np.linalg.inv(gram).dot(X.T).dot(y)
4. sklearn实现
# Fit/predict with scikit-learn's closed-form LinearRegression.
# Assumes X_train / y_train / X_test are defined by the caller and that
# sklearn.linear_model has been imported -- TODO confirm against the
# surrounding notebook/script.
lr = sklearn.linear_model.LinearRegression()
lr.fit(X_train, y_train)
y_pred = lr.predict(X_test)
# Original used Python-2-only print statements; with a single argument,
# print(...) behaves identically under Python 2 and 3.
print(lr.coef_)       # fitted weights, shape (n_features,)
print(lr.intercept_)  # fitted bias term
https://github.com/lmm915815/my_ML_python/tree/master/linear_regression