# Manually implementing logistic regression
import numpy as np
# Sigmoid function
def sigmoid(z):
    return 1 / (1 + np.exp(-z))
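# A quick sanity check of sigmoid at a few reference points (the expected values
# below are standard facts about the logistic function, not from the original article):
print(sigmoid(np.array([-2.0, 0.0, 2.0])))  # ~[0.119, 0.5, 0.881]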
def compute_cost(y, y_pred):
    """Compute the logistic regression cost (binary cross-entropy)."""
    m = len(y)
    # Clip predictions away from exactly 0 or 1 so np.log never sees 0.
    eps = 1e-15
    y_pred = np.clip(y_pred, eps, 1 - eps)
    cost = -(1 / m) * np.sum(y * np.log(y_pred) + (1 - y) * np.log(1 - y_pred))
    return cost
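# A minimal sanity check of compute_cost (the toy labels and probabilities here
# are illustrative assumptions, not from the original article): confident correct
# predictions should yield a small cost, confident wrong ones a large cost.
y_check = np.array([1.0, 0.0, 1.0])
print(compute_cost(y_check, np.array([0.9, 0.1, 0.9])))  # ~0.105
print(compute_cost(y_check, np.array([0.1, 0.9, 0.1])))  # ~2.303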
def train_logistic_regression(X, y, learning_rate=0.01, num_iterations=1000):
    # X.shape gives the shape of the feature matrix X: m is the number of
    # samples and n is the number of features.
    # weights: weight vector initialized to all zeros.
    # bias: bias term initialized to zero.
    m, n = X.shape
    weights = np.zeros(n)  # initialize the weights
    bias = 0  # initialize the bias
    for i in range(num_iterations):  # run num_iterations gradient-descent steps
        # Compute the predictions
        z = np.dot(X, weights) + bias
        y_pred = sigmoid(z)
        # Compute the loss
        loss = compute_cost(y, y_pred)
        # Compute the gradients
        dz = y_pred - y
        dw = np.dot(X.T, dz) / m
        db = np.sum(dz) / m
        # Update the weights and bias
        weights -= learning_rate * dw  # update the weights using the learning rate
        bias -= learning_rate * db  # update the bias using the learning rate
        if i % 100 == 0:  # print the loss every 100 iterations
            print(f"Iteration {i}: Loss = {loss}")
    return weights, bias
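# Why the gradients above take this form: with the sigmoid/cross-entropy pairing,
# the derivative of the cost with respect to z simplifies to (y_pred - y), so
#     dJ/dw = (1/m) * X^T (y_pred - y)    and    dJ/db = (1/m) * sum(y_pred - y),
# which is exactly what dw and db compute in the training loop.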
# Prediction function
def predict(X, weights, bias):
    z = np.dot(X, weights) + bias
    y_pred = sigmoid(z)
    return np.round(y_pred)  # threshold the probabilities at 0.5
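# End-to-end usage sketch. The synthetic data, labeling rule, and hyperparameters
# below are illustrative assumptions, not from the original article.
np.random.seed(0)
X_demo = np.random.randn(200, 2)  # 200 samples, 2 features
y_demo = (X_demo[:, 0] + X_demo[:, 1] > 0).astype(float)  # linearly separable labels
w, b = train_logistic_regression(X_demo, y_demo, learning_rate=0.1, num_iterations=1000)
preds = predict(X_demo, w, b)
print("Training accuracy:", np.mean(preds == y_demo))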
A manual implementation of the logistic regression algorithm from machine learning, using Python's NumPy library.