import numpy as np
from sklearn import datasets
import matplotlib.pyplot as plt
from matplotlib.colors import ListedColormap
def sigmoid(x):
    """Logistic function 1 / (1 + exp(-x)), numerically stable.

    The naive form computes exp(-x), which overflows (RuntimeWarning)
    for large negative x. Here the exponential is always taken of a
    non-positive argument (-|x|), so it never overflows; the two
    branches are mathematically identical to the naive formula.
    """
    z = np.exp(-np.abs(x))
    # x >= 0: 1 / (1 + e^-x);  x < 0: e^x / (1 + e^x)
    return np.where(np.asarray(x) >= 0, 1.0 / (1.0 + z), z / (1.0 + z))
def dsigmoid(y):
    """Derivative of the logistic function, expressed via its output.

    Given y = sigmoid(x), the derivative d(sigmoid)/dx equals y * (1 - y),
    so backprop never needs to recompute the exponential.
    """
    complement = 1.0 - y
    return y * complement
def tanh(x):
    """Hyperbolic-tangent activation; thin wrapper over NumPy so it can
    be stored and swapped interchangeably with `sigmoid`."""
    return np.tanh(x)
def dtanh(y):
    """Derivative of tanh expressed via its output: given y = tanh(x),
    d(tanh)/dx = 1 - y^2."""
    return 1.0 - np.square(y)
class MLPClassifier:
def __init__(self, layers, activation='tanh', epochs=20, learning_rate=0.01):
# 第一层为输入层, 层数应等于样本特征数
# 最后一层为输出层
# 再算上中间层, 所以len(layers)最小为3
self.epochs = epochs
self.eta = learning_rate
self.layers = [np.zeros(layers[0])]
self.weights = []
self.biases = []
for i in range(len(layers) - 1):
# 这三个者的初始值是随意的
weight = np.random.random((layers[i + 1], layers[i]))
layer = np.ones(layers[i + 1])
bias = np.random.random(layers[i + 1])
self.weights.append(weight)
self.layers.append(layer)
self.biases.append(bias)
if activation == 'tanh':
self.activation = tanh
self.dactivation = dtanh
elif activation == 'sigmoid':
self.activation = sigmoid
self.dactivation = dsigmoid
def fit(self, X, y):
for _ in range(self.epochs):
# 随机梯度下降
indexes = np.random.permutation(X.shape[0])
for i in range(X.shape[0]):
self.forward(X[indexes[i]])
self.backward(y[indexes[i]])
return self
# Thresholding here is a convenience for 2-D visualization; the real
# prediction may have more than one output dimension and is not
# necessarily comparable to a single scalar.
def predict(self, X):
return np.where(self.predict_prob(X) >= 0.5, 1, 0)
def predict_prob(self, X):
y = np.empty((X.shape[0], len(self.layers[-1])))
for i in range(X.shape[0
# MLP: two implementations (differing only in `forward`).
# (Scraped post metadata — latest recommended article published 2025-03-23 10:09:41.)