# Gaussian process regression code I found online; it is fairly easy to follow.
import numpy as np
from scipy.optimize import minimize
class GPR:
def __init__(self, optimize=True):
self.is_fit = False
self.train_X, self.train_y = None, None
self.params = {
"l": 0.5, "sigma": 0.2}
self.optimize = optimize
def fit(self, train_X, train_y):
# store train data
self.train_X = train_X
self.train_y = train_y
self.is_fit = True
# hyper parameters optimization
def negative_log_likelihood_loss(params):
self.params["l"], self.params["sigma"] = params[0], params[1]
Kyy = self.kernel(self.train_X, self.train_X) + 1e-8 * np.eye(len(self.train_X))
loss = 0.5 * self.train_y.T.dot(np.linalg.inv(Kyy)).dot(self.train_y) + 0.5 * np.linalg.slogdet(Kyy)[1]
return loss.ravel()
if self.optimize:
res = minimize(negative_log_likelihood_loss, [self.params["l"], self.params["sigma"]],
bounds=((1e-4, 1e4), (1e-4, 1e4)),
method='L-BFGS-B')
self.params