The SVM Algorithm
I. SVM in sklearn
1. Prepare a simple binary-classification dataset
import numpy as np
import matplotlib.pyplot as plt
from sklearn import datasets
iris = datasets.load_iris()
x = iris.data
y = iris.target
# keep only two classes and the first two features, for a simple binary classification
x = x[y<2, :2]
y = y[y<2]
plt.scatter(x[y==0, 0], x[y==0, 1])
plt.scatter(x[y==1, 0], x[y==1, 1])
plt.show()
2. Fit a linear SVM, starting with a fairly large C
# standardize the data
from sklearn.preprocessing import StandardScaler
from sklearn.svm import LinearSVC
standardscaler = StandardScaler()
standardscaler.fit(x)
x_standard = standardscaler.transform(x)
svc = LinearSVC(C=1e9)
svc.fit(x_standard, y)
def plot_decision_boundary(model, axis):
    # evaluate the model on a dense grid and fill each predicted region with a color
    x0, x1 = np.meshgrid(
        np.linspace(axis[0], axis[1], int((axis[1] - axis[0]) * 100)),
        np.linspace(axis[2], axis[3], int((axis[3] - axis[2]) * 100)),
    )
    x_new = np.c_[x0.ravel(), x1.ravel()]
    y_predict = model.predict(x_new)
    zz = y_predict.reshape(x0.shape)
    from matplotlib.colors import ListedColormap
    custom_cmap = ListedColormap(['#EF9A9A', '#FFF59D', '#90CAF9'])
    plt.contourf(x0, x1, zz, cmap=custom_cmap)
plot_decision_boundary(svc, axis=[-3, 3, -3, 3])
plt.scatter(x_standard[y==0, 0], x_standard[y==0, 1], color='red')
plt.scatter(x_standard[y==1, 0], x_standard[y==1, 1], color='blue')
plt.show()
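As a quick sanity check (my addition, not in the original notebook), the training accuracy with this near-hard-margin C should be 1.0 on this linearly separable subset:
# hypothetical check: training accuracy and the learned parameters
print(svc.score(x_standard, y))
print(svc.coef_, svc.intercept_)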
3. Use a fairly small C and compare the effect of different values of C
svc2 = LinearSVC(C=0.01)
svc2.fit(x_standard, y)
plot_decision_boundary(svc2, axis=[-3, 3, -3, 3])
plt.scatter(x_standard[y==0, 0], x_standard[y==0, 1], color='red')
plt.scatter(x_standard[y==1, 0], x_standard[y==1, 1], color='blue')
plt.show()
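To see the trend more systematically, a small loop (my addition; the C values are illustrative) refits the classifier for several values of C and redraws each boundary:
# sketch: smaller C tolerates more margin violations, larger C approaches a hard margin
for c in [0.01, 0.1, 1, 100, 1e9]:
    clf = LinearSVC(C=c)
    clf.fit(x_standard, y)
    plot_decision_boundary(clf, axis=[-3, 3, -3, 3])
    plt.scatter(x_standard[y==0, 0], x_standard[y==0, 1], color='red')
    plt.scatter(x_standard[y==1, 0], x_standard[y==1, 1], color='blue')
    plt.title('C = {}'.format(c))
    plt.show()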
4. In addition to the decision boundary, plot the two margin lines associated with the support vectors
def plot_svc_decision_boundary(model, axis):
    x0, x1 = np.meshgrid(
        np.linspace(axis[0], axis[1], int((axis[1] - axis[0]) * 100)),
        np.linspace(axis[2], axis[3], int((axis[3] - axis[2]) * 100)),
    )
    x_new = np.c_[x0.ravel(), x1.ravel()]
    y_predict = model.predict(x_new)
    zz = y_predict.reshape(x0.shape)
    from matplotlib.colors import ListedColormap
    custom_cmap = ListedColormap(['#EF9A9A', '#FFF59D', '#90CAF9'])
    plt.contourf(x0, x1, zz, cmap=custom_cmap)
    w = model.coef_[0]
    b = model.intercept_[0]
    # decision boundary: w0*x0 + w1*x1 + b = 0, i.e. x1 = -w0/w1 * x0 - b/w1
    # margin lines: w0*x0 + w1*x1 + b = +1 and w0*x0 + w1*x1 + b = -1
    plot_x = np.linspace(axis[0], axis[1], 200)
    up_y = -w[0] / w[1] * plot_x - b / w[1] + 1 / w[1]
    down_y = -w[0] / w[1] * plot_x - b / w[1] - 1 / w[1]
    # keep only the segments that fall inside the plotting range
    up_index = (up_y >= axis[2]) & (up_y <= axis[3])
    down_index = (down_y >= axis[2]) & (down_y <= axis[3])
    plt.plot(plot_x[up_index], up_y[up_index], color='black')
    plt.plot(plot_x[down_index], down_y[down_index], color='black')
plot_svc_decision_boundary(svc, axis=[-3, 3, -3, 3])
plt.scatter(x_standard[y==0, 0], x_standard[y==0, 1], color='red')
plt.scatter(x_standard[y==1, 0], x_standard[y==1, 1], color='blue')
plt.show()
plot_svc_decision_boundary(svc2, axis=[-3, 3, -3, 3])
plt.scatter(x_standard[y==0, 0], x_standard[y==0, 1], color='red')
plt.scatter(x_standard[y==1, 0], x_standard[y==1, 1], color='blue')
plt.show()
svc3 = LinearSVC(C=0.1)
svc3.fit(x_standard, y)
# The results above show that sklearn's LinearSVC defaults to one-vs-rest (ovr) for multi-class problems and to L2 regularization.
plot_svc_decision_boundary(svc3, axis=[-3, 3, -3, 3])
plt.scatter(x_standard[y==0, 0], x_standard[y==0, 1], color='red')
plt.scatter(x_standard[y==1, 0], x_standard[y==1, 1], color='blue')
plt.show()
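LinearSVC does not expose which samples are the support vectors. As a side note (my addition), SVC with kernel='linear' does, via its support_vectors_ attribute, which lets us circle them on the plot:
# sketch: mark the support vectors explicitly using SVC(kernel='linear')
from sklearn.svm import SVC
linear_svc = SVC(kernel='linear', C=1e9)
linear_svc.fit(x_standard, y)
plot_svc_decision_boundary(linear_svc, axis=[-3, 3, -3, 3])
plt.scatter(x_standard[y==0, 0], x_standard[y==0, 1], color='red')
plt.scatter(x_standard[y==1, 0], x_standard[y==1, 1], color='blue')
plt.scatter(linear_svc.support_vectors_[:, 0], linear_svc.support_vectors_[:, 1],
            s=120, facecolors='none', edgecolors='black')
plt.show()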
II. Polynomial features with SVM
1. Using SVM on a nonlinear problem; first, generate the dataset
import numpy as np
import matplotlib.pyplot as plt
from sklearn import datasets
x, y = datasets.make_moons()
# x.shape == (100, 2), y.shape == (100,)
plt.scatter(x[y==0, 0], x[y==0, 1])
plt.scatter(x[y==1, 0], x[y==1, 1])
plt.show()
2. Add some random noise to the data
x, y = datasets.make_moons(noise=0.15, random_state=666)
plt.scatter(x[y==0, 0], x[y==0, 1])
plt.scatter(x[y==1, 0], x[y==1, 1])
plt.show()
from sklearn.preprocessing import PolynomialFeatures, StandardScaler
from sklearn.svm import LinearSVC
from sklearn.pipeline import Pipeline
def PolynomialSVC(degree, C=1.0):
    return Pipeline([
        ('poly', PolynomialFeatures(degree=degree)),
        ('std_scale', StandardScaler()),
        ('linear_svc', LinearSVC(C=C))
    ])
poly_svc = PolynomialSVC(degree=3)
poly_svc.fit(x, y)
def plot_decision_boundary(model, axis):
    x0, x1 = np.meshgrid(
        np.linspace(axis[0], axis[1], int((axis[1] - axis[0]) * 100)),
        np.linspace(axis[2], axis[3], int((axis[3] - axis[2]) * 100)),
    )
    x_new = np.c_[x0.ravel(), x1.ravel()]
    y_predict = model.predict(x_new)
    zz = y_predict.reshape(x0.shape)
    from matplotlib.colors import ListedColormap
    custom_cmap = ListedColormap(['#EF9A9A', '#FFF59D', '#90CAF9'])
    plt.contourf(x0, x1, zz, cmap=custom_cmap)
plot_decision_boundary(poly_svc, axis=[-1.5, 2.5, -1.0, 1.5])
plt.scatter(x[y==0, 0], x[y==0, 1])
plt.scatter(x[y==1, 0], x[y==1, 1])
plt.show()
Besides expanding polynomial features and feeding them into a linear SVC, there is another way to achieve a similar effect: the polynomial kernel.
from sklearn.svm import SVC
# Unlike the pipeline above, this does not explicitly expand polynomial features
# and then fit a linear SVC; the polynomial terms are handled implicitly by the kernel.
# Note: SVC's default C is 1.0.
def PolynomialKernelSVC(degree, C=1.0):
    return Pipeline([
        ('std_scale', StandardScaler()),
        # kernel='poly' selects the polynomial kernel
        ('kernel_svc', SVC(kernel='poly', degree=degree, C=C))
    ])
poly_kernel_svc = PolynomialKernelSVC(degree=3)
poly_kernel_svc.fit(x, y)
plot_decision_boundary(poly_kernel_svc, axis=[-1.5, 2.5, -1.0, 1.5])
plt.scatter(x[y==0, 0], x[y==0, 1])
plt.scatter(x[y==1, 0], x[y==1, 1])
plt.show()
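To make the "implicit" part concrete, here is a small sketch (my addition, using sklearn.metrics.pairwise) showing that the polynomial kernel K(a, b) = (gamma * <a, b> + coef0)^degree is evaluated directly between samples, without ever materializing the polynomial features:
# sketch: the polynomial kernel value between two samples, computed two ways
from sklearn.metrics.pairwise import polynomial_kernel
a, b = x[0], x[1]
gamma, coef0, degree = 1.0, 1.0, 3
manual = (gamma * np.dot(a, b) + coef0) ** degree
via_sklearn = polynomial_kernel(a.reshape(1, -1), b.reshape(1, -1),
                                degree=degree, gamma=gamma, coef0=coef0)[0, 0]
print(manual, via_sklearn)  # the two values should match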
III. The Gaussian (RBF) kernel
1. Visualize the Gaussian-kernel mapping to build intuition for what it does
import numpy as np
import matplotlib.pyplot as plt
x = np.arange(-4, 5, 1)
y = np.array((x >= -2) & (x <= 2), dtype='int')
# array([0, 0, 1, 1, 1, 1, 1, 0, 0])
plt.scatter(x[y==0], [0] * len(x[y==0]))
plt.scatter(x[y==1], [0] * len(x[y==1]))
plt.show()
def gaussian(x, l):
    # Gaussian feature relative to landmark l: exp(-gamma * ||x - l||^2)
    gamma = 1.0
    return np.exp(-gamma * (x - l) ** 2)
l1, l2 = -1, 1
x_new = np.empty((len(x), 2))
for i, data in enumerate(x):
    x_new[i, 0] = gaussian(data, l1)
    x_new[i, 1] = gaussian(data, l2)
plt.scatter(x_new[y==0, 0], x_new[y==0, 1])
plt.scatter(x_new[y==1, 0], x_new[y==1, 1])
plt.show()
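The toy example above uses just two fixed landmarks l1 and l2; in the real RBF kernel, every training sample acts as a landmark. A quick check (my addition) confirms the formula exp(-gamma * ||xi - xj||^2) against sklearn's rbf_kernel:
# sketch: build the kernel matrix by hand and compare with sklearn
from sklearn.metrics.pairwise import rbf_kernel
pts = x.reshape(-1, 1).astype(float)  # the 1-D toy data as a column vector
manual_k = np.exp(-1.0 * (pts - pts.T) ** 2)
print(np.allclose(manual_k, rbf_kernel(pts, gamma=1.0)))  # expect True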
2. Use the Gaussian kernel as wrapped in sklearn
First, with the RBF kernel coefficient gamma (γ) = 1.0 (gamma controls the width of each Gaussian bump):
import numpy as np
import matplotlib.pyplot as plt
from sklearn import datasets
x, y = datasets.make_moons(noise=0.15, random_state=666)
from sklearn.preprocessing import StandardScaler
from sklearn.svm import SVC
from sklearn.pipeline import Pipeline
def RBFKernelSVC(gamma=1.0):
    return Pipeline([
        ('std_scale', StandardScaler()),
        ('svc', SVC(kernel='rbf', gamma=gamma))
    ])
svc = RBFKernelSVC(gamma=1.0)
svc.fit(x, y)
def plot_decision_boundary(model, axis):
    x0, x1 = np.meshgrid(
        np.linspace(axis[0], axis[1], int((axis[1] - axis[0]) * 100)),
        np.linspace(axis[2], axis[3], int((axis[3] - axis[2]) * 100)),
    )
    x_new = np.c_[x0.ravel(), x1.ravel()]
    y_predict = model.predict(x_new)
    zz = y_predict.reshape(x0.shape)
    from matplotlib.colors import ListedColormap
    custom_cmap = ListedColormap(['#EF9A9A', '#FFF59D', '#90CAF9'])
    plt.contourf(x0, x1, zz, cmap=custom_cmap)
plot_decision_boundary(svc, axis=[-1.5, 2.5, -1.0, 1.5])
plt.scatter(x[y==0, 0], x[y==0, 1])
plt.scatter(x[y==1, 0], x[y==1, 1])
plt.show()
With gamma (γ) = 100, each Gaussian is extremely narrow; the model overfits, forming small islands around individual points:
svc_gamma100 = RBFKernelSVC(gamma=100)
svc_gamma100.fit(x, y)
plot_decision_boundary(svc_gamma100, axis=[-1.5, 2.5, -1.0, 1.5])
plt.scatter(x[y==0, 0], x[y==0, 1])
plt.scatter(x[y==1, 0], x[y==1, 1])
plt.show()
With gamma (γ) = 10, the Gaussians widen and the boundary is smoother than with γ = 100, though still fairly tight:
svc_gamma10 = RBFKernelSVC(gamma=10)
svc_gamma10.fit(x, y)
plot_decision_boundary(svc_gamma10, axis=[-1.5, 2.5, -1.0, 1.5])
plt.scatter(x[y==0, 0], x[y==0, 1])
plt.scatter(x[y==1, 0], x[y==1, 1])
plt.show()
With gamma (γ) = 0.1, the Gaussians are very wide; the model underfits and the boundary becomes nearly linear:
svc_gamma01 = RBFKernelSVC(gamma=0.1)
svc_gamma01.fit(x, y)
plot_decision_boundary(svc_gamma01, axis=[-1.5, 2.5, -1.0, 1.5])
plt.scatter(x[y==0, 0], x[y==0, 1])
plt.scatter(x[y==1, 0], x[y==1, 1])
plt.show()
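Since gamma directly trades off under- and overfitting, a natural next step (my addition; the grid values are illustrative) is to choose it by cross-validation:
# sketch: pick gamma with 5-fold cross-validation over the pipeline
from sklearn.model_selection import GridSearchCV
param_grid = {'svc__gamma': [0.1, 0.5, 1.0, 5.0, 10.0]}
grid = GridSearchCV(RBFKernelSVC(), param_grid, cv=5)
grid.fit(x, y)
print(grid.best_params_, grid.best_score_)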
IV. SVM for regression
import numpy as np
import matplotlib.pyplot as plt
from sklearn import datasets
# Note: load_boston was removed in scikit-learn 1.2; on recent versions,
# substitute another regression dataset (e.g. fetch_california_housing).
boston = datasets.load_boston()
x = boston.data
y = boston.target
from sklearn.model_selection import train_test_split
x_train, x_test, y_train, y_test = train_test_split(x, y, random_state=888)
from sklearn.svm import SVR
from sklearn.svm import LinearSVR
from sklearn.preprocessing import StandardScaler
from sklearn.pipeline import Pipeline
def StandardLinearSVR(epsilon=0.1):
    return Pipeline([
        ('std_scale', StandardScaler()),
        # C and epsilon are hyperparameters worth tuning (kernel applies to SVR, not LinearSVR)
        ('linear_svr', LinearSVR(epsilon=epsilon))
    ])
svr = StandardLinearSVR()
svr.fit(x_train, y_train)
svr.score(x_test, y_test)
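For comparison (my addition; C and gamma are illustrative defaults, not tuned values), the kernelized SVR imported above drops into the same style of pipeline:
# sketch: RBF-kernel SVR as an alternative to LinearSVR
def StandardKernelSVR(epsilon=0.1, C=1.0, gamma='scale'):
    return Pipeline([
        ('std_scale', StandardScaler()),
        ('svr', SVR(kernel='rbf', epsilon=epsilon, C=C, gamma=gamma))
    ])
kernel_svr = StandardKernelSVR()
kernel_svr.fit(x_train, y_train)
print(kernel_svr.score(x_test, y_test))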
……
This blog has just started, so please bear with me; I will keep improving it.
I'm Ray. I'm ok.