Andrew Ng's Machine Learning Exercise ex6: Support Vector Machines

This post uses Matplotlib and scikit-learn to visualise the decision boundaries of a support vector machine (SVM) on two-dimensional data. First, a linear kernel is used to classify a simple dataset; then a Gaussian kernel (radial basis function, RBF) is applied to a non-linear dataset. In the experiments, we adjust the regularisation parameter C and the kernel parameter γ and observe how they affect the decision boundary and the model's complexity.


from scipy.io import loadmat
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import seaborn as sns

Support Vector Machines

data=loadmat('ex6data1.mat')
data
{'__header__': b'MATLAB 5.0 MAT-file, Platform: GLNXA64, Created on: Sun Nov 13 14:28:43 2011',
 '__version__': '1.0',
 '__globals__': [],
 'X': array([[1.9643  , 4.5957  ],
        [2.2753  , 3.8589  ],
        [2.9781  , 4.5651  ],
        [2.932   , 3.5519  ],
        [3.5772  , 2.856   ],
        [4.015   , 3.1937  ],
        [3.3814  , 3.4291  ],
        [3.9113  , 4.1761  ],
        [2.7822  , 4.0431  ],
        [2.5518  , 4.6162  ],
        [3.3698  , 3.9101  ],
        [3.1048  , 3.0709  ],
        [1.9182  , 4.0534  ],
        [2.2638  , 4.3706  ],
        [2.6555  , 3.5008  ],
        [3.1855  , 4.2888  ],
        [3.6579  , 3.8692  ],
        [3.9113  , 3.4291  ],
        [3.6002  , 3.1221  ],
        [3.0357  , 3.3165  ],
        [1.5841  , 3.3575  ],
        [2.0103  , 3.2039  ],
        [1.9527  , 2.7843  ],
        [2.2753  , 2.7127  ],
        [2.3099  , 2.9584  ],
        [2.8283  , 2.6309  ],
        [3.0473  , 2.2931  ],
        [2.4827  , 2.0373  ],
        [2.5057  , 2.3853  ],
        [1.8721  , 2.0577  ],
        [2.0103  , 2.3546  ],
        [1.2269  , 2.3239  ],
        [1.8951  , 2.9174  ],
        [1.561   , 3.0709  ],
        [1.5495  , 2.6923  ],
        [1.6878  , 2.4057  ],
        [1.4919  , 2.0271  ],
        [0.962   , 2.682   ],
        [1.1693  , 2.9276  ],
        [0.8122  , 2.9992  ],
        [0.9735  , 3.3881  ],
        [1.25    , 3.1937  ],
        [1.3191  , 3.5109  ],
        [2.2292  , 2.201   ],
        [2.4482  , 2.6411  ],
        [2.7938  , 1.9656  ],
        [2.091   , 1.6177  ],
        [2.5403  , 2.8867  ],
        [0.9044  , 3.0198  ],
        [0.76615 , 2.5899  ],
        [0.086405, 4.1045  ]]),
 'y': array([[1],
        [1],
        [1],
        [1],
        [1],
        [1],
        [1],
        [1],
        [1],
        [1],
        [1],
        [1],
        [1],
        [1],
        [1],
        [1],
        [1],
        [1],
        [1],
        [1],
        [0],
        [0],
        [0],
        [0],
        [0],
        [0],
        [0],
        [0],
        [0],
        [0],
        [0],
        [0],
        [0],
        [0],
        [0],
        [0],
        [0],
        [0],
        [0],
        [0],
        [0],
        [0],
        [0],
        [0],
        [0],
        [0],
        [0],
        [0],
        [0],
        [0],
        [1]], dtype=uint8)}
X=data['X']
y=data['y']
X.shape,y.shape
((51, 2), (51, 1))
X1=X[:,0]
X2=X[:,1]
sns.scatterplot(x=X1, y=X2, hue=y.ravel())
<AxesSubplot:>

[Figure: scatter plot of Example Dataset 1, points colored by label y]

from sklearn.svm import SVC
models=[SVC(kernel='linear',C=C) for C in (1,100)]
clfs=[model.fit(X,y.ravel()) for model in models]
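Before plotting, a quick sanity check can compare the training accuracy of the two fitted linear-kernel models. This is a minimal sketch using scikit-learn's SVC.score (mean accuracy on the given data); the exact numbers depend on the fit.

# Training accuracy of each linear-kernel model
for C, clf in zip((1, 100), clfs):
    print('C = {:>3}: training accuracy = {:.3f}'.format(C, clf.score(X, y.ravel())))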
from matplotlib.colors import ListedColormap

def plot_decision_regions(X, y, classifier, resolution=0.02):
    # Set up markers and a colormap with one color per class
    markers = ('s', 'x', 'o', '^', 'v')
    colors = ('red', 'blue', 'lightgreen', 'gray', 'cyan')
    cmap = ListedColormap(colors[:len(np.unique(y))])

    # Plot the decision surface over a mesh covering the data range
    x1_min, x1_max = X[:, 0].min() - 0.5, X[:, 0].max() + 0.5
    x2_min, x2_max = X[:, 1].min() - 0.5, X[:, 1].max() + 0.5
    xx1, xx2 = np.meshgrid(np.arange(x1_min, x1_max, resolution),
                           np.arange(x2_min, x2_max, resolution))
    z = classifier.predict(np.array([xx1.ravel(), xx2.ravel()]).T)
    z = z.reshape(xx1.shape)
    plt.contourf(xx1, xx2, z, alpha=0.2, cmap=cmap)
    plt.xlim(xx1.min(), xx1.max())
    plt.ylim(xx2.min(), xx2.max())

    # Plot the training samples, one marker and color per class
    for idx, cls in enumerate(np.unique(y)):
        plt.scatter(x=X[y == cls, 0], y=X[y == cls, 1],
                    alpha=0.8, c=[cmap(idx)],
                    marker=markers[idx], label=cls)
    plt.legend()

# Plot the decision boundary for each fitted classifier (C = 1 and C = 100)
titles = ['SVM Decision Boundary with C = {} (Example Dataset 1)'.format(C) for C in [1, 100]]
for clf, title in zip(clfs, titles):
    plt.figure()
    plot_decision_regions(X, y.ravel(), clf)
    plt.title(title)
    plt.show()
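The summary above also mentions the Gaussian (RBF) kernel for the non-linear dataset. Below is a minimal sketch of that step, assuming the second exercise file is named ex6data2.mat with the same 'X'/'y' layout as ex6data1.mat, and reusing the plotting helper defined above. In scikit-learn the Gaussian kernel's width is controlled by gamma, which corresponds to 1/(2*sigma^2) in the course's notation; the C and sigma values here are illustrative.

# Sketch: RBF-kernel SVM on the second (non-linear) dataset (assumed file name)
data2 = loadmat('ex6data2.mat')
X_d2, y_d2 = data2['X'], data2['y'].ravel()

sigma = 0.1                      # kernel width in the course's notation
gamma = 1.0 / (2 * sigma ** 2)   # scikit-learn's parameterisation of the RBF kernel
rbf_clf = SVC(kernel='rbf', C=1, gamma=gamma).fit(X_d2, y_d2)

plt.figure()
plot_decision_regions(X_d2, y_d2, rbf_clf, resolution=0.01)
plt.title('SVM (RBF kernel) Decision Boundary (Example Dataset 2)')
plt.show()

Increasing gamma (i.e. decreasing sigma) makes the boundary follow individual points more closely, while larger C penalises misclassifications more heavily; both push the model toward higher complexity.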