当前位置:网站首页>机器学习之支持向量机实例,线性核函数 多项式核函数 RBF高斯核函数 sigmoid核函数
机器学习之支持向量机实例,线性核函数 多项式核函数 RBF高斯核函数 sigmoid核函数
2022-08-04 18:41:00 【51CTO】
支持向量机实例
1.线性核函数
def test_SVC_linear():
    """Demonstrate an SVC with the simplest kernel: linear.

    Loads the iris data set, makes a stratified 75/25 train/test split,
    fits a linear-kernel SVC, then prints the fitted coefficients,
    intercepts and the mean accuracy on the held-out test set.

    :return: None
    """
    iris = datasets.load_iris()
    # stratify keeps the class proportions identical in both splits
    X_train, X_test, y_train, y_test = train_test_split(
        iris.data, iris.target, test_size=0.25, random_state=0,
        stratify=iris.target)
    cls = SVC(kernel='linear')
    cls.fit(X_train, y_train)
    print('Coefficients:%s, intercept %s' % (cls.coef_, cls.intercept_))
    print('Score: %.2f' % cls.score(X_test, y_test))
- 1.
- 2.
- 3.
- 4.
- 5.
- 6.
- 7.
- 8.
- 9.
- 10.
- 11.
- 12.
- 13.
2.多项式核函数
def test_SVC_poly():
    """Examine how degree, gamma and coef0 affect a polynomial-kernel SVC.

    Uses a stratified 75/25 split of the iris data set and draws three
    subplots of training/testing accuracy: one sweeping ``degree``, one
    sweeping ``gamma`` (degree fixed at 3), and one sweeping ``coef0``
    (gamma fixed at 10, degree fixed at 3).

    :return: None
    """
    iris = datasets.load_iris()
    X_train, X_test, y_train, y_test = train_test_split(
        iris.data, iris.target, test_size=0.25, random_state=0,
        stratify=iris.target)
    fig = plt.figure()
    ### sweep degree ###
    degrees = range(1, 20)
    train_scores = []
    test_scores = []
    for degree in degrees:
        cls = SVC(kernel='poly', degree=degree, gamma='auto')
        cls.fit(X_train, y_train)
        train_scores.append(cls.score(X_train, y_train))
        test_scores.append(cls.score(X_test, y_test))
    ax = fig.add_subplot(1, 3, 1)  # one row, three columns
    ax.plot(degrees, train_scores, label="Training score ", marker='+')
    ax.plot(degrees, test_scores, label=" Testing score ", marker='o')
    ax.set_title("SVC_poly_degree ")
    ax.set_xlabel("p")
    ax.set_ylabel("score")
    ax.set_ylim(0, 1.05)
    ax.legend(loc="best", framealpha=0.5)
    ### sweep gamma, degree fixed at 3 ###
    gammas = range(1, 20)
    train_scores = []
    test_scores = []
    for gamma in gammas:
        cls = SVC(kernel='poly', gamma=gamma, degree=3)
        cls.fit(X_train, y_train)
        train_scores.append(cls.score(X_train, y_train))
        test_scores.append(cls.score(X_test, y_test))
    ax = fig.add_subplot(1, 3, 2)
    ax.plot(gammas, train_scores, label="Training score ", marker='+')
    ax.plot(gammas, test_scores, label=" Testing score ", marker='o')
    ax.set_title("SVC_poly_gamma ")
    ax.set_xlabel(r"$\gamma$")
    ax.set_ylabel("score")
    ax.set_ylim(0, 1.05)
    ax.legend(loc="best", framealpha=0.5)
    ### sweep r (coef0), gamma fixed at 10, degree fixed at 3 ###
    rs = range(0, 20)
    train_scores = []
    test_scores = []
    for r in rs:
        cls = SVC(kernel='poly', gamma=10, degree=3, coef0=r)
        cls.fit(X_train, y_train)
        train_scores.append(cls.score(X_train, y_train))
        test_scores.append(cls.score(X_test, y_test))
    ax = fig.add_subplot(1, 3, 3)
    ax.plot(rs, train_scores, label="Training score ", marker='+')
    ax.plot(rs, test_scores, label=" Testing score ", marker='o')
    ax.set_title("SVC_poly_r ")
    ax.set_xlabel(r"r")
    ax.set_ylabel("score")
    ax.set_ylim(0, 1.05)
    ax.legend(loc="best", framealpha=0.5)
    plt.show()
- 1.
- 2.
- 3.
- 4.
- 5.
- 6.
- 7.
- 8.
- 9.
- 10.
- 11.
- 12.
- 13.
- 14.
- 15.
- 16.
- 17.
- 18.
- 19.
- 20.
- 21.
- 22.
- 23.
- 24.
- 25.
- 26.
- 27.
- 28.
- 29.
- 30.
- 31.
- 32.
- 33.
- 34.
- 35.
- 36.
- 37.
- 38.
- 39.
- 40.
- 41.
- 42.
- 43.
- 44.
- 45.
- 46.
- 47.
- 48.
- 49.
- 50.
- 51.
- 52.
- 53.
- 54.
- 55.
- 56.
- 57.
- 58.
- 59.
- 60.
- 61.
- 62.
- 63.
- 64.
3.RBF高斯核函数
def test_SVC_rbf():
    """Examine how the gamma parameter affects an RBF (Gaussian) kernel SVC.

    Uses a stratified 75/25 split of the iris data set, trains one SVC per
    gamma in 1..19, and plots training/testing accuracy against gamma.

    :return: None
    """
    # FIX: the published snippet began with "ef test_SVC_rbf():" -- the
    # missing "d" in "def" was a SyntaxError; restored here.
    iris = datasets.load_iris()
    X_train, X_test, y_train, y_test = train_test_split(
        iris.data, iris.target, test_size=0.25, random_state=0,
        stratify=iris.target)
    gammas = range(1, 20)
    train_scores = []
    test_scores = []
    for gamma in gammas:
        cls = SVC(kernel='rbf', gamma=gamma)
        cls.fit(X_train, y_train)
        train_scores.append(cls.score(X_train, y_train))
        test_scores.append(cls.score(X_test, y_test))
    fig = plt.figure()
    ax = fig.add_subplot(1, 1, 1)
    ax.plot(gammas, train_scores, label="Training score ", marker='+')
    ax.plot(gammas, test_scores, label=" Testing score ", marker='o')
    ax.set_title("SVC_rbf")
    ax.set_xlabel(r"$\gamma$")
    ax.set_ylabel("score")
    ax.set_ylim(0, 1.05)
    ax.legend(loc="best", framealpha=0.5)
    plt.show()
- 1.
- 2.
- 3.
- 4.
- 5.
- 6.
- 7.
- 8.
- 9.
- 10.
- 11.
- 12.
- 13.
- 14.
- 15.
- 16.
- 17.
- 18.
- 19.
- 20.
- 21.
- 22.
- 23.
- 24.
- 25.
- 26.
- 27.
- 28.
4.sigmoid核函数
def test_SVC_sigmoid():
    """Examine how gamma and coef0 affect a sigmoid-kernel SVC.

    Uses a stratified 75/25 split of the iris data set and draws two
    subplots of training/testing accuracy: one sweeping ``gamma`` on a log
    grid (coef0 fixed at 0), one sweeping ``coef0`` (gamma fixed at 0.01).

    :return: None
    """
    iris = datasets.load_iris()
    X_train, X_test, y_train, y_test = train_test_split(
        iris.data, iris.target, test_size=0.25, random_state=0,
        stratify=iris.target)
    fig = plt.figure()
    ### sweep gamma, coef0 fixed at 0 ###
    gammas = np.logspace(-2, 1)
    train_scores = []
    test_scores = []
    for gamma in gammas:
        cls = SVC(kernel='sigmoid', gamma=gamma, coef0=0)
        cls.fit(X_train, y_train)
        train_scores.append(cls.score(X_train, y_train))
        test_scores.append(cls.score(X_test, y_test))
    ax = fig.add_subplot(1, 2, 1)
    ax.plot(gammas, train_scores, label="Training score ", marker='+')
    ax.plot(gammas, test_scores, label=" Testing score ", marker='o')
    ax.set_title("SVC_sigmoid_gamma ")
    ax.set_xscale("log")  # gammas span two decades
    ax.set_xlabel(r"$\gamma$")
    ax.set_ylabel("score")
    ax.set_ylim(0, 1.05)
    ax.legend(loc="best", framealpha=0.5)
    ### sweep r (coef0), gamma fixed at 0.01 ###
    rs = np.linspace(0, 5)
    train_scores = []
    test_scores = []
    for r in rs:
        cls = SVC(kernel='sigmoid', coef0=r, gamma=0.01)
        cls.fit(X_train, y_train)
        train_scores.append(cls.score(X_train, y_train))
        test_scores.append(cls.score(X_test, y_test))
    ax = fig.add_subplot(1, 2, 2)
    ax.plot(rs, train_scores, label="Training score ", marker='+')
    ax.plot(rs, test_scores, label=" Testing score ", marker='o')
    ax.set_title("SVC_sigmoid_r ")
    ax.set_xlabel(r"r")
    ax.set_ylabel("score")
    ax.set_ylim(0, 1.05)
    ax.legend(loc="best", framealpha=0.5)
    plt.show()
- 1.
- 2.
- 3.
- 4.
- 5.
- 6.
- 7.
- 8.
- 9.
- 10.
- 11.
- 12.
- 13.
- 14.
- 15.
- 16.
- 17.
- 18.
- 19.
- 20.
- 21.
- 22.
- 23.
- 24.
- 25.
- 26.
- 27.
- 28.
- 29.
- 30.
- 31.
- 32.
- 33.
- 34.
- 35.
- 36.
- 37.
- 38.
- 39.
- 40.
- 41.
- 42.
- 43.
- 44.
- 45.
- 46.
- 47.
- 48.
- 49.
- 50.
代码:
import
numpy
as
np
from
sklearn
import
datasets
from
sklearn.
model_selection
import
train_test_split
from
sklearn.
svm
import
SVC
import
matplotlib.
pyplot
as
plt
def test_SVC_linear():
    """Fit and report a linear-kernel SVC on the iris data set.

    Performs a stratified 75/25 train/test split, trains the classifier,
    and prints the learned coefficients/intercepts plus test accuracy.

    :return: None
    """
    iris = datasets.load_iris()
    splits = train_test_split(iris.data, iris.target, test_size=0.25,
                              random_state=0, stratify=iris.target)
    X_train, X_test, y_train, y_test = splits
    clf = SVC(kernel='linear')
    clf.fit(X_train, y_train)
    print('Coefficients:%s, intercept %s' % (clf.coef_, clf.intercept_))
    print('Score: %.2f' % clf.score(X_test, y_test))
def test_SVC_poly():
    """Plot how degree, gamma and coef0 influence a polynomial-kernel SVC.

    Trains on a stratified 75/25 split of the iris data set and draws
    three accuracy-vs-hyperparameter panels: degree sweep, gamma sweep
    (degree=3), and coef0 sweep (gamma=10, degree=3).

    :return: None
    """
    iris = datasets.load_iris()
    X_train, X_test, y_train, y_test = train_test_split(
        iris.data, iris.target, test_size=0.25, random_state=0,
        stratify=iris.target)
    fig = plt.figure()

    def fit_and_score(**svc_kwargs):
        # Train one polynomial-kernel SVC; return (train acc, test acc).
        clf = SVC(kernel='poly', **svc_kwargs)
        clf.fit(X_train, y_train)
        return clf.score(X_train, y_train), clf.score(X_test, y_test)

    def draw_panel(index, xs, train_scores, test_scores, title, xlabel):
        # One of three side-by-side accuracy panels (one row, three columns).
        ax = fig.add_subplot(1, 3, index)
        ax.plot(xs, train_scores, label="Training score ", marker='+')
        ax.plot(xs, test_scores, label=" Testing score ", marker='o')
        ax.set_title(title)
        ax.set_xlabel(xlabel)
        ax.set_ylabel("score")
        ax.set_ylim(0, 1.05)
        ax.legend(loc="best", framealpha=0.5)

    # Panel 1: sweep degree.
    degrees = range(1, 20)
    scores = [fit_and_score(degree=d, gamma='auto') for d in degrees]
    draw_panel(1, degrees, [s[0] for s in scores], [s[1] for s in scores],
               "SVC_poly_degree ", "p")
    # Panel 2: sweep gamma with degree fixed at 3.
    gammas = range(1, 20)
    scores = [fit_and_score(gamma=g, degree=3) for g in gammas]
    draw_panel(2, gammas, [s[0] for s in scores], [s[1] for s in scores],
               "SVC_poly_gamma ", r"$\gamma$")
    # Panel 3: sweep coef0 (r) with gamma=10 and degree=3.
    rs = range(0, 20)
    scores = [fit_and_score(gamma=10, degree=3, coef0=r) for r in rs]
    draw_panel(3, rs, [s[0] for s in scores], [s[1] for s in scores],
               "SVC_poly_r ", r"r")
    plt.show()
def test_SVC_rbf():
    """Plot how the gamma parameter influences an RBF-kernel SVC.

    Trains one Gaussian-kernel SVC per gamma in 1..19 on a stratified
    75/25 split of the iris data set and plots train/test accuracy.

    :return: None
    """
    iris = datasets.load_iris()
    X_train, X_test, y_train, y_test = train_test_split(
        iris.data, iris.target, test_size=0.25, random_state=0,
        stratify=iris.target)
    gammas = range(1, 20)
    pairs = []
    for g in gammas:
        clf = SVC(kernel='rbf', gamma=g)
        clf.fit(X_train, y_train)
        pairs.append((clf.score(X_train, y_train),
                      clf.score(X_test, y_test)))
    train_scores = [p[0] for p in pairs]
    test_scores = [p[1] for p in pairs]
    fig = plt.figure()
    ax = fig.add_subplot(1, 1, 1)
    ax.plot(gammas, train_scores, label="Training score ", marker='+')
    ax.plot(gammas, test_scores, label=" Testing score ", marker='o')
    ax.set_title("SVC_rbf")
    ax.set_xlabel(r"$\gamma$")
    ax.set_ylabel("score")
    ax.set_ylim(0, 1.05)
    ax.legend(loc="best", framealpha=0.5)
    plt.show()
def test_SVC_sigmoid():
    """Plot how gamma and coef0 influence a sigmoid-kernel SVC.

    Trains on a stratified 75/25 split of the iris data set and draws two
    panels: gamma swept over np.logspace(-2, 1) with coef0=0 (log x-axis),
    and coef0 swept over np.linspace(0, 5) with gamma=0.01.

    :return: None
    """
    iris = datasets.load_iris()
    X_train, X_test, y_train, y_test = train_test_split(
        iris.data, iris.target, test_size=0.25, random_state=0,
        stratify=iris.target)
    fig = plt.figure()

    def fit_and_score(**svc_kwargs):
        # Train one sigmoid-kernel SVC; return (train acc, test acc).
        clf = SVC(kernel='sigmoid', **svc_kwargs)
        clf.fit(X_train, y_train)
        return clf.score(X_train, y_train), clf.score(X_test, y_test)

    # Panel 1: sweep gamma with coef0 fixed at 0.
    gammas = np.logspace(-2, 1)
    scores = [fit_and_score(gamma=g, coef0=0) for g in gammas]
    ax = fig.add_subplot(1, 2, 1)
    ax.plot(gammas, [s[0] for s in scores], label="Training score ",
            marker='+')
    ax.plot(gammas, [s[1] for s in scores], label=" Testing score ",
            marker='o')
    ax.set_title("SVC_sigmoid_gamma ")
    ax.set_xscale("log")  # gammas span two decades
    ax.set_xlabel(r"$\gamma$")
    ax.set_ylabel("score")
    ax.set_ylim(0, 1.05)
    ax.legend(loc="best", framealpha=0.5)
    # Panel 2: sweep coef0 (r) with gamma fixed at 0.01.
    rs = np.linspace(0, 5)
    scores = [fit_and_score(coef0=r, gamma=0.01) for r in rs]
    ax = fig.add_subplot(1, 2, 2)
    ax.plot(rs, [s[0] for s in scores], label="Training score ",
            marker='+')
    ax.plot(rs, [s[1] for s in scores], label=" Testing score ",
            marker='o')
    ax.set_title("SVC_sigmoid_r ")
    ax.set_xlabel(r"r")
    ax.set_ylabel("score")
    ax.set_ylim(0, 1.05)
    ax.legend(loc="best", framealpha=0.5)
    plt.show()
if __name__ == "__main__":
    # Run each kernel demo in turn: linear, polynomial, RBF, sigmoid.
    for demo in (test_SVC_linear, test_SVC_poly, test_SVC_rbf,
                 test_SVC_sigmoid):
        demo()
- 1.
- 2.
- 3.
- 4.
- 5.
- 6.
- 7.
- 8.
- 9.
- 10.
- 11.
- 12.
- 13.
- 14.
- 15.
- 16.
- 17.
- 18.
- 19.
- 20.
- 21.
- 22.
- 23.
- 24.
- 25.
- 26.
- 27.
- 28.
- 29.
- 30.
- 31.
- 32.
- 33.
- 34.
- 35.
- 36.
- 37.
- 38.
- 39.
- 40.
- 41.
- 42.
- 43.
- 44.
- 45.
- 46.
- 47.
- 48.
- 49.
- 50.
- 51.
- 52.
- 53.
- 54.
- 55.
- 56.
- 57.
- 58.
- 59.
- 60.
- 61.
- 62.
- 63.
- 64.
- 65.
- 66.
- 67.
- 68.
- 69.
- 70.
- 71.
- 72.
- 73.
- 74.
- 75.
- 76.
- 77.
- 78.
- 79.
- 80.
- 81.
- 82.
- 83.
- 84.
- 85.
- 86.
- 87.
- 88.
- 89.
- 90.
- 91.
- 92.
- 93.
- 94.
- 95.
- 96.
- 97.
- 98.
- 99.
- 100.
- 101.
- 102.
- 103.
- 104.
- 105.
- 106.
- 107.
- 108.
- 109.
- 110.
- 111.
- 112.
- 113.
- 114.
- 115.
- 116.
- 117.
- 118.
- 119.
- 120.
- 121.
- 122.
- 123.
- 124.
- 125.
- 126.
- 127.
- 128.
- 129.
- 130.
- 131.
- 132.
- 133.
- 134.
- 135.
- 136.
- 137.
- 138.
- 139.
- 140.
- 141.
- 142.
- 143.
- 144.
- 145.
- 146.
- 147.
- 148.
- 149.
- 150.
- 151.
- 152.
- 153.
- 154.
- 155.
- 156.
- 157.
- 158.
- 159.
- 160.
- 161.
- 162.
- 163.
- 164.
- 165.
- 166.
- 167.
- 168.
- 169.
- 170.
- 171.
- 172.
- 173.
结果:
线性核函数

多项式核函数

RBF高斯核函数

sigmoid核函数

边栏推荐
- VPC2187/8 current mode PWM controller 4-100VIN ultra-wide voltage startup, highly integrated power control chip recommended
- ros2订阅esp32发布的电池电压数据
- 工业元宇宙对工业带来的改变
- ACP-Cloud Computing By Wakin自用笔记(1)云计算基础、虚拟化技术
- margin 塌陷和重合的理解
- Activity数据库字段说明
- EuROC 数据集格式及相关代码
- PHP代码审计7—文件上传漏洞
- Google Earth Engine APP——一键在线查看全球1984-至今年的影像同时加载一个影像分析
- win10 uwp 动态修改ListView元素布局
猜你喜欢
随机推荐
【RTOS训练营】关于上课和答疑
测试/开发程序员男都秃头?女都满脸痘痘?过好我们“短暂“的一生......
ERC721标准与加密猫
基于 eBPF 的 Kubernetes 可观测实践
YOLOv7-Pose尝鲜,基于YOLOv7的关键点模型测评
Matlab drawing 1
敏捷开发项目管理的一些心得
Homework 8.3 Thread Synchronization Mutex Condition Variables
Enterprise survey correlation analysis case
mood swings
MMDetection 使用示例:从入门到出门
巴比特 | 元宇宙每日必读:微博动漫将招募全球各类虚拟偶像并为其提供扶持...
Google Earth Engine APP——一键在线查看全球1984-至今年的影像同时加载一个影像分析
关于使用腾讯云HiFlow场景连接器每天提醒签到打卡
DHCP&OSPF combined experimental demonstration (Huawei routing and switching equipment configuration)
FE01_OneHot-Scala Application
LVS+NAT 负载均衡群集,NAT模式部署
win10 uwp DataContext
防火墙基础之防火墙做出口设备安全防护
股票开户广发证券,网上开户安全吗?









