1. Background:
# -*- coding:utf-8 -*-
from sklearn import svm

x = [[2, 0], [1, 1], [2, 3]]
# class labels for the two-class problem
y = [0, 0, 1]
clf = svm.SVC(kernel='linear')
clf.fit(x, y)
print(clf)
# get the support vectors
print(clf.support_vectors_)
# get the indices of the support vectors
print(clf.support_)
# get the number of support vectors for each class
print(clf.n_support_)
Result
SVC(C=1.0, cache_size=200, class_weight=None, coef0=0.0,
    decision_function_shape=None, degree=3, gamma='auto', kernel='linear',
    max_iter=-1, probability=False, random_state=None, shrinking=True,
    tol=0.001, verbose=False)
[[ 1. 1.]
[ 2. 3.]]
[1 2]
[1 1]
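Once fitted, the classifier can also be queried for predictions. The following is a minimal sketch continuing from the clf trained above; the test points are made up for illustration:

# a minimal sketch: predict the class of new points with the clf fitted above
print(clf.predict([[2, 0]]))  # a training point of class 0, so this should print [0]
print(clf.predict([[3, 3]]))  # lies on the class-1 side of the boundary, so this should print [1]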
Example 2
# -*- coding:utf-8 -*-
import numpy as np
import pylab as pl
from sklearn import svm
#we create 40 separable points
np.random.seed(0)  # fix the seed so the generated data is the same on every run
# shifting by -[2,2] and +[2,2] moves the two Gaussian clusters apart so they are linearly separable
X = np.r_[np.random.randn(20, 2) - [2, 2], np.random.randn(20, 2) + [2, 2]]
Y = [0]*20 + [1]*20  # class labels
#fit the model
clf = svm.SVC(kernel='linear')
clf.fit(X, Y)
#get the separating hyperplane
w = clf.coef_[0]
a = -w[0] / w[1]  # slope of the decision boundary in the x-y plane
xx = np.linspace(-5, 5)  # 50 evenly spaced x values from -5 to 5
# clf.intercept_[0] is the bias term (w_3 in the line-equation comment below)
yy = a * xx - (clf.intercept_[0]) / w[1]
# plot the parallels to the separating hyperplane that pass through the support vectors
b = clf.support_vectors_[0]  # the first support vector
yy_down = a * xx + (b[1] - a*b[0])  # (b[1] - a*b[0]) is the intercept of the lower margin line
b = clf.support_vectors_[-1]  # the last support vector
yy_up = a * xx + (b[1] - a*b[0])
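# note: yy_down and yy_up reuse the slope a because the margin lines are parallel to the
# decision boundary; only the intercept changes so that each line passes through a support vector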
print("w: ", w)
print("a: ", a)
# print "xx: ", xx
# print "yy: ", yy
print("support_vectors_: ", clf.support_vectors_)
print("clf.coef_: ", clf.coef_)
# switching from the generic n-dimensional parameterization of the hyperplane to the 2D equation
# of a line y = a*x + b: the generic w_0*x + w_1*y + w_3 = 0 can be rewritten y = -(w_0/w_1)*x - (w_3/w_1)
# plot the line, the points, and the nearest vectors to the plane
pl.plot(xx, yy, 'k-')
pl.plot(xx, yy_down, 'k--')
pl.plot(xx, yy_up, 'k--')
pl.scatter(clf.support_vectors_[:, 0], clf.support_vectors_[:, 1],
           s=80, facecolors='none')
pl.scatter(X[:, 0], X[:, 1], c=Y, cmap=pl.cm.Paired)
pl.axis('tight')
pl.show()
Result
w: [ 0.90230696 0.64821811]
a: -1.39198047626
support_vectors_: [[-1.02126202 0.2408932 ]
[-0.46722079 -0.53064123]
[ 0.95144703 0.57998206]]
clf.coef_: [[ 0.90230696 0.64821811]]
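To check that yy_down and yy_up really are the margin boundaries, the decision function can be evaluated at the support vectors (it should be roughly ±1 there) and the margin width compared against 2/||w||. A minimal sketch, assuming clf and w from the script above:

# a minimal sketch, assuming clf and w from the script above
# decision_function returns w·x + b; on the support vectors it should be close to -1 or +1
print(clf.decision_function(clf.support_vectors_))
# the distance between the two dashed margin lines, measured along w, is 2 / ||w||
print(2 / np.linalg.norm(w))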
Original post: https://www.cnblogs.com/lyywj170403/p/10424196.html