Multi-Kernel Support Vector Machines in Practice

sklearn supports custom kernels: any callable that takes two sample matrices and returns the Gram matrix between them can be passed as the kernel argument of SVC. The example below is adapted from the sklearn documentation.

import numpy as np
import matplotlib.pyplot as plt
from sklearn import svm, datasets

# import some data to play with
iris = datasets.load_iris()
X = iris.data[:, :2]  # we only take the first two features. We could
                      # avoid this ugly slicing by using a two-dim dataset
Y = iris.target
def my_kernel(X, Y):
    """
    We create a custom kernel:

                 (2  0)
    k(X, Y) = X  (    ) Y.T
                 (0  1)
    """
    M = np.array([[2, 0], [0, 1.0]])
    return np.dot(np.dot(X, M), Y.T)

h = .02  # step size in the mesh
# we create an instance of SVM and fit our data.
clf = svm.SVC(kernel=my_kernel)
clf.fit(X, Y)
# Plot the decision boundary. For that, we will assign a color to each
# point in the mesh [x_min, x_max]x[y_min, y_max].
x_min, x_max = X[:, 0].min() - 1, X[:, 0].max() + 1
y_min, y_max = X[:, 1].min() - 1, X[:, 1].max() + 1
xx, yy = np.meshgrid(np.arange(x_min, x_max, h), np.arange(y_min, y_max, h))
Z = clf.predict(np.c_[xx.ravel(), yy.ravel()])

# Put the result into a color plot
Z = Z.reshape(xx.shape)
plt.pcolormesh(xx, yy, Z, cmap=plt.cm.Paired)

# Plot also the training points
plt.scatter(X[:, 0], X[:, 1], c=Y, cmap=plt.cm.Paired, edgecolors='k')
plt.title('3-Class classification using Support Vector Machine with custom'
          ' kernel')
plt.axis('tight')
plt.show()
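A callable kernel must follow sklearn's contract: kernel(A, B) returns the Gram matrix of shape (len(A), len(B)). A quick shape check with the arrays defined above (not part of the original example):

print(my_kernel(X[:3], X[:5]).shape)   # (3, 5): one entry per pair of samples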

Defining the multi-kernel
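The base kernels below are closures over their hyperparameters. The mixing weights (betas) are chosen by kernel-target alignment: each base kernel's Gram matrix is compared with the ideal kernel y*y^T through the Frobenius inner product, and the normalized alignments become the weights of the weighted-sum kernel.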

import math

def rbf(gamma=1.0):
    def rbf_fun(x1, x2):
        # Gaussian RBF kernel: exp(-gamma * ||x1 - x2||^2)
        return math.exp(-gamma * np.linalg.norm(x1 - x2) ** 2)
    return rbf_fun

def lin(offset=0):
    def lin_fun(x1, x2):
        # linear kernel: <x1, x2> + offset
        return x1.dot(x2.transpose()) + offset
    return lin_fun

def poly(power=2, offset=0):
    def poly_fun(x1, x2):
        # polynomial kernel: (<x1, x2> + offset)^power
        return pow(x1.dot(x2.transpose()) + offset, power)
    return poly_fun

def sig(alpha=1.0, offset=0):
    def sig_fun(x1, x2):
        # sigmoid kernel: tanh(alpha * <x1, x2> + offset)
        return math.tanh(alpha * x1.dot(x2.transpose()) + offset)
    return sig_fun

def kernel_matrix(x, kernel):
    # Gram matrix of a single kernel over the sample set x
    mat = np.zeros((x.shape[0], x.shape[0]))
    for a in range(x.shape[0]):
        for b in range(x.shape[0]):
            mat[a][b] = kernel(x[a], x[b])
    return mat

def f_dot(kernel_mat1, kernel_mat2):
    # Frobenius inner product <K1, K2>_F = trace(K1 @ K2^T)
    return (kernel_mat1.dot(kernel_mat2.transpose())).trace()

def A(kernel_mat1, kernel_mat2):
    # kernel-target alignment: <K1, K2>_F / sqrt(<K1, K1>_F * <K2, K2>_F)
    return f_dot(kernel_mat1, kernel_mat2) / math.sqrt(
        f_dot(kernel_mat1, kernel_mat1) * f_dot(kernel_mat2, kernel_mat2))

def beta_finder(x, y, kernel_list):
    # weight each base kernel by its alignment with the ideal kernel y y^T,
    # normalized so the weights sum to 1
    y = np.asarray(y, dtype=float)
    yyT = y.dot(y.transpose())
    alignments = [A(kernel_matrix(x, kernel), yyT) for kernel in kernel_list]
    return [a / sum(alignments) for a in alignments]

def multi_kernel_maker(x, y, kernel_list):
    # combined kernel: a beta-weighted sum of the base kernels
    betas = [float(b) for b in beta_finder(x, y, kernel_list)]
    def multi_kernel(x1, x2):
        mat = np.zeros((x1.shape[0], x2.shape[0]))
        for a in range(x1.shape[0]):
            for b in range(x2.shape[0]):
                mat[a][b] = sum(betas[i] * kernel(x1[a], x2[b])
                                for i, kernel in enumerate(kernel_list))
        return mat
    return multi_kernel
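As a quick sanity check (using the iris arrays X, Y loaded in the first example; not part of the original script), the combined kernel can be passed straight to SVC:

y_col = [[t] for t in Y]                      # column form expected by beta_finder
mk = multi_kernel_maker(X, y_col, [rbf(1.0), lin()])
clf = svm.SVC(kernel=mk)
clf.fit(X, Y)
print(clf.score(X, Y))                        # training accuracy, sanity check only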

Loading the data

# project-specific helpers (defined elsewhere): load the pickled dataset
# and extract AAC frequency features; `label` is also set elsewhere
pkl_file_name = 'data.pkl'
x = load_dataset_obj_x(pkl_file_name)
x_train_AAC = feature_extract_AAC_pinlv(x)
x_train = x_train_AAC
y = load_dataset_obj_y(label, pkl_file_name)
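If data.pkl and these helpers are not available, a hypothetical stand-in with the same shapes (random features and binary labels, purely for trying out the pipeline) would be:

rng = np.random.RandomState(0)
x_train = rng.rand(60, 20)            # (n_samples, n_features) feature matrix
y = rng.randint(0, 2, size=60)        # 1-D label vector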

Building the multi-kernels

import itertools

# candidate base kernels; every pair of them becomes one multi-kernel
#kernels = [lin(), lin(2), poly(), poly(3), poly(4), rbf(), rbf(1.5), sig(), sig(1.5)]
kernels = [rbf(1), rbf(10)]
kernel_numbers = 2
multi_kernels = list(itertools.combinations(kernels, kernel_numbers))

Model training

import datetime
from sklearn.svm import SVC
from sklearn.model_selection import cross_val_score

def mk_train(x_train, y_train, multi_kernels):
    y = [[t] for t in y_train]        # column form expected by beta_finder
    for k_list in multi_kernels:
        mk_train_start_time = datetime.datetime.now()
        # note: the betas are fit on the full set before cross-validation
        multi_kernel = multi_kernel_maker(x_train, y, k_list)
        print(k_list, 'multi kernel made!')
        clf = SVC(kernel=multi_kernel)
        results = cross_val_score(clf, x_train, y_train, scoring='accuracy', cv=5)
        print(results.mean())
        mk_train_end_time = datetime.datetime.now()
        print('mk_train_time:', (mk_train_end_time - mk_train_start_time).seconds, 'seconds')

   
# save the log: redirect stdout to the log file *before* training,
# otherwise none of the output below gets captured
import sys
f_handler = open('out.log', 'w')
sys.stdout = f_handler

print('model training starting')
mk_train(x_train, y, multi_kernels)
print('model training finishing')
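Note that cross_val_score re-evaluates the Python-level kernel loops in every fold, which gets slow as the dataset grows. A common speed-up, sketched here under the assumption that x_train, y, and multi_kernel_maker are as defined above (mk_train_precomputed is not part of the original script), is to build the Gram matrix once and use kernel='precomputed':

from sklearn.model_selection import StratifiedKFold

def mk_train_precomputed(x_train, y_train, k_list, cv=5):
    mk = multi_kernel_maker(x_train, [[t] for t in y_train], k_list)
    gram = mk(x_train, x_train)               # full (n, n) Gram matrix, computed once
    y_arr = np.asarray(y_train)
    scores = []
    for tr, te in StratifiedKFold(n_splits=cv).split(gram, y_arr):
        clf = SVC(kernel='precomputed')
        clf.fit(gram[np.ix_(tr, tr)], y_arr[tr])                    # train-train block
        scores.append(clf.score(gram[np.ix_(te, tr)], y_arr[te]))   # test-train block
    return np.mean(scores)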
