The short answer is "maybe not". Have you tried something along the following lines?
Based on the example at http://scikit-learn.org/stable/modules/svm.html, you would need something like this:
import numpy as np
from sklearn import svm
from sklearn.metrics.pairwise import chi2_kernel
# toy data from the documentation example; y needs one label per training vector
X = np.array([[0, 0], [1, 1]])
y = [0, 1]
# probability=True is needed for predict_proba below
clf = svm.SVC(kernel='precomputed', probability=True)
# kernel computation; n is the number of training vectors
K = np.zeros(shape=(n, n))
# "At the moment, the kernel values between all training vectors
# and the test vectors must be provided."
# according to the scikit-learn documentation.
# -- This is the problem!
# v1: array, shape (n, d)
# w1: float in [0, 1)
chi = chi2_kernel(v1, v1)
mu = 1.0 / np.mean(chi)
K += w1 * np.exp(-mu * chi)
# v2: array, shape (n, d)
# w2: float in [0, 1)
chi = chi2_kernel(v2, v2)
mu = 1.0 / np.mean(chi)
K += w2 * np.exp(-mu * chi)
# v3: array, shape (n, d)
# w3: float in [0, 1)
chi = chi2_kernel(v3, v3)
mu = 1.0 / np.mean(chi)
K += w3 * np.exp(-mu * chi)
# v4: array, shape (n, d)
# w4: float in [0, 1)
chi = chi2_kernel(v4, v4)
mu = 1.0 / np.mean(chi)
K += w4 * np.exp(-mu * chi)
# scikit-learn wraps LIBSVM, and according to the LIBSVM README you also need
# the kernel values between the test vectors and the training vectors,
# something like this:
Kt = np.zeros(shape=(nt, n))  # nt is the number of test vectors
# t1: array, shape (nt, d)
# w1: float in [0, 1)
chi = chi2_kernel(t1, v1)
mu = 1.0 / np.mean(chi)
Kt += w1 * np.exp(-mu * chi)
# t2: array, shape (nt, d)
# w2: float in [0, 1)
chi = chi2_kernel(t2, v2)
mu = 1.0 / np.mean(chi)
Kt += w2 * np.exp(-mu * chi)
# t3: array, shape (nt, d)
# w3: float in [0, 1)
chi = chi2_kernel(t3, v3)
mu = 1.0 / np.mean(chi)
Kt += w3 * np.exp(-mu * chi)
# t4: array, shape (nt, d)
# w4: float in [0, 1)
chi = chi2_kernel(t4, v4)
mu = 1.0 / np.mean(chi)
Kt += w4 * np.exp(-mu * chi)
clf.fit(K, y)
# predict on testing examples
probas_ = clf.predict_proba(Kt)
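As a side note, the eight nearly identical blocks above can be folded into one small helper. This is only a sketch using the placeholder names from above (v1..v4, t1..t4, w1..w4), not part of the original example:
def combined_chi2_kernel(left_blocks, right_blocks, weights):
    # Weighted sum of exponentiated chi2 kernels, one term per feature block.
    # left_blocks:  list of arrays, each of shape (n_left, d_i)
    # right_blocks: list of arrays, each of shape (n_right, d_i)
    # weights:      list of floats in [0, 1)
    K = np.zeros((left_blocks[0].shape[0], right_blocks[0].shape[0]))
    for left, right, w in zip(left_blocks, right_blocks, weights):
        chi = chi2_kernel(left, right)
        mu = 1.0 / np.mean(chi)      # same data-driven scaling as above
        K += w * np.exp(-mu * chi)
    return K
# equivalent to the hand-written blocks above:
# K  = combined_chi2_kernel([v1, v2, v3, v4], [v1, v2, v3, v4], [w1, w2, w3, w4])
# Kt = combined_chi2_kernel([t1, t2, t3, t4], [v1, v2, v3, v4], [w1, w2, w3, w4])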
From there, just copy the bottom part of http://scikit-learn.org/0.13/auto_examples/plot_roc.html.
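For reference, that part of the example boils down to roughly the following sketch (assuming a binary problem and that the true test labels are available in a variable such as yt, which is not defined above):
from sklearn.metrics import roc_curve, auc
import matplotlib.pyplot as plt
# yt: true labels of the test vectors (assumed to be available)
fpr, tpr, thresholds = roc_curve(yt, probas_[:, 1])
roc_auc = auc(fpr, tpr)
plt.plot(fpr, tpr, label='ROC curve (area = %0.2f)' % roc_auc)
plt.plot([0, 1], [0, 1], 'k--')
plt.xlabel('False Positive Rate')
plt.ylabel('True Positive Rate')
plt.legend(loc='lower right')
plt.show()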