Make BaseSVC abstract again. · scikit-learn/scikit-learn@6776eb6 · GitHub
[go: up one dir, main page]

Skip to content

Commit 6776eb6

Browse files
committed
Make BaseSVC abstract again.
1 parent 5d58dfc commit 6776eb6

File tree

5 files changed

+30
-23
lines changed

5 files changed

+30
-23
lines changed

doc/modules/model_persistence.rst

Lines changed: 4 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -22,9 +22,10 @@ persistence model, namely `pickle <http://docs.python.org/library/pickle.html>`_
2222
>>> iris = datasets.load_iris()
2323
>>> X, y = iris.data, iris.target
2424
>>> clf.fit(X, y) # doctest: +NORMALIZE_WHITESPACE
25-
SVC(C=1.0, cache_size=200, class_weight=None, coef0=0.0, degree=3, gamma=0.0,
26-
kernel='rbf', max_iter=-1, probability=False, random_state=None,
27-
shrinking=True, tol=0.001, verbose=False)
25+
SVC(C=1.0, cache_size=200, class_weight=None, coef0=0.0,
26+
compact_decision_function=None, degree=3, gamma=0.0, kernel='rbf',
27+
max_iter=-1, probability=False, random_state=None, shrinking=True, tol=0.001,
28+
verbose=False)
2829

2930
>>> import pickle
3031
>>> s = pickle.dumps(clf)
[Expand file down from line 31]

doc/modules/pipeline.rst

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -42,9 +42,9 @@ is an estimator object::
4242
>>> clf # doctest: +NORMALIZE_WHITESPACE
4343
Pipeline(steps=[('reduce_dim', PCA(copy=True, n_components=None,
4444
whiten=False)), ('svm', SVC(C=1.0, cache_size=200, class_weight=None,
45-
coef0=0.0, degree=3, gamma=0.0, kernel='rbf', max_iter=-1,
46-
probability=False, random_state=None, shrinking=True, tol=0.001,
47-
verbose=False))])
45+
coef0=0.0, compact_decision_function=None, degree=3, gamma=0.0,
46+
kernel='rbf', max_iter=-1, probability=False, random_state=None,
47+
shrinking=True, tol=0.001, verbose=False))])
4848

4949
The utility function :func:`make_pipeline` is a shorthand
5050
for constructing pipelines;
@@ -76,9 +76,9 @@ Parameters of the estimators in the pipeline can be accessed using the
7676
>>> clf.set_params(svm__C=10) # doctest: +NORMALIZE_WHITESPACE
7777
Pipeline(steps=[('reduce_dim', PCA(copy=True, n_components=None,
7878
whiten=False)), ('svm', SVC(C=10, cache_size=200, class_weight=None,
79-
coef0=0.0, degree=3, gamma=0.0, kernel='rbf', max_iter=-1,
80-
probability=False, random_state=None, shrinking=True, tol=0.001,
81-
verbose=False))])
79+
coef0=0.0, compact_decision_function=None, degree=3, gamma=0.0,
80+
kernel='rbf', max_iter=-1, probability=False, random_state=None,
81+
shrinking=True, tol=0.001, verbose=False))])
8282

8383
This is particularly important for doing grid searches::
8484

doc/modules/svm.rst

Lines changed: 12 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -76,9 +76,10 @@ n_features]`` holding the training samples, and an array y of class labels
7676
>>> y = [0, 1]
7777
>>> clf = svm.SVC()
7878
>>> clf.fit(X, y) # doctest: +NORMALIZE_WHITESPACE
79-
SVC(C=1.0, cache_size=200, class_weight=None, coef0=0.0, degree=3,
80-
gamma=0.0, kernel='rbf', max_iter=-1, probability=False, random_state=None,
81-
shrinking=True, tol=0.001, verbose=False)
79+
SVC(C=1.0, cache_size=200, class_weight=None, coef0=0.0,
80+
compact_decision_function=None, degree=3, gamma=0.0, kernel='rbf',
81+
max_iter=-1, probability=False, random_state=None, shrinking=True,
82+
tol=0.001, verbose=False)
8283

8384
After being fitted, the model can then be used to predict new values::
8485

@@ -115,9 +116,10 @@ classifiers are constructed and each one trains data from two classes::
115116
>>> Y = [0, 1, 2, 3]
116117
>>> clf = svm.SVC()
117118
>>> clf.fit(X, Y) # doctest: +NORMALIZE_WHITESPACE
118-
SVC(C=1.0, cache_size=200, class_weight=None, coef0=0.0, degree=3,
119-
gamma=0.0, kernel='rbf', max_iter=-1, probability=False, random_state=None,
120-
shrinking=True, tol=0.001, verbose=False)
119+
SVC(C=1.0, cache_size=200, class_weight=None, coef0=0.0,
120+
compact_decision_function=None, degree=3, gamma=0.0, kernel='rbf',
121+
max_iter=-1, probability=False, random_state=None, shrinking=True,
122+
tol=0.001, verbose=False)
121123
>>> dec = clf.decision_function([[1]])
122124
>>> dec.shape[1] # 4 classes: 4*3/2 = 6
123125
6
@@ -503,9 +505,10 @@ test vectors must be provided.
503505
>>> # linear kernel computation
504506
>>> gram = np.dot(X, X.T)
505507
>>> clf.fit(gram, y) # doctest: +NORMALIZE_WHITESPACE
506-
SVC(C=1.0, cache_size=200, class_weight=None, coef0=0.0, degree=3,
507-
gamma=0.0, kernel='precomputed', max_iter=-1, probability=False,
508-
random_state=None, shrinking=True, tol=0.001, verbose=False)
508+
SVC(C=1.0, cache_size=200, class_weight=None, coef0=0.0,
509+
compact_decision_function=None, degree=3, gamma=0.0, kernel='precomputed',
510+
max_iter=-1, probability=False, random_state=None, shrinking=True,
511+
tol=0.001, verbose=False)
509512
>>> # predict on training examples
510513
>>> clf.predict(gram)
511514
array([0, 1])

sklearn/svm/base.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -482,8 +482,9 @@ def _get_coef(self):
482482
return safe_sparse_dot(self._dual_coef_, self.support_vectors_)
483483

484484

485-
class BaseSVC(BaseLibSVM, ClassifierMixin):
485+
class BaseSVC(six.with_metaclass(ABCMeta, BaseLibSVM, ClassifierMixin)):
486486
"""ABC for LibSVM-based classifiers."""
487+
@abstractmethod
487488
def __init__(self, impl, kernel, degree, gamma, coef0, tol, C, nu,
488489
shrinking, probability, cache_size, class_weight, verbose,
489490
max_iter, compact_decision_function, random_state):

sklearn/svm/classes.py

Lines changed: 6 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -487,9 +487,10 @@ class frequencies.
487487
>>> from sklearn.svm import SVC
488488
>>> clf = SVC()
489489
>>> clf.fit(X, y) #doctest: +NORMALIZE_WHITESPACE
490-
SVC(C=1.0, cache_size=200, class_weight=None, coef0=0.0, degree=3,
491-
gamma=0.0, kernel='rbf', max_iter=-1, probability=False,
492-
random_state=None, shrinking=True, tol=0.001, verbose=False)
490+
SVC(C=1.0, cache_size=200, class_weight=None, coef0=0.0,
491+
compact_decision_function=None, degree=3, gamma=0.0, kernel='rbf',
492+
max_iter=-1, probability=False, random_state=None, shrinking=True,
493+
tol=0.001, verbose=False)
493494
>>> print(clf.predict([[-0.8, -1]]))
494495
[1]
495496
@@ -626,7 +627,8 @@ class frequencies.
626627
>>> from sklearn.svm import NuSVC
627628
>>> clf = NuSVC()
628629
>>> clf.fit(X, y) #doctest: +NORMALIZE_WHITESPACE
629-
NuSVC(cache_size=200, coef0=0.0, degree=3, gamma=0.0, kernel='rbf',
630+
NuSVC(cache_size=200, class_weight=None, coef0=0.0,
631+
compact_decision_function=None, degree=3, gamma=0.0, kernel='rbf',
630632
max_iter=-1, nu=0.5, probability=False, random_state=None,
631633
shrinking=True, tol=0.001, verbose=False)
632634
>>> print(clf.predict([[-0.8, -1]]))

0 commit comments

Comments (0)