FIX: Updated the default gamma to reflect #10331 and tests, fixed the… · ivannz/scikit-learn@1d57e62 · GitHub
Commit 1d57e62
FIX: Updated the default gamma to reflect scikit-learn#10331 and tests, fixed the docstring parameter order
1 parent 354a099 commit 1d57e62

3 files changed: +31 -19 lines changed

sklearn/svm/_classes.py

Lines changed: 14 additions & 8 deletions
@@ -1383,18 +1383,19 @@ class SVDD(BaseLibSVM, OutlierMixin):
         If none is given, 'rbf' will be used. If a callable is given it is
         used to precompute the kernel matrix.
 
-    nu : float, optional
-        An upper bound on the fraction of training errors and a lower bound
-        on the fraction of support vectors. Should be in the interval (0, 1].
-        By default 0.5 will be taken.
-
     degree : int, optional (default=3)
         Degree of the polynomial kernel function ('poly').
         Ignored by all other kernels.
 
     gamma : float, optional (default='auto')
         Kernel coefficient for 'rbf', 'poly' and 'sigmoid'.
-        If gamma is 'auto' then 1/n_features will be used instead.
+
+        Current default is 'auto' which uses 1 / n_features,
+        if ``gamma='scale'`` is passed then it uses 1 / (n_features * X.std())
+        as value of gamma. The current default of gamma, 'auto', will change
+        to 'scale' in version 0.22. 'auto_deprecated', a deprecated version of
+        'auto' is used as a default indicating that no explicit value of gamma
+        was passed.
 
     coef0 : float, optional (default=0.0)
         Independent term in kernel function.
@@ -1403,6 +1404,11 @@ class SVDD(BaseLibSVM, OutlierMixin):
     tol : float, optional
         Tolerance for stopping criterion.
 
+    nu : float, optional
+        An upper bound on the fraction of training errors and a lower bound
+        on the fraction of support vectors. Should be in the interval (0, 1].
+        By default 0.5 will be taken.
+
     shrinking : boolean, optional
         Whether to use the shrinking heuristic.
 
@@ -1458,8 +1464,8 @@ class SVDD(BaseLibSVM, OutlierMixin):
 
     _impl = 'svdd_l1'
 
-    def __init__(self, kernel='rbf', degree=3, gamma='auto', coef0=0.0,
-                 tol=1e-3, nu=0.5, shrinking=True, cache_size=200,
+    def __init__(self, kernel='rbf', degree=3, gamma='auto_deprecated',
+                 coef0=0.0, tol=1e-3, nu=0.5, shrinking=True, cache_size=200,
                  verbose=False, max_iter=-1):
 
         super(SVDD, self).__init__(
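
As a side note, the two defaults contrasted in the docstring above are easy to compute directly. The following is a minimal NumPy sketch of the documented formulas only, not the estimator's internal code; the matrix X is made up for illustration:

import numpy as np

# Illustrative training matrix (hypothetical data: 3 samples, 2 features).
X = np.array([[0.0, 1.0],
              [1.0, 1.0],
              [2.0, 0.0]])

# 'auto' (the current default): 1 / n_features
gamma_auto = 1.0 / X.shape[1]

# 'scale' (the default from version 0.22 on): 1 / (n_features * X.std())
gamma_scale = 1.0 / (X.shape[1] * X.std())

print(gamma_auto)   # 0.5
print(gamma_scale)  # ~0.728 for this X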

sklearn/svm/tests/test_sparse.py

Lines changed: 2 additions & 2 deletions
@@ -312,8 +312,8 @@ def test_sparse_svdd():
     kernels = ["linear", "poly", "rbf", "sigmoid"]
     for dataset in datasets:
         for kernel in kernels:
-            clf = svm.SVDD(kernel=kernel)
-            sp_clf = svm.SVDD(kernel=kernel)
+            clf = svm.SVDD(gamma='scale', kernel=kernel)
+            sp_clf = svm.SVDD(gamma='scale', kernel=kernel)
             check_svm_model_equal(clf, sp_clf, *dataset)
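
This test pins gamma='scale' so that the dense and sparse code paths resolve gamma identically. A rough standalone sketch of the same check, assuming this fork (which ships svm.SVDD) is installed and using made-up data in place of the suite's fixtures and its private check_svm_model_equal helper:

import numpy as np
from scipy import sparse
from sklearn import svm  # assumes this fork, which provides svm.SVDD

# Made-up two-cluster data standing in for the test suite's datasets.
X_dense = np.array([[-2.0, -1.0], [-1.0, -1.0], [-1.0, -2.0],
                    [1.0, 1.0], [1.0, 2.0], [2.0, 1.0]])
X_sparse = sparse.csr_matrix(X_dense)

clf = svm.SVDD(gamma='scale', kernel='rbf').fit(X_dense)
sp_clf = svm.SVDD(gamma='scale', kernel='rbf').fit(X_sparse)

# Dense and sparse fits are expected to yield (nearly) the same model;
# note that the sparse fit stores dual_coef_ as a sparse matrix.
assert np.allclose(clf.dual_coef_, sp_clf.dual_coef_.toarray(), atol=1e-4)
assert np.allclose(clf.decision_function(X_dense),
                   sp_clf.decision_function(X_sparse), atol=1e-4)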
sklearn/svm/tests/test_svm.py

Lines changed: 15 additions & 9 deletions
@@ -299,15 +299,15 @@ def test_oneclass_score_samples():
 
 def test_svdd():
     # Test the output of libsvm for the SVDD problem with default parameters
-    clf = svm.SVDD()
+    clf = svm.SVDD(gamma='scale')
     clf.fit(X)
     pred = clf.predict(T)
 
     assert_array_equal(pred, [-1, -1, -1])
     assert_equal(pred.dtype, np.dtype('intp'))
-    assert_array_almost_equal(clf.intercept_, [0.491], decimal=3)
+    assert_array_almost_equal(clf.intercept_, [0.383], decimal=3)
     assert_array_almost_equal(clf.dual_coef_,
-                              [[0.632, 0.233, 0.633, 0.234, 0.632, 0.633]],
+                              [[0.681, 0.139, 0.680, 0.140, 0.680, 0.680]],
                               decimal=3)
     assert_false(hasattr(clf, "coef_"))
 
@@ -332,7 +332,8 @@ def test_svdd_decision_function():
     X_outliers = rnd.uniform(low=-4, high=4, size=(20, 2))
 
     # fit the model
-    clf = svm.SVDD(nu=0.1, kernel="poly", degree=2, coef0=1.0).fit(X_train)
+    clf = svm.SVDD(gamma='scale', nu=0.1,
+                   kernel="poly", degree=2, coef0=1.0).fit(X_train)
 
     # predict and validate things
     y_pred_test = clf.predict(X_test)
@@ -375,17 +376,22 @@ def test_svdd_score_samples():
     X_test = np.c_[xx.ravel(), yy.ravel()]
 
     # Fit the model for at least 10% support vectors
-    clf = svm.SVDD(nu=0.1, kernel="poly", degree=2, coef0=1.0)
+    clf = svm.SVDD(nu=0.1, kernel="poly", gamma='scale', degree=2, coef0=1.0)
     clf.fit(X_train)
 
     # Check score_samples() implementation
     assert_array_almost_equal(clf.score_samples(X_test),
                               clf.decision_function(X_test) + clf.offset_)
 
+    # Test the gamma="scale"
+    gamma = 1.0 / (X.shape[1] * X_train.std())
+
+    assert_almost_equal(clf._gamma, gamma)
+
     # Compute the kernel matrices
     k_zx = polynomial_kernel(X_train[clf.support_], X_test,
-                             degree=clf.degree, coef0=clf.coef0)
-    k_xx = polynomial_kernel(X_test,
+                             gamma=gamma, degree=clf.degree, coef0=clf.coef0)
+    k_xx = polynomial_kernel(X_test, gamma=gamma,
                              degree=clf.degree, coef0=clf.coef0).diagonal()
 
     # Compute the sample scores = decision scores without `-\rho`
@@ -407,10 +413,10 @@ def test_oneclass_and_svdd():
     # Test the output of libsvm for the SVDD and the One-Class SVM
     nu = 0.15
 
-    svdd = svm.SVDD(nu=nu, kernel="rbf")
+    svdd = svm.SVDD(nu=nu, kernel="rbf", gamma="scale")
     svdd.fit(X_train)
 
-    ocsvm = svm.OneClassSVM(nu=nu, kernel="rbf")
+    ocsvm = svm.OneClassSVM(nu=nu, kernel="rbf", gamma="scale")
     ocsvm.fit(X_train)
 
     # The intercept of the SVDD differs from that of the One-Class SVM:
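
The last hunk compares the SVDD against the One-Class SVM at the same nu and gamma. For a kernel with a constant diagonal, such as RBF, the two problems are known to be equivalent, so the fitted models should agree up to the intercept. A minimal sketch of that comparison, again assuming this fork's svm.SVDD and using synthetic data:

import numpy as np
from sklearn import svm  # assumes this fork, which provides svm.SVDD

rng = np.random.RandomState(0)
X_train = 0.3 * rng.randn(100, 2)  # synthetic blob of inliers

nu = 0.15
svdd = svm.SVDD(nu=nu, kernel="rbf", gamma="scale").fit(X_train)
ocsvm = svm.OneClassSVM(nu=nu, kernel="rbf", gamma="scale").fit(X_train)

# Expected: identical support vectors, but different intercepts.
print(np.array_equal(svdd.support_, ocsvm.support_))
print(svdd.intercept_, ocsvm.intercept_)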
