update version in svdd docs to 1.1, relocate from 1.0 to 1.1 in whats… · ivannz/scikit-learn@742954a · GitHub
[go: up one dir, main page]

Skip to content

Commit 742954a

Browse files
committed
update version in svdd docs to 1.1, relocate from 1.0 to 1.1 in whats_new
add backticks (scikit-learn#20914), deprecate **params in fit (scikit-learn#20843), add feature_names_in_ (scikit-learn#20787); uncompromisingly reformat plot_oneclass_vs_svdd with black
1 parent b0f4926 commit 742954a

File tree

4 files changed

+64
-42
lines changed

4 files changed

+64
-42
lines changed

doc/whats_new/v1.0.rst

-7Lines changed: 0 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -1213,13 +1213,6 @@ Changelog
12131213
now deprecated. Use `scipy.sparse.csgraph.shortest_path` instead. :pr:`20531`
12141214
by `Tom Dupre la Tour`_.
12151215

1216-
:mod:`sklearn.svm`
1217-
..................
1218-
1219-
- |Feature| Added the :class:`svm.SVDD` class for novelty detection based
1220-
on soft minimal volume hypersphere around the sample data. :pr:`7910`
1221-
by :user:`Ivan Nazarov <ivannz>`.
1222-
12231216
Code and Documentation Contributors
12241217
-----------------------------------
12251218

doc/whats_new/v1.1.rst

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1234,6 +1234,10 @@ Changelog
12341234
parameters in `fit` instead of `__init__`.
12351235
:pr:`21436` by :user:`Haidar Almubarak <Haidar13 >`.
12361236

1237+
- |Feature| Added the :class:`svm.SVDD` class for novelty detection based
1238+
on soft minimal volume hypersphere around the sample data. :pr:`7910`
1239+
by :user:`Ivan Nazarov <ivannz>`.
1240+
12371241
:mod:`sklearn.tree`
12381242
...................
12391243

examples/svm/plot_oneclass_vs_svdd.py

Lines changed: 49 additions & 31 deletions
Original file line numberDiff line numberDiff line change
@@ -41,22 +41,26 @@
4141
X_outliers = random_state.uniform(low=-4, high=4, size=(20, 2))
4242

4343
# Define the models
44-
nu = .1
45-
kernels = [("RBF", dict(kernel="rbf", gamma=0.1)),
46-
("Poly", dict(kernel="poly", degree=2, coef0=1.0)),
47-
]
44+
nu = 0.1
45+
kernels = [
46+
("RBF", dict(kernel="rbf", gamma=0.1)),
47+
("Poly", dict(kernel="poly", degree=2, coef0=1.0)),
48+
]
4849

4950
for kernel_name, kernel in kernels:
5051

5152
# Use low tolerance to ensure better precision of the SVM
5253
# optimization procedure.
53-
classifiers = [("OCSVM", svm.OneClassSVM(nu=nu, tol=1e-8, **kernel)),
54-
("SVDD", svm.SVDD(nu=nu, tol=1e-8, **kernel)),
55-
]
54+
classifiers = [
55+
("OCSVM", svm.OneClassSVM(nu=nu, tol=1e-8, **kernel)),
56+
("SVDD", svm.SVDD(nu=nu, tol=1e-8, **kernel)),
57+
]
5658

5759
fig = plt.figure(figsize=(12, 5))
58-
fig.suptitle("One-Class SVM versus SVDD "
59-
"(error train, error novel regular, error novel abnormal)")
60+
fig.suptitle(
61+
"One-Class SVM versus SVDD "
62+
"(error train, error novel regular, error novel abnormal)"
63+
)
6064

6165
for i, (model_name, clf) in enumerate(classifiers):
6266
clf.fit(X_train)
@@ -74,32 +78,46 @@
7478
Z = clf.decision_function(np.c_[xx.ravel(), yy.ravel()])
7579
Z = Z.reshape(xx.shape)
7680

77-
ax.contourf(xx, yy, Z, levels=np.linspace(Z.min(), 0, 7),
78-
cmap=plt.cm.PuBu, zorder=-99)
79-
ax.contourf(xx, yy, Z, levels=[0, Z.max()], colors='palevioletred',
80-
zorder=-98)
81-
a = ax.contour(xx, yy, Z, levels=[0], linewidths=2, colors='darkred',
82-
zorder=-97)
81+
ax.contourf(
82+
xx, yy, Z, levels=np.linspace(Z.min(), 0, 7), cmap=plt.cm.PuBu, zorder=-99
83+
)
84+
ax.contourf(xx, yy, Z, levels=[0, Z.max()], colors="palevioletred", zorder=-98)
85+
a = ax.contour(
86+
xx, yy, Z, levels=[0], linewidths=2, colors="darkred", zorder=-97
87+
)
8388

8489
s = 40
85-
b1 = ax.scatter(X_train[:, 0], X_train[:, 1], s=s,
86-
c='white', edgecolors='k')
87-
b2 = ax.scatter(X_test[:, 0], X_test[:, 1], c='blueviolet', s=s)
88-
c = ax.scatter(X_outliers[:, 0], X_outliers[:, 1], c='gold', s=s)
89-
ax.axis('tight')
90+
b1 = ax.scatter(X_train[:, 0], X_train[:, 1], s=s, c="white", edgecolors="k")
91+
b2 = ax.scatter(X_test[:, 0], X_test[:, 1], c="blueviolet", s=s)
92+
c = ax.scatter(X_outliers[:, 0], X_outliers[:, 1], c="gold", s=s)
93+
ax.axis("tight")
9094
ax.set_xlim((-6, 6))
9195
ax.set_ylim((-6, 6))
9296

93-
ax.set_title("%s %s (%d/%d, %d/%d, %d/%d)"
94-
% (model_name, kernel_name,
95-
n_error_train, len(X_train),
96-
n_error_test, len(X_test),
97-
n_error_outliers, len(X_outliers)))
98-
99-
ax.legend([a.collections[0], b1, b2, c],
100-
["learned frontier", "training observations",
101-
"new regular observations", "new abnormal observations"],
102-
loc="lower right",
103-
prop=matplotlib.font_manager.FontProperties(size=10))
97+
ax.set_title(
98+
"%s %s (%d/%d, %d/%d, %d/%d)"
99+
% (
100+
model_name,
101+
kernel_name,
102+
n_error_train,
103+
len(X_train),
104+
n_error_test,
105+
len(X_test),
106+
n_error_outliers,
107+
len(X_outliers),
108+
)
109+
)
110+
111+
ax.legend(
112+
[a.collections[0], b1, b2, c],
113+
[
114+
"learned frontier",
115+
"training observations",
116+
"new regular observations",
117+
"new abnormal observations",
118+
],
119+
loc="lower right",
120+
prop=matplotlib.font_manager.FontProperties(size=10),
121+
)
104122

105123
plt.show()

sklearn/svm/_classes.py

Lines changed: 11 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1837,7 +1837,7 @@ class SVDD(OutlierMixin, BaseLibSVM):
18371837
18381838
Read more in the :ref:`User Guide <svm_outlier_detection>`.
18391839
1840-
..versionadded: 1.0
1840+
.. versionadded:: 1.1
18411841
18421842
Parameters
18431843
----------
@@ -1912,7 +1912,9 @@ class SVDD(OutlierMixin, BaseLibSVM):
19121912
n_features_in_ : int
19131913
Number of features seen during :term:`fit`.
19141914
1915-
.. versionadded:: 0.24
1915+
feature_names_in_ : ndarray of shape (`n_features_in_`,)
1916+
Names of features seen during :term:`fit`. Defined only when `X`
1917+
has feature names that are all strings.
19161918
19171919
n_support_ : ndarray of shape (n_classes,), dtype=int32
19181920
Number of support vectors for each class.
@@ -2001,8 +2003,8 @@ def fit(self, X, y=None, sample_weight=None, **params):
20012003
Parameters
20022004
----------
20032005
X : {array-like, sparse matrix} of shape (n_samples, n_features)
2004-
Set of samples, where n_samples is the number of samples and
2005-
n_features is the number of features.
2006+
Set of samples, where `n_samples` is the number of samples and
2007+
`n_features` is the number of features.
20062008
20072009
y : Ignored
20082010
Not used, present for API consistency by convention.
@@ -2014,6 +2016,11 @@ def fit(self, X, y=None, sample_weight=None, **params):
20142016
**params : dict
20152017
Additional fit parameters.
20162018
2019+
.. deprecated:: 1.0
2020+
The `fit` method will no longer accept extra keyword
2021+
parameters in 1.2. These keyword parameters were
2022+
already discarded.
2023+
20172024
Returns
20182025
-------
20192026
self : object

0 commit comments

Comments
 (0)
0