8000 DOC Ensures that PassiveAggressiveClassifier passes numpydoc validati… · scikit-learn/scikit-learn@2ae7a3a · GitHub
[go: up one dir, main page]

Skip to content

Commit 2ae7a3a

Browse files
ovynne and glemaitre
committed
DOC Ensures that PassiveAggressiveClassifier passes numpydoc validation (#21226)
Co-authored-by: Guillaume Lemaitre <g.lemaitre58@gmail.com>
1 parent 0d598f4 commit 2ae7a3a

File tree

2 files changed

+48
-47
lines changed

2 files changed

+48
-47
lines changed

maint_tools/test_docstrings.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -20,7 +20,6 @@
2020
"MultiTaskLassoCV",
2121
"OrthogonalMatchingPursuit",
2222
"OrthogonalMatchingPursuitCV",
23-
"PassiveAggressiveClassifier",
2423
"PassiveAggressiveRegressor",
2524
"QuadraticDiscriminantAnalysis",
2625
"SparseRandomProjection",

sklearn/linear_model/_passive_aggressive.py

Lines changed: 48 additions & 46 deletions
Original file line numberDiff line numberDiff line change
@@ -7,13 +7,12 @@
77

88

99
class PassiveAggressiveClassifier(BaseSGDClassifier):
10-
"""Passive Aggressive Classifier
10+
"""Passive Aggressive Classifier.
1111
1212
Read more in the :ref:`User Guide <passive_aggressive>`.
1313
1414
Parameters
1515
----------
16-
1716
C : float, default=1.0
1817
Maximum step size (regularization). Defaults to 1.0.
1918
@@ -58,10 +57,10 @@ class PassiveAggressiveClassifier(BaseSGDClassifier):
5857
shuffle : bool, default=True
5958
Whether or not the training data should be shuffled after each epoch.
6059
61-
verbose : integer, default=0
62-
The verbosity level
60+
verbose : int, default=0
61+
The verbosity level.
6362
64-
loss : string, default="hinge"
63+
loss : str, default="hinge"
6564
The loss function to be used:
6665
hinge: equivalent to PA-I in the reference paper.
6766
squared_hinge: equivalent to PA-II in the reference paper.
@@ -97,7 +96,7 @@ class PassiveAggressiveClassifier(BaseSGDClassifier):
9796
9897
The "balanced" mode uses the values of y to automatically adjust
9998
weights inversely proportional to class frequencies in the input data
100-
as ``n_samples / (n_classes * np.bincount(y))``
99+
as ``n_samples / (n_classes * np.bincount(y))``.
101100
102101
.. versionadded:: 0.17
103102
parameter *class_weight* to automatically weight samples.
@@ -109,15 +108,15 @@ class PassiveAggressiveClassifier(BaseSGDClassifier):
109108
average. So average=10 will begin averaging after seeing 10 samples.
110109
111110
.. versionadded:: 0.19
112-
parameter *average* to use weights averaging in SGD
111+
parameter *average* to use weights averaging in SGD.
113112
114113
Attributes
115114
----------
116-
coef_ : array, shape = [1, n_features] if n_classes == 2 else [n_classes,\
117-
n_features]
115+
coef_ : ndarray of shape (1, n_features) if n_classes == 2 else \
116+
(n_classes, n_features)
118117
Weights assigned to the features.
119118
120-
intercept_ : array, shape = [1] if n_classes == 2 else [n_classes]
119+
intercept_ : ndarray of shape (1,) if n_classes == 2 else (n_classes,)
121120
Constants in decision function.
122121
123122
n_features_in_ : int
@@ -135,7 +134,7 @@ class PassiveAggressiveClassifier(BaseSGDClassifier):
135134
The actual number of iterations to reach the stopping criterion.
136135
For multiclass fits, it is the maximum over every binary fit.
137136
138-
classes_ : array of shape (n_classes,)
137+
classes_ : ndarray of shape (n_classes,)
139138
The unique classes labels.
140139
141140
t_ : int
@@ -145,11 +144,21 @@ class PassiveAggressiveClassifier(BaseSGDClassifier):
145144
loss_function_ : callable
146145
Loss function used by the algorithm.
147146
147+
See Also
148+
--------
149+
SGDClassifier : Incrementally trained logistic regression.
150+
Perceptron : Linear perceptron classifier.
151+
152+
References
153+
----------
154+
Online Passive-Aggressive Algorithms
155+
<http://jmlr.csail.mit.edu/papers/volume7/crammer06a/crammer06a.pdf>
156+
K. Crammer, O. Dekel, J. Keshat, S. Shalev-Shwartz, Y. Singer - JMLR (2006)
157+
148158
Examples
149159
--------
150160
>>> from sklearn.linear_model import PassiveAggressiveClassifier
151161
>>> from sklearn.datasets import make_classification
152-
153162
>>> X, y = make_classification(n_features=4, random_state=0)
154163
>>> clf = PassiveAggressiveClassifier(max_iter=1000, random_state=0,
155164
... tol=1e-3)
@@ -161,18 +170,6 @@ class PassiveAggressiveClassifier(BaseSGDClassifier):
161170
[1.84127814]
162171
>>> print(clf.predict([[0, 0, 0, 0]]))
163172
[1]
164-
165-
See Also
166-
--------
167-
SGDClassifier
168-
Perceptron
169-
170-
References
171-
----------
172-
Online Passive-Aggressive Algorithms
173-
<http://jmlr.csail.mit.edu/papers/volume7/crammer06a/crammer06a.pdf>
174-
K. Crammer, O. Dekel, J. Keshat, S. Shalev-Shwartz, Y. Singer - JMLR (2006)
175-
176173
"""
177174

178175
def __init__(
@@ -221,12 +218,12 @@ def partial_fit(self, X, y, classes=None):
221218
Parameters
222219
----------
223220
X : {array-like, sparse matrix} of shape (n_samples, n_features)
224-
Subset of the training data
221+
Subset of the training data.
225222
226-
y : numpy array of shape [n_samples]
227-
Subset of the target values
223+
y : array-like of shape (n_samples,)
224+
Subset of the target values.
228225
229-
classes : array, shape = [n_classes]
226+
classes : ndarray of shape (n_classes,)
230227
Classes across all calls to partial_fit.
231228
Can be obtained by via `np.unique(y_all)`, where y_all is the
232229
target vector of the entire dataset.
@@ -236,7 +233,8 @@ def partial_fit(self, X, y, classes=None):
236233
237234
Returns
238235
-------
239-
self : returns an instance of self.
236+
self : object
237+
Fitted estimator.
240238
"""
241239
self._validate_params(for_partial_fit=True)
242240
if self.class_weight == "balanced":
@@ -272,20 +270,21 @@ def fit(self, X, y, coef_init=None, intercept_init=None):
272270
Parameters
273271
----------
274272
X : {array-like, sparse matrix} of shape (n_samples, n_features)
275-
Training data
273+
Training data.
276274
277-
y : numpy array of shape [n_samples]
278-
Target values
275+
y : array-like of shape (n_samples,)
276+
Target values.
279277
280-
coef_init : array, shape = [n_classes,n_features]
278+
coef_init : ndarray of shape (n_classes, n_features)
281279
The initial coefficients to warm-start the optimization.
282280
283-
intercept_init : array, shape = [n_classes]
281+
intercept_init : ndarray of shape (n_classes,)
284282
The initial intercept to warm-start the optimization.
285283
286284
Returns
287285
-------
288-
self : returns an instance of self.
286+
self : object
287+
Fitted estimator.
289288
"""
290289
self._validate_params()
291290
lr = "pa1" if self.loss == "hinge" else "pa2"
@@ -354,9 +353,9 @@ class PassiveAggressiveRegressor(BaseSGDRegressor):
354353
Whether or not the training data should be shuffled after each epoch.
355354
356355
verbose : integer, default=0
357-
The verbosity level
356+
The verbosity level.
358357
359-
loss : string, default="epsilon_insensitive"
358+
loss : str, default="epsilon_insensitive"
360359
The loss function to be used:
361360
epsilon_insensitive: equivalent to PA-I in the reference paper.
362361
squared_epsilon_insensitive: equivalent to PA-II in the reference
@@ -388,7 +387,7 @@ class PassiveAggressiveRegressor(BaseSGDRegressor):
388387
average. So average=10 will begin averaging after seeing 10 samples.
389388
390389
.. versionadded:: 0.19
391-
parameter *average* to use weights averaging in SGD
390+
parameter *average* to use weights averaging in SGD.
392391
393392
Attributes
394393
----------
@@ -436,13 +435,14 @@ class PassiveAggressiveRegressor(BaseSGDRegressor):
436435
437436
See Also
438437
--------
439-
SGDRegressor
438+
SGDRegressor : Linear model fitted by minimizing a regularized
439+
empirical loss with SGD.
440440
441441
References
442442
----------
443443
Online Passive-Aggressive Algorithms
444444
<http://jmlr.csail.mit.edu/papers/volume7/crammer06a/crammer06a.pdf>
445-
K. Crammer, O. Dekel, J. Keshat, S. Shalev-Shwartz, Y. Singer - JMLR (2006)
445+
K. Crammer, O. Dekel, J. Keshat, S. Shalev-Shwartz, Y. Singer - JMLR (2006).
446446
447447
"""
448448

@@ -490,14 +490,15 @@ def partial_fit(self, X, y):
490490
Parameters
491491
----------
492492
X : {array-like, sparse matrix} of shape (n_samples, n_features)
493-
Subset of training data
493+
Subset of training data.
494494
495495
y : numpy array of shape [n_samples]
496-
Subset of target values
496+
Subset of target values.
497497
498498
Returns
499499
-------
500-
self : returns an instance of self.
500+
self : object
501+
Fitted estimator.
501502
"""
502503
self._validate_params(for_partial_fit=True)
503504
lr = "pa1" if self.loss == "epsilon_insensitive" else "pa2"
@@ -520,10 +521,10 @@ def fit(self, X, y, coef_init=None, intercept_init=None):
520521
Parameters
521522
----------
522523
X : {array-like, sparse matrix} of shape (n_samples, n_features)
523-
Training data
524+
Training data.
524525
525526
y : numpy array of shape [n_samples]
526-
Target values
527+
Target values.
527528
528529
coef_init : array, shape = [n_features]
529530
The initial coefficients to warm-start the optimization.
@@ -533,7 +534,8 @@ def fit(self, X, y, coef_init=None, intercept_init=None):
533534
534535
Returns
535536
-------
536-
self : returns an instance of self.
537+
self : object
538+
Fitted estimator.
537539
"""
538540
self._validate_params()
539541
lr = "pa1" if self.loss == "epsilon_insensitive" else "pa2"

0 commit comments

Comments (0)