class PassiveAggressiveClassifier(BaseSGDClassifier):
- """Passive Aggressive Classifier
+ """Passive Aggressive Classifier.

Read more in the :ref:`User Guide <passive_aggressive>`.

Parameters
----------
-
C : float, default=1.0
Maximum step size (regularization). Defaults to 1.0.

@@ -58,10 +57,10 @@ class PassiveAggressiveClassifier(BaseSGDClassifier):
shuffle : bool, default=True
Whether or not the training data should be shuffled after each epoch.

- verbose : integer, default=0
- The verbosity level
+ verbose : int, default=0
+ The verbosity level.

- loss : string, default="hinge"
+ loss : str, default="hinge"
The loss function to be used:
hinge: equivalent to PA-I in the reference paper.
squared_hinge: equivalent to PA-II in the reference paper.
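For intuition about the two loss choices, here is a minimal NumPy sketch of the per-sample update rule from the referenced Crammer et al. paper; the function name pa_step is made up for illustration, and this is a paraphrase of the algorithm rather than the estimator's internal code:

    import numpy as np

    def pa_step(w, x, y, C=1.0, variant="pa1"):
        # One binary passive-aggressive update; y is expected to be -1 or +1.
        loss = max(0.0, 1.0 - y * np.dot(w, x))   # hinge loss on this sample
        sq_norm = np.dot(x, x)
        if loss == 0.0 or sq_norm == 0.0:
            return w                              # margin already satisfied, stay passive
        if variant == "pa1":                      # loss="hinge" -> PA-I: step capped at C
            tau = min(C, loss / sq_norm)
        else:                                     # loss="squared_hinge" -> PA-II: smoothed step
            tau = loss / (sq_norm + 1.0 / (2.0 * C))
        return w + tau * y * x                    # move just enough toward the correct margin

Both variants take the smallest step that fixes the current mistake; C only controls how aggressive that step may be.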
@@ -97,7 +96,7 @@ class PassiveAggressiveClassifier(BaseSGDClassifier):

The "balanced" mode uses the values of y to automatically adjust
weights inversely proportional to class frequencies in the input data
- as ``n_samples / (n_classes * np.bincount(y))``
+ as ``n_samples / (n_classes * np.bincount(y))``.

.. versionadded:: 0.17
parameter *class_weight* to automatically weight samples.
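As a quick illustration of the formula cited above, the balanced weights can be reproduced directly with NumPy (toy labels chosen only for the example):

    import numpy as np

    y = np.array([0] * 8 + [1] * 2)               # imbalanced toy labels
    n_samples, n_classes = len(y), len(np.unique(y))

    # class_weight="balanced" assigns n_samples / (n_classes * np.bincount(y))
    weights = n_samples / (n_classes * np.bincount(y))
    print(weights)                                # [0.625 2.5] -> the rare class is up-weighted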
@@ -109,15 +108,15 @@ class PassiveAggressiveClassifier(BaseSGDClassifier):
average. So average=10 will begin averaging after seeing 10 samples.

.. versionadded:: 0.19
- parameter *average* to use weights averaging in SGD
+ parameter *average* to use weights averaging in SGD.

Attributes
----------
- coef_ : array, shape = [1, n_features] if n_classes == 2 else [n_classes, \
- n_features]
+ coef_ : ndarray of shape (1, n_features) if n_classes == 2 else \
+ (n_classes, n_features)
Weights assigned to the features.

- intercept_ : array, shape = [1] if n_classes == 2 else [n_classes]
+ intercept_ : ndarray of shape (1,) if n_classes == 2 else (n_classes,)
Constants in decision function.

n_features_in_ : int
@@ -135,7 +134,7 @@ class PassiveAggressiveClassifier(BaseSGDClassifier):
The actual number of iterations to reach the stopping criterion.
For multiclass fits, it is the maximum over every binary fit.

- classes_ : array of shape (n_classes,)
+ classes_ : ndarray of shape (n_classes,)
The unique classes labels.

t_ : int
@@ -145,11 +144,21 @@ class PassiveAggressiveClassifier(BaseSGDClassifier):
loss_function_ : callable
Loss function used by the algorithm.

+ See Also
+ --------
+ SGDClassifier : Incrementally trained logistic regression.
+ Perceptron : Linear perceptron classifier.
+
+ References
+ ----------
+ Online Passive-Aggressive Algorithms
+ <http://jmlr.csail.mit.edu/papers/volume7/crammer06a/crammer06a.pdf>
+ K. Crammer, O. Dekel, J. Keshat, S. Shalev-Shwartz, Y. Singer - JMLR (2006)
+
Examples
--------
>>> from sklearn.linear_model import PassiveAggressiveClassifier
>>> from sklearn.datasets import make_classification
-
>>> X, y = make_classification(n_features=4, random_state=0)
>>> clf = PassiveAggressiveClassifier(max_iter=1000, random_state=0,
... tol=1e-3)
@@ -161,18 +170,6 @@ class PassiveAggressiveClassifier(BaseSGDClassifier):
[1.84127814]
>>> print(clf.predict([[0, 0, 0, 0]]))
[1]
-
- See Also
- --------
- SGDClassifier
- Perceptron
-
- References
- ----------
- Online Passive-Aggressive Algorithms
- <http://jmlr.csail.mit.edu/papers/volume7/crammer06a/crammer06a.pdf>
- K. Crammer, O. Dekel, J. Keshat, S. Shalev-Shwartz, Y. Singer - JMLR (2006)
-
"""

def __init__(
@@ -221,12 +218,12 @@ def partial_fit(self, X, y, classes=None):
Parameters
----------
X : {array-like, sparse matrix} of shape (n_samples, n_features)
- Subset of the training data
+ Subset of the training data.

- y : numpy array of shape [n_samples]
- Subset of the target values
+ y : array-like of shape (n_samples,)
+ Subset of the target values.

- classes : array, shape = [n_classes]
+ classes : ndarray of shape (n_classes,)
Classes across all calls to partial_fit.
Can be obtained by via `np.unique(y_all)`, where y_all is the
target vector of the entire dataset.
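Since the classes argument often trips people up, here is a hedged usage sketch of incremental training with partial_fit; the batch size and data are arbitrary, the key point being that np.unique over the full label set is supplied so every later mini-batch is covered:

    import numpy as np
    from sklearn.datasets import make_classification
    from sklearn.linear_model import PassiveAggressiveClassifier

    X, y = make_classification(n_samples=200, n_features=4, random_state=0)
    clf = PassiveAggressiveClassifier(random_state=0)

    classes = np.unique(y)                        # all labels any batch may contain
    for start in range(0, X.shape[0], 50):        # stream the data in mini-batches
        batch = slice(start, start + 50)
        clf.partial_fit(X[batch], y[batch], classes=classes)

    print(clf.score(X, y))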
@@ -236,7 +233,8 @@ def partial_fit(self, X, y, classes=None):

Returns
-------
- self : returns an instance of self.
+ self : object
+ Fitted estimator.
"""
self._validate_params(for_partial_fit=True)
if self.class_weight == "balanced":
@@ -272,20 +270,21 @@ def fit(self, X, y, coef_init=None, intercept_init=None):
Parameters
----------
X : {array-like, sparse matrix} of shape (n_samples, n_features)
- Training data
+ Training data.

- y : numpy array of shape [n_samples]
- Target values
+ y : array-like of shape (n_samples,)
+ Target values.

- coef_init : array, shape = [n_classes,n_features]
+ coef_init : ndarray of shape (n_classes, n_features)
The initial coefficients to warm-start the optimization.

- intercept_init : array, shape = [n_classes]
+ intercept_init : ndarray of shape (n_classes,)
The initial intercept to warm-start the optimization.

Returns
-------
- self : returns an instance of self.
+ self : object
+ Fitted estimator.
"""
self._validate_params()
lr = "pa1" if self.loss == "hinge" else "pa2"
@@ -354,9 +353,9 @@ class PassiveAggressiveRegressor(BaseSGDRegressor):
Whether or not the training data should be shuffled after each epoch.

verbose : integer, default=0
- The verbosity level
+ The verbosity level.

- loss : string, default="epsilon_insensitive"
+ loss : str, default="epsilon_insensitive"
The loss function to be used:
epsilon_insensitive: equivalent to PA-I in the reference paper.
squared_epsilon_insensitive: equivalent to PA-II in the reference
@@ -388,7 +387,7 @@ class PassiveAggressiveRegressor(BaseSGDRegressor):
average. So average=10 will begin averaging after seeing 10 samples.

.. versionadded:: 0.19
- parameter *average* to use weights averaging in SGD
+ parameter *average* to use weights averaging in SGD.

Attributes
----------
@@ -436,13 +435,14 @@ class PassiveAggressiveRegressor(BaseSGDRegressor):

See Also
--------
- SGDRegressor
+ SGDRegressor : Linear model fitted by minimizing a regularized
+ empirical loss with SGD.

References
----------
Online Passive-Aggressive Algorithms
<http://jmlr.csail.mit.edu/papers/volume7/crammer06a/crammer06a.pdf>
- K. Crammer, O. Dekel, J. Keshat, S. Shalev-Shwartz, Y. Singer - JMLR (2006)
+ K. Crammer, O. Dekel, J. Keshat, S. Shalev-Shwartz, Y. Singer - JMLR (2006).

"""
@@ -490,14 +490,15 @@ def partial_fit(self, X, y):
Parameters
----------
X : {array-like, sparse matrix} of shape (n_samples, n_features)
- Subset of training data
+ Subset of training data.

y : numpy array of shape [n_samples]
- Subset of target values
+ Subset of target values.

Returns
-------
- self : returns an instance of self.
+ self : object
+ Fitted estimator.
"""
self._validate_params(for_partial_fit=True)
lr = "pa1" if self.loss == "epsilon_insensitive" else "pa2"
@@ -520,10 +521,10 @@ def fit(self, X, y, coef_init=None, intercept_init=None):
Parameters
----------
X : {array-like, sparse matrix} of shape (n_samples, n_features)
- Training data
+ Training data.

y : numpy array of shape [n_samples]
- Target values
+ Target values.

coef_init : array, shape = [n_features]
The initial coefficients to warm-start the optimization.
@@ -533,7 +534,8 @@ def fit(self, X, y, coef_init=None, intercept_init=None):

Returns
-------
- self : returns an instance of self.
+ self : object
+ Fitted estimator.
"""
self._validate_params()
lr = "pa1" if self.loss == "epsilon_insensitive" else "pa2"