DOC Ensure SGDRegressor passes numpydoc validation (#21097) · scikit-learn/scikit-learn@16ed158 · GitHub
[go: up one dir, main page]

Skip to content

Commit 16ed158

Browse files
EricEllwanger, frellwan, and lemaitre
authored
DOC Ensure SGDRegressor passes numpydoc validation (#21097)
Co-authored-by: frellwan <frellwan@hotmail.com>
Co-authored-by: Guillaume Lemaitre <g.lemaitre58@gmail.com>
1 parent 5af0de2 commit 16ed158

File tree

2 files changed

+22
-15
lines changed

2 files changed

+22
-15
lines changed

maint_tools/test_docstrings.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -34,7 +34,6 @@
3434
"QuadraticDiscriminantAnalysis",
3535
"RandomizedSearchCV",
3636
"RobustScaler",
37-
"SGDRegressor",
3837
"SelfTrainingClassifier",
3938
"SparseRandomProjection",
4039
"SpectralBiclustering",

sklearn/linear_model/_stochastic_gradient.py

Lines changed: 22 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -1436,18 +1436,19 @@ def partial_fit(self, X, y, sample_weight=None):
14361436
Parameters
14371437
----------
14381438
X : {array-like, sparse matrix}, shape (n_samples, n_features)
1439-
Subset of training data
1439+
Subset of training data.
14401440
14411441
y : numpy array of shape (n_samples,)
1442-
Subset of target values
1442+
Subset of target values.
14431443
14441444
sample_weight : array-like, shape (n_samples,), default=None
14451445
Weights applied to individual samples.
14461446
If not provided, uniform weights are assumed.
14471447
14481448
Returns
14491449
-------
1450-
self : returns an instance of self.
1450+
self : object
1451+
Returns an instance of self.
14511452
"""
14521453
self._validate_params(for_partial_fit=True)
14531454
return self._partial_fit(
@@ -1521,10 +1522,10 @@ def fit(self, X, y, coef_init=None, intercept_init=None, sample_weight=None):
15211522
Parameters
15221523
----------
15231524
X : {array-like, sparse matrix}, shape (n_samples, n_features)
1524-
Training data
1525+
Training data.
15251526
15261527
y : ndarray of shape (n_samples,)
1527-
Target values
1528+
Target values.
15281529
15291530
coef_init : ndarray of shape (n_features,), default=None
15301531
The initial coefficients to warm-start the optimization.
@@ -1537,7 +1538,8 @@ def fit(self, X, y, coef_init=None, intercept_init=None, sample_weight=None):
15371538
15381539
Returns
15391540
-------
1540-
self : returns an instance of self.
1541+
self : object
1542+
Fitted `SGDRegressor` estimator.
15411543
"""
15421544
return self._fit(
15431545
X,
@@ -1571,11 +1573,12 @@ def _decision_function(self, X):
15711573
return scores.ravel()
15721574

15731575
def predict(self, X):
1574-
"""Predict using the linear model
1576+
"""Predict using the linear model.
15751577
15761578
Parameters
15771579
----------
15781580
X : {array-like, sparse matrix}, shape (n_samples, n_features)
1581+
Input data.
15791582
15801583
Returns
15811584
-------
@@ -1670,7 +1673,7 @@ def _fit_regressor(
16701673

16711674

16721675
class SGDRegressor(BaseSGDRegressor):
1673-
"""Linear model fitted by minimizing a regularized empirical loss with SGD
1676+
"""Linear model fitted by minimizing a regularized empirical loss with SGD.
16741677
16751678
SGD stands for Stochastic Gradient Descent: the gradient of the loss is
16761679
estimated each sample at a time and the model is updated along the way with
@@ -1765,7 +1768,7 @@ class SGDRegressor(BaseSGDRegressor):
17651768
Pass an int for reproducible output across multiple function calls.
17661769
See :term:`Glossary <random_state>`.
17671770
1768-
learning_rate : string, default='invscaling'
1771+
learning_rate : str, default='invscaling'
17691772
The learning rate schedule:
17701773
17711774
- 'constant': `eta = eta0`
@@ -1861,6 +1864,16 @@ class SGDRegressor(BaseSGDRegressor):
18611864
18621865
.. versionadded:: 1.0
18631866
1867+
See Also
1868+
--------
1869+
HuberRegressor : Linear regression model that is robust to outliers.
1870+
Lars : Least Angle Regression model.
1871+
Lasso : Linear Model trained with L1 prior as regularizer.
1872+
RANSACRegressor : RANSAC (RANdom SAmple Consensus) algorithm.
1873+
Ridge : Linear least squares with l2 regularization.
1874+
sklearn.svm.SVR : Epsilon-Support Vector Regression.
1875+
TheilSenRegressor : Theil-Sen Estimator robust multivariate regression model.
1876+
18641877
Examples
18651878
--------
18661879
>>> import numpy as np
@@ -1877,11 +1890,6 @@ class SGDRegressor(BaseSGDRegressor):
18771890
>>> reg.fit(X, y)
18781891
Pipeline(steps=[('standardscaler', StandardScaler()),
18791892
('sgdregressor', SGDRegressor())])
1880-
1881-
See Also
1882-
--------
1883-
Ridge, ElasticNet, Lasso, sklearn.svm.SVR
1884-
18851893
"""
18861894

18871895
def __init__(

0 commit comments

Comments (0)