@@ -1436,18 +1436,19 @@ def partial_fit(self, X, y, sample_weight=None):
1436
1436
Parameters
1437
1437
----------
1438
1438
X : {array-like, sparse matrix}, shape (n_samples, n_features)
1439
- Subset of training data
1439
+ Subset of training data.
1440
1440
1441
1441
y : numpy array of shape (n_samples,)
1442
- Subset of target values
1442
+ Subset of target values.
1443
1443
1444
1444
sample_weight : array-like, shape (n_samples,), default=None
1445
1445
Weights applied to individual samples.
1446
1446
If not provided, uniform weights are assumed.
1447
1447
1448
1448
Returns
1449
1449
-------
1450
- self : returns an instance of self.
1450
+ self : object
1451
+ Returns an instance of self.
1451
1452
"""
1452
1453
self._validate_params(for_partial_fit=True)
1453
1454
return self._partial_fit(
@@ -1521,10 +1522,10 @@ def fit(self, X, y, coef_init=None, intercept_init=None, sample_weight=None):
1521
1522
Parameters
1522
1523
----------
1523
1524
X : {array-like, sparse matrix}, shape (n_samples, n_features)
1524
- Training data
1525
+ Training data.
1525
1526
1526
1527
y : ndarray of shape (n_samples,)
1527
- Target values
1528
+ Target values.
1528
1529
1529
1530
coef_init : ndarray of shape (n_features,), default=None
1530
1531
The initial coefficients to warm-start the optimization.
@@ -1537,7 +1538,8 @@ def fit(self, X, y, coef_init=None, intercept_init=None, sample_weight=None):
1537
1538
1538
1539
Returns
1539
1540
-------
1540
- self : returns an instance of self.
1541
+ self : object
1542
+ Fitted `SGDRegressor` estimator.
1541
1543
"""
1542
1544
return self._fit(
1543
1545
X,
@@ -1571,11 +1573,12 @@ def _decision_function(self, X):
1571
1573
return scores.ravel()
1572
1574
1573
1575
def predict(self, X):
1574
- """Predict using the linear model
1576
+ """Predict using the linear model.
1575
1577
1576
1578
Parameters
1577
1579
----------
1578
1580
X : {array-like, sparse matrix}, shape (n_samples, n_features)
1581
+ Input data.
1579
1582
1580
1583
Returns
1581
1584
-------
@@ -1670,7 +1673,7 @@ def _fit_regressor(
1670
1673
1671
1674
1672
1675
class SGDRegressor(BaseSGDRegressor):
1673
- """Linear model fitted by minimizing a regularized empirical loss with SGD
1676
+ """Linear model fitted by minimizing a regularized empirical loss with SGD.
1674
1677
1675
1678
SGD stands for Stochastic Gradient Descent: the gradient of the loss is
1676
1679
estimated each sample at a time and the model is updated along the way with
@@ -1765,7 +1768,7 @@ class SGDRegressor(BaseSGDRegressor):
1765
1768
Pass an int for reproducible output across multiple function calls.
1766
1769
See :term:`Glossary <random_state>`.
1767
1770
1768
- learning_rate : string, default='invscaling'
1771
+ learning_rate : str, default='invscaling'
1769
1772
The learning rate schedule:
1770
1773
1771
1774
- 'constant': `eta = eta0`
@@ -1861,6 +1864,16 @@ class SGDRegressor(BaseSGDRegressor):
1861
1864
1862
1865
.. versionadded:: 1.0
1863
1866
1867
+ See Also
1868
+ --------
1869
+ HuberRegressor : Linear regression model that is robust to outliers.
1870
+ Lars : Least Angle Regression model.
1871
+ Lasso : Linear Model trained with L1 prior as regularizer.
1872
+ RANSACRegressor : RANSAC (RANdom SAmple Consensus) algorithm.
1873
+ Ridge : Linear least squares with l2 regularization.
1874
+ sklearn.svm.SVR : Epsilon-Support Vector Regression.
1875
+ TheilSenRegressor : Theil-Sen Estimator robust multivariate regression model.
1876
+
1864
1877
Examples
1865
1878
--------
1866
1879
>>> import numpy as np
@@ -1877,11 +1890,6 @@ class SGDRegressor(BaseSGDRegressor):
1877
1890
>>> reg.fit(X, y)
1878
1891
Pipeline(steps=[('standardscaler', StandardScaler()),
1879
1892
('sgdregressor', SGDRegressor())])
1880
-
1881
- See Also
1882
- --------
1883
- Ridge, ElasticNet, Lasso, sklearn.svm.SVR
1884
-
1885
1893
"""
1886
1894
1887
1895
def __init__(
0 commit comments