@@ -262,7 +262,7 @@ def ridge_regression(X, y, alpha, sample_weight=None, solver='auto',
262
262
assumed to be specific to the targets. Hence they must correspond in
263
263
number.
264
264
265
- sample_weight : float or numpy array of shape [n_samples], default None
265
+ sample_weight : float or numpy array of shape [n_samples], default=None
266
266
Individual weights for each sample. If sample_weight is not None and
267
267
solver='auto', the solver will be set to 'cholesky'.
268
268
@@ -314,10 +314,10 @@ def ridge_regression(X, y, alpha, sample_weight=None, solver='auto',
314
314
by scipy.sparse.linalg. For 'sag' and saga solver, the default value is
315
315
1000.
316
316
317
- tol : float, default 1e-3
317
+ tol : float, default=1e-3
318
318
Precision of the solution.
319
319
320
- verbose : int, default 0
320
+ verbose : int, default=0
321
321
Verbosity level. Setting verbose > 0 will display additional
322
322
information depending on the solver used.
323
323
@@ -328,21 +328,21 @@ def ridge_regression(X, y, alpha, sample_weight=None, solver='auto',
328
328
generator; If None, the random number generator is the RandomState
329
329
instance used by `np.random`. Used when ``solver`` == 'sag'.
330
330
331
- return_n_iter : boolean, default False
331
+ return_n_iter : boolean, default=False
332
332
If True, the method also returns `n_iter`, the actual number of
333
333
iteration performed by the solver.
334
334
335
335
.. versionadded:: 0.17
336
336
337
- return_intercept : boolean, default False
337
+ return_intercept : boolean, default=False
338
338
If True and if X is sparse, the method also returns the intercept,
339
339
and the solver is automatically changed to 'sag'. This is only a
340
340
temporary fix for fitting the intercept with sparse data. For dense
341
341
data, use sklearn.linear_model._preprocess_data before your regression.
342
342
343
343
.. versionadded:: 0.17
344
344
345
- check_input : boolean, default True
345
+ check_input : boolean, default=True
346
346
If False, the input arrays X and y will not be checked.
347
347
348
348
.. versionadded:: 0.21
@@ -619,7 +619,7 @@ class Ridge(_BaseRidge, RegressorMixin):
619
619
620
620
Parameters
621
621
----------
622
- alpha : {float, array-like}, shape (n_targets), default 1.0
622
+ alpha : {float, array-like}, shape (n_targets), default=1.0
623
623
Regularization strength; must be a positive float. Regularization
624
624
improves the conditioning of the problem and reduces the variance of
625
625
the estimates. Larger values specify stronger regularization.
@@ -628,28 +628,28 @@ class Ridge(_BaseRidge, RegressorMixin):
628
628
assumed to be specific to the targets. Hence they must correspond in
629
629
number.
630
630
631
- fit_intercept : bool, default True
631
+ fit_intercept : bool, default=True
632
632
Whether to calculate the intercept for this model. If set
633
633
to false, no intercept will be used in calculations
634
634
(e.g. data is expected to be already centered).
635
635
636
- normalize : boolean, optional, default False
636
+ normalize : boolean, optional, default=False
637
637
This parameter is ignored when ``fit_intercept`` is set to False.
638
638
If True, the regressors X will be normalized before regression by
639
639
subtracting the mean and dividing by the l2-norm.
640
640
If you wish to standardize, please use
641
641
:class:`sklearn.preprocessing.StandardScaler` before calling ``fit``
642
642
on an estimator with ``normalize=False``.
643
643
644
- copy_X : boolean, optional, default True
644
+ copy_X : boolean, optional, default=True
645
645
If True, X will be copied; else, it may be overwritten.
646
646
647
647
max_iter : int, optional
648
648
Maximum number of iterations for conjugate gradient solver.
649
649
For 'sparse_cg' and 'lsqr' solvers, the default value is determined
650
650
by scipy.sparse.linalg. For 'sag' solver, the default value is 1000.
651
651
652
- tol : float, default 1e-3
652
+ tol : float, default=1e-3
653
653
Precision of the solution.
654
654
655
655
solver : {'auto', 'svd', 'cholesky', 'lsqr', 'sparse_cg', 'sag', 'saga'}
@@ -771,34 +771,34 @@ class RidgeClassifier(LinearClassifierMixin, _BaseRidge):
771
771
772
772
Parameters
773
773
----------
774
- alpha : float, default 1.0
774
+ alpha : float, default=1.0
775
775
Regularization strength; must be a positive float. Regularization
776
776
improves the conditioning of the problem and reduces the variance of
777
777
the estimates. Larger values specify stronger regularization.
778
778
Alpha corresponds to ``C^-1`` in other linear models such as
779
779
LogisticRegression or LinearSVC.
780
780
781
- fit_intercept : bool, default True
781
+ fit_intercept : bool, default=True
782
782
Whether to calculate the intercept for this model. If set to false, no
783
783
intercept will be used in calculations (e.g. data is expected to be
784
784
already centered).
785
785
786
- normalize : boolean, optional, default False
786
+ normalize : boolean, optional, default=False
787
787
This parameter is ignored when ``fit_intercept`` is set to False.
788
788
If True, the regressors X will be normalized before regression by
789
789
subtracting the mean and dividing by the l2-norm.
790
790
If you wish to standardize, please use
791
791
:class:`sklearn.preprocessing.StandardScaler` before calling ``fit``
792
792
on an estimator with ``normalize=False``.
793
793
794
- copy_X : boolean, optional, default True
794
+ copy_X : boolean, optional, default=True
795
795
If True, X will be copied; else, it may be overwritten.
796
796
797
797
max_iter : int, optional
798
798
Maximum number of iterations for conjugate gradient solver.
799
799
The default value is determined by scipy.sparse.linalg.
800
800
801
- tol : float, default 1e-3
801
+ tol : float, default=1e-3
802
802
Precision of the solution.
803
803
804
804
class_weight : dict or 'balanced', optional
@@ -843,7 +843,7 @@ class RidgeClassifier(LinearClassifierMixin, _BaseRidge):
843
843
.. versionadded:: 0.19
844
844
SAGA solver.
845
845
846
- random_state : int, RandomState instance or None, optional, default None
846
+ random_state : int, RandomState instance or None, optional, default=None
847
847
The seed of the pseudo random number generator to use when shuffling
848
848
the data. If int, random_state is the seed used by the random number
849
849
generator; If RandomState instance, random_state is the random number
@@ -906,7 +906,7 @@ def fit(self, X, y, sample_weight=None):
906
906
y : array-like, shape = [n_samples]
907
907
Target values
908
908
909
- sample_weight : {float, numpy array}, shape (n_samples), default None
909
+ sample_weight : {float, numpy array}, shape (n_samples), default=None
910
910
Sample weight.
911
911
912
912
.. versionadded:: 0.17
@@ -1586,7 +1586,7 @@ class RidgeCV(_BaseRidgeCV, RegressorMixin):
1586
1586
1587
1587
Parameters
1588
1588
----------
1589
- alphas : numpy array of shape [n_alphas], default (0.1, 1.0, 10.0)
1589
+ alphas : numpy array of shape [n_alphas], default=(0.1, 1.0, 10.0)
1590
1590
Array of alpha values to try.
1591
1591
Regularization strength; must be a positive float. Regularization
1592
1592
improves the conditioning of the problem and reduces the variance of
@@ -1595,20 +1595,20 @@ class RidgeCV(_BaseRidgeCV, RegressorMixin):
1595
1595
LogisticRegression or LinearSVC.
1596
1596
If using generalized cross-validation, alphas must be positive.
1597
1597
1598
- fit_intercept : bool, default True
1598
+ fit_intercept : bool, default=True
1599
1599
Whether to calculate the intercept for this model. If set
1600
1600
to false, no intercept will be used in calculations
1601
1601
(e.g. data is expected to be already centered).
1602
1602
1603
- normalize : boolean, optional, default False
1603
+ normalize : boolean, optional, default=False
1604
1604
This parameter is ignored when ``fit_intercept`` is set to False.
1605
1605
If True, the regressors X will be normalized before regression by
1606
1606
subtracting the mean and dividing by the l2-norm.
1607
1607
If you wish to standardize, please use
1608
1608
:class:`sklearn.preprocessing.StandardScaler` before calling ``fit``
1609
1609
on an estimator with ``normalize=False``.
1610
1610
1611
- scoring : string, callable or None, optional, default: None
1611
+ scoring : string, callable or None, optional, default=None
1612
1612
A string (see model evaluation documentation) or
1613
1613
a scorer callable object / function with signature
1614
1614
``scorer(estimator, X, y)``.
@@ -1700,28 +1700,28 @@ class RidgeClassifierCV(LinearClassifierMixin, _BaseRidgeCV):
1700
1700
1701
1701
Parameters
1702
1702
----------
1703
- alphas : numpy array of shape [n_alphas], default (0.1, 1.0, 10.0)
1703
+ alphas : numpy array of shape [n_alphas], default=(0.1, 1.0, 10.0)
1704
1704
Array of alpha values to try.
1705
1705
Regularization strength; must be a positive float. Regularization
1706
1706
improves the conditioning of the problem and reduces the variance of
1707
1707
the estimates. Larger values specify stronger regularization.
1708
1708
Alpha corresponds to ``C^-1`` in other linear models such as
1709
1709
LogisticRegression or LinearSVC.
1710
1710
1711
- fit_intercept : bool, default True
1711
+ fit_intercept : bool, default=True
1712
1712
Whether to calculate the intercept for this model. If set
1713
1713
to false, no intercept will be used in calculations
1714
1714
(e.g. data is expected to be already centered).
1715
1715
1716
- normalize : boolean, optional, default False
1716
+ normalize : boolean, optional, default=False
1717
1717
This parameter is ignored when ``fit_intercept`` is set to False.
1718
1718
If True, the regressors X will be normalized before regression by
1719
1719
subtracting the mean and dividing by the l2-norm.
1720
1720
If you wish to standardize, please use
1721
1721
:class:`sklearn.preprocessing.StandardScaler` before calling ``fit``
1722
1722
on an estimator with ``normalize=False``.
1723
1723
1724
- scoring : string, callable or None, optional, default: None
1724
+ scoring : string, callable or None, optional, default=None
1725
1725
A string (see model evaluation documentation) or
1726
1726
a scorer callable object / function with signature
1727
1727
``scorer(estimator, X, y)``.
@@ -1815,7 +1815,7 @@ def fit(self, X, y, sample_weight=None):
1815
1815
y : array-like, shape (n_samples,)
1816
1816
Target values. Will be cast to X's dtype if necessary
1817
1817
1818
- sample_weight : {float, numpy array}, shape (n_samples), default None
1818
+ sample_weight : {float, numpy array}, shape (n_samples), default=None
1819
1819
Sample weight.
1820
1820
1821
1821
Returns
0 commit comments