@@ -81,7 +81,7 @@ class SGDClassifier(BaseSGD, ClassifierMixin, SelectorMixin):
         not achievable with 'l2'.
 
     alpha : float
-        Constant that multiplies the regularization term. Defaults to 0.0001
+        Constant that multiplies the regularization term. Defaults to 0.01
 
     rho : float
         The Elastic Net mixing parameter, with 0 < rho <= 1.
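For context on what this hunk changes: alpha is the constant that multiplies the regularization term, so raising the default from 0.0001 to 0.01 makes the default penalty a hundred times stronger. A minimal NumPy sketch of the penalized objective for the default hinge + 'l2' case, just to show where alpha enters; the function name is made up for illustration and this is not the library's Cython implementation:

    import numpy as np

    def penalized_hinge_objective(w, X, y, alpha=0.01):
        """Average hinge loss plus an alpha-scaled L2 penalty (sketch only)."""
        margins = 1.0 - y * (X @ w)            # labels y are in {-1, +1}
        data_term = np.maximum(0.0, margins).mean()
        penalty = 0.5 * np.dot(w, w)           # plain L2 regularizer
        return data_term + alpha * penalty     # alpha multiplies the penalty term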
@@ -93,7 +93,7 @@ class SGDClassifier(BaseSGD, ClassifierMixin, SelectorMixin):
 
     n_iter: int, optional
         The number of passes over the training data (aka epochs).
-        Defaults to 5.
+        Defaults to 20.
 
     shuffle: bool, optional
         Whether or not the training data should be shuffled after each epoch.
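Likewise, n_iter is simply the number of full passes (epochs) over the training set, and shuffle reorders the samples between passes. A toy Python version of that loop for the default hinge + 'l2' case, to make the epoch semantics concrete; the helper name and constant step size are assumptions for illustration, not the library's internals:

    import numpy as np

    def toy_sgd(w, X, y, alpha=0.01, eta=0.01, n_iter=20, shuffle=False, seed=0):
        """Run n_iter epochs of plain SGD; optionally shuffle rows after each epoch."""
        rng = np.random.RandomState(seed)
        idx = np.arange(X.shape[0])
        for _ in range(n_iter):                # one epoch = one pass over the data
            for i in idx:
                grad = alpha * w               # gradient of the L2 penalty term
                if y[i] * (X[i] @ w) < 1.0:    # hinge loss is active at this sample
                    grad -= y[i] * X[i]
                w = w - eta * grad             # constant-step update, for simplicity
            if shuffle:
                rng.shuffle(idx)               # reorder after each epoch, per the docstring
        return w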
@@ -154,9 +154,9 @@ class SGDClassifier(BaseSGD, ClassifierMixin, SelectorMixin):
     >>> clf = linear_model.SGDClassifier()
     >>> clf.fit(X, Y)
     ... #doctest: +NORMALIZE_WHITESPACE
-    SGDClassifier(alpha=0.0001, class_weight=None, epsilon=0.1, eta0=0.0,
+    SGDClassifier(alpha=0.01, class_weight=None, epsilon=0.1, eta0=0.0,
             fit_intercept=True, learning_rate='optimal', loss='hinge',
-            n_iter=5, n_jobs=1, penalty='l2', power_t=0.5, rho=0.85, seed=0,
+            n_iter=20, n_jobs=1, penalty='l2', power_t=0.5, rho=0.85, seed=0,
             shuffle=False, verbose=0, warm_start=False)
     >>> print(clf.predict([[-0.8, -1]]))
     [1]
@@ -166,8 +166,8 @@ class SGDClassifier(BaseSGD, ClassifierMixin, SelectorMixin):
     LinearSVC, LogisticRegression, Perceptron
 
     """
-    def __init__(self, loss="hinge", penalty='l2', alpha=0.0001,
-                 rho=0.85, fit_intercept=True, n_iter=5, shuffle=False,
+    def __init__(self, loss="hinge", penalty='l2', alpha=0.01,
+                 rho=0.85, fit_intercept=True, n_iter=20, shuffle=False,
                  verbose=0, epsilon=0.1, n_jobs=1, seed=0,
                  learning_rate="optimal", eta0=0.0, power_t=0.5,
                  class_weight=None, warm_start=False):
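With this hunk applied, a bare SGDClassifier() behaves like the explicit call below; spelling the two changed defaults out makes the equivalence visible. A doctest-style sketch using the same toy data as the library's own examples; the predicted output is what one would expect on this trivially separable data, not a verified doctest:

    >>> from sklearn import linear_model
    >>> clf = linear_model.SGDClassifier(alpha=0.01, n_iter=20)  # now the defaults
    >>> clf = clf.fit([[0., 0.], [1., 1.]], [0, 1])
    >>> clf.predict([[2., 2.]])
    array([1])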
@@ -610,7 +610,7 @@ class SGDRegressor(BaseSGD, RegressorMixin, SelectorMixin):
     >>> X = np.random.randn(n_samples, n_features)
     >>> clf = linear_model.SGDRegressor()
     >>> clf.fit(X, y)
-    SGDRegressor(alpha=0.0001, epsilon=0.1, eta0=0.01, fit_intercept=True,
+    SGDRegressor(alpha=0.01, epsilon=0.1, eta0=0.01, fit_intercept=True,
            learning_rate='invscaling', loss='squared_loss', n_iter=5, p=None,
            penalty='l2', power_t=0.25, rho=0.85, seed=0, shuffle=False,
            verbose=0, warm_start=False)
@@ -620,7 +620,7 @@ class SGDRegressor(BaseSGD, RegressorMixin, SelectorMixin):
     Ridge, ElasticNet, Lasso, SVR
 
     """
-    def __init__(self, loss="squared_loss", penalty="l2", alpha=0.0001,
+    def __init__(self, loss="squared_loss", penalty="l2", alpha=0.01,
                  rho=0.85, fit_intercept=True, n_iter=5, shuffle=False, verbose=0,
                  epsilon=0.1, p=None, seed=0, learning_rate="invscaling", eta0=0.01,
                  power_t=0.25, warm_start=False):
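Note that for the regressor only alpha moves; n_iter stays at 5. A quick smoke-test sketch with the new default written out explicitly, mirroring the docstring example above (illustrative only; no outputs asserted):

    >>> import numpy as np
    >>> from sklearn import linear_model
    >>> rng = np.random.RandomState(0)
    >>> X, y = rng.randn(20, 5), rng.randn(20)
    >>> reg = linear_model.SGDRegressor(alpha=0.01)  # matches the new default
    >>> reg = reg.fit(X, y)
    >>> y_pred = reg.predict(X)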