@@ -21,6 +21,7 @@
 from ..base import RegressorMixin, MultiOutputMixin
 # mypy error: Module 'sklearn.utils' has no attribute 'arrayfuncs'
 from ..utils import arrayfuncs, as_float_array  # type: ignore
+from ..utils import check_random_state
 from ..model_selection import check_cv
 from ..exceptions import ConvergenceWarning
 
@@ -800,6 +801,16 @@ class Lars(MultiOutputMixin, RegressorMixin, LinearModel):
         setting ``fit_path`` to ``False`` will lead to a speedup, especially
         with a small alpha.
 
+    jitter : float, default=None
+        Upper bound on a uniform noise parameter to be added to the
+        `y` values, to satisfy the model's assumption of
+        one-at-a-time computations. Might help with stability.
+
+    random_state : int, RandomState instance or None (default)
+        Determines random number generation for jittering. Pass an int
+        for reproducible output across multiple function calls.
+        See :term:`Glossary <random_state>`. Ignored if `jitter` is None.
+
     Attributes
     ----------
     alphas_ : array-like of shape (n_alphas + 1,) | list of n_targets such \
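
The two new parameters work together: a minimal usage sketch, assuming the docstring and constructor changes in this diff are applied (the toy data below is illustrative only):

    import numpy as np
    from sklearn.linear_model import Lars

    X = np.array([[-1.0, 1.0], [0.0, 0.0], [1.0, 1.0]])
    y = np.array([-1.11, 0.0, -1.11])

    # jitter is the upper bound of the uniform noise added to y;
    # random_state seeds that noise so repeated fits are reproducible.
    reg = Lars(jitter=1e-4, random_state=0)
    reg.fit(X, y)
    print(reg.coef_)
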
@@ -846,7 +857,8 @@ class Lars(MultiOutputMixin, RegressorMixin, LinearModel):
 
     def __init__(self, fit_intercept=True, verbose=False, normalize=True,
                  precompute='auto', n_nonzero_coefs=500,
-                 eps=np.finfo(np.float).eps, copy_X=True, fit_path=True):
+                 eps=np.finfo(np.float).eps, copy_X=True, fit_path=True,
+                 jitter=None, random_state=None):
         self.fit_intercept = fit_intercept
         self.verbose = verbose
         self.normalize = normalize
@@ -855,6 +867,8 @@ def __init__(self, fit_intercept=True, verbose=False, normalize=True,
         self.eps = eps
         self.copy_X = copy_X
         self.fit_path = fit_path
+        self.jitter = jitter
+        self.random_state = random_state
 
     @staticmethod
     def _get_gram(precompute, X, y):
@@ -954,6 +968,12 @@ def fit(self, X, y, Xy=None):
         else:
             max_iter = self.max_iter
 
+        if self.jitter is not None:
+            rng = check_random_state(self.random_state)
+
+            noise = rng.uniform(high=self.jitter, size=len(y))
+            y = y + noise
+
         self._fit(X, y, max_iter=max_iter, alpha=alpha, fit_path=self.fit_path,
                   Xy=Xy)
 
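
The new branch in ``fit`` only runs when ``jitter`` is not None. A standalone sketch of what it does, with hypothetical values standing in for ``self.jitter``, ``self.random_state`` and ``y``:

    import numpy as np
    from sklearn.utils import check_random_state

    jitter, random_state = 1e-4, 0
    y = np.array([-2.0, -1.0, 0.0, 1.0, 2.0])

    # check_random_state turns None, an int seed, or a RandomState into a RandomState.
    rng = check_random_state(random_state)
    # Uniform noise in [0, jitter), one value per target, added to y before fitting.
    noise = rng.uniform(high=jitter, size=len(y))
    y = y + noise
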
@@ -1031,6 +1051,16 @@ class LassoLars(Lars):
         algorithm are typically in congruence with the solution of the
         coordinate descent Lasso estimator.
 
+    jitter : float, default=None
+        Upper bound on a uniform noise parameter to be added to the
+        `y` values, to satisfy the model's assumption of
+        one-at-a-time computations. Might help with stability.
+
+    random_state : int, RandomState instance or None (default)
+        Determines random number generation for jittering. Pass an int
+        for reproducible output across multiple function calls.
+        See :term:`Glossary <random_state>`. Ignored if `jitter` is None.
+
     Attributes
     ----------
     alphas_ : array-like of shape (n_alphas + 1,) | list of n_targets such \
@@ -1083,7 +1113,7 @@ class LassoLars(Lars):
     def __init__(self, alpha=1.0, fit_intercept=True, verbose=False,
                  normalize=True, precompute='auto', max_iter=500,
                  eps=np.finfo(np.float).eps, copy_X=True, fit_path=True,
-                 positive=False):
+                 positive=False, jitter=None, random_state=None):
         self.alpha = alpha
         self.fit_intercept = fit_intercept
         self.max_iter = max_iter
@@ -1094,6 +1124,8 @@ def __init__(self, alpha=1.0, fit_intercept=True, verbose=False,
         self.copy_X = copy_X
         self.eps = eps
         self.fit_path = fit_path
+        self.jitter = jitter
+        self.random_state = random_state
 
 
 ###############################################################################
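
With the LassoLars changes above applied as well, the same ``random_state`` produces the same jittered targets and therefore identical coefficients across fits; a minimal sketch, with illustrative data and a hypothetical ``alpha``:

    import numpy as np
    from sklearn.linear_model import LassoLars

    X = np.array([[0.0, 0.0], [1.0, 1.0], [2.0, 2.0], [3.0, 3.0]])
    y = np.array([0.0, 1.1, 2.0, 3.1])

    # Same seed -> same uniform noise added to y -> identical solutions.
    a = LassoLars(alpha=0.01, jitter=1e-6, random_state=42).fit(X, y)
    b = LassoLars(alpha=0.01, jitter=1e-6, random_state=42).fit(X, y)
    assert np.allclose(a.coef_, b.coef_)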