Here is the last scheduled run (from 1 day ago) that passed:
and here is a more recent run that failed (all CI is failing today):
FAILED tests/test_common.py::test_estimators[LinearRegression(positive=True)-check_sample_weight_equivalence_on_dense_data] - AssertionError:
FAILED utils/tests/test_estimator_checks.py::test_check_estimator_clones - AssertionError:
= 2 failed, 34214 passed, 4182 skipped, 174 xfailed, 66 xpassed, 4252 warnings in 1489.21s (0:24:49) =
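For context, here is a minimal sketch of what the failing `check_sample_weight_equivalence_on_dense_data` check asserts; the data below is illustrative only, not the exact arrays used by the common test:

```python
# Sketch of the failing common check: fitting with integer sample_weight
# should be equivalent to fitting on data whose rows are repeated
# (weight > 1) or dropped (weight == 0).
import numpy as np
from numpy.testing import assert_allclose
from sklearn.linear_model import LinearRegression

rng = np.random.RandomState(42)
X = rng.rand(15, 10)
y = rng.randint(0, 3, size=15)
sw = rng.randint(0, 5, size=15)  # integer weights, some of them zero

est_weighted = LinearRegression(positive=True).fit(X, y, sample_weight=sw)
est_repeated = LinearRegression(positive=True).fit(
    np.repeat(X, sw, axis=0), np.repeat(y, sw)
)

# The common test asserts that both fitted models predict (nearly) the same
# values (rtol=1e-7 / atol=1e-9); that assertion is what fails in the CI log.
assert_allclose(
    est_weighted.predict(X), est_repeated.predict(X), rtol=1e-7, atol=1e-9
)
```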
Full failure log:
2025-03-28T06:36:32.3433619Z =================================== FAILURES ===================================
2025-03-28T06:36:32.3434358Z _ test_estimators[LinearRegression(positive=True)-check_sample_weight_equivalence_on_dense_data] _
2025-03-28T06:36:32.3434613Z
2025-03-28T06:36:32.3434838Z estimator = LinearRegression(positive=True)
2025-03-28T06:36:32.3435117Z check = functools.partial(<function check_sample_weight_equivalence_on_dense_data at 0xd8591e88>, 'LinearRegression')
2025-03-28T06:36:32.3435705Z request = <FixtureRequest for <Function test_estimators[LinearRegression(positive=True)-check_sample_weight_equivalence_on_dense_data]>>
2025-03-28T06:36:32.3435878Z
2025-03-28T06:36:32.3436047Z @parametrize_with_checks(
2025-03-28T06:36:32.3436274Z list(_tested_estimators()), expected_failed_checks=_get_expected_failed_checks
2025-03-28T06:36:32.3436498Z )
2025-03-28T06:36:32.3436684Z def test_estimators(estimator, check, request):
2025-03-28T06:36:32.3436909Z # Common tests for estimator instances
2025-03-28T06:36:32.3437101Z with ignore_warnings(
2025-03-28T06:36:32.3437316Z category=(FutureWarning, ConvergenceWarning, UserWarning, LinAlgWarning)
2025-03-28T06:36:32.3437521Z ):
2025-03-28T06:36:32.3437708Z > check(estimator)
2025-03-28T06:36:32.3437793Z
2025-03-28T06:36:32.3438019Z check = functools.partial(<function check_sample_weight_equivalence_on_dense_data at 0xd8591e88>, 'LinearRegression')
2025-03-28T06:36:32.3438293Z estimator = LinearRegression(positive=True)
2025-03-28T06:36:32.3438559Z request = <FixtureRequest for <Function test_estimators[LinearRegression(positive=True)-check_sample_weight_equivalence_on_dense_data]>>
2025-03-28T06:36:32.3438707Z
2025-03-28T06:36:32.3439155Z /io/sklearn/tests/test_common.py:122:
2025-03-28T06:36:32.3439405Z _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
2025-03-28T06:36:32.3439768Z /io/sklearn/utils/estimator_checks.py:1570: in check_sample_weight_equivalence_on_dense_data
2025-03-28T06:36:32.3440046Z _check_sample_weight_equivalence(name, estimator_orig, sparse_container=None)
2025-03-28T06:36:32.3440296Z estimator_orig = LinearRegression(positive=True)
2025-03-28T06:36:32.3440498Z name = 'LinearRegression'
2025-03-28T06:36:32.3440774Z /io/sklearn/utils/_testing.py:145: in wrapper
2025-03-28T06:36:32.3440988Z return fn(*args, **kwargs)
2025-03-28T06:36:32.3441218Z args = ('LinearRegression', LinearRegression(positive=True))
2025-03-28T06:36:32.3441452Z fn = <function _check_sample_weight_equivalence at 0xd8591de8>
2025-03-28T06:36:32.3441744Z kwargs = {'sparse_container': None}
2025-03-28T06:36:32.3441952Z self = _IgnoreWarnings(record=True)
2025-03-28T06:36:32.3442307Z /io/sklearn/utils/estimator_checks.py:1566: in _check_sample_weight_equivalence
2025-03-28T06:36:32.3442654Z assert_allclose_dense_sparse(X_pred1, X_pred2, err_msg=err_msg)
2025-03-28T06:36:32.3442905Z X = array([[0.37454012, 0.95071431, 0.73199394, 0.59865848, 0.15601864,
2025-03-28T06:36:32.3443158Z 0.15599452, 0.05808361, 0.86617615, 0.6011..., 0.98663958, 0.3742708 , 0.37064215, 0.81279957,
2025-03-28T06:36:32.3443423Z 0.94724858, 0.98600106, 0.75337819, 0.37625959, 0.08350072]])
2025-03-28T06:36:32.3443662Z X_pred1 = array([ 8.88178420e-16, 1.00000000e+00, 2.00000000e+00, 1.18549798e+00,
2025-03-28T06:36:32.3443929Z 4.06241761e+00, 1.00000000e+00, 2...5767e+00, 2.00000000e+00, -2.79936287e-02, -8.90642835e-01,
2025-03-28T06:36:32.3444177Z -8.00809991e-01, 1.00000000e+00, 1.00000000e+00])
2025-03-28T06:36:32.3444412Z X_pred2 = array([0. , 1. , 2. , 0.94186541, 1.72670876,
2025-03-28T06:36:32.3444772Z 1. , 2. , 2. , 1.8723887 , 2. ,
2025-03-28T06:36:32.3444987Z 1.50877777, 0.76107365, 1.70933257, 1. , 1. ])
2025-03-28T06:36:32.3445218Z X_repeated = array([[0.37454012, 0.95071431, 0.73199394, 0.59865848, 0.15601864,
2025-03-28T06:36:32.3445494Z 0.15599452, 0.05808361, 0.86617615, 0.6011..., 0.98663958, 0.3742708 , 0.37064215, 0.81279957,
2025-03-28T06:36:32.3445740Z 0.94724858, 0.98600106, 0.75337819, 0.37625959, 0.08350072]])
2025-03-28T06:36:32.3445976Z X_weighted = array([[0.60754485, 0.17052412, 0.06505159, 0.94888554, 0.96563203,
2025-03-28T06:36:32.3446499Z 0.80839735, 0.30461377, 0.09767211, 0.6842..., 0.69673717, 0.62894285, 0.87747201, 0.73507104,
2025-03-28T06:36:32.3446765Z 0.80348093, 0.28203457, 0.17743954, 0.75061475, 0.80683474]])
2025-03-28T06:36:32.3447241Z err_msg = 'Comparing the output of LinearRegression.predict revealed that fitting with `sample_weight` is not equivalent to fitting with removed or repeated data points.'
2025-03-28T06:36:32.3447553Z estimator_orig = LinearRegression(positive=True)
2025-03-28T06:36:32.3448930Z estimator_repeated = LinearRegression(positive=True)
2025-03-28T06:36:32.3449323Z estimator_weighted = LinearRegression(positive=True)
2025-03-28T06:36:32.3449572Z method = 'predict'
2025-03-28T06:36:32.3449847Z n_samples = 15
2025-03-28T06:36:32.3450076Z name = 'LinearRegression'
2025-03-28T06:36:32.3450329Z rng = RandomState(MT19937) at 0xCB6EECE8
2025-03-28T06:36:32.3450580Z sparse_container = None
2025-03-28T06:36:32.3450850Z sw = array([3, 4, 0, 3, 1, 0, 4, 4, 0, 3, 0, 0, 3, 2, 0])
2025-03-28T06:36:32.3451124Z y = array([0, 1, 2, 2, 1, 1, 2, 2, 1, 2, 0, 0, 1, 1, 1])
2025-03-28T06:36:32.3451409Z y_repeated = array([0, 0, 0, 1, 1, 1, 2, 2, 2, 2, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2025-03-28T06:36:32.3451823Z 1, 1, 1, 1, 1])
2025-03-28T06:36:32.3452390Z y_weighted = array([1, 2, 1, 2, 1, 1, 2, 1, 0, 2, 0, 2, 0, 1, 1])
2025-03-28T06:36:32.3467337Z /io/sklearn/utils/_testing.py:283: in assert_allclose_dense_sparse
2025-03-28T06:36:32.3468147Z assert_allclose(x, y, rtol=rtol, atol=atol, err_msg=err_msg)
2025-03-28T06:36:32.3468478Z atol = 1e-09
2025-03-28T06:36:32.3468965Z err_msg = 'Comparing the output of LinearRegression.predict revealed that fitting with `sample_weight` is not equivalent to fitting with removed or repeated data points.'
2025-03-28T06:36:32.3469366Z rtol = 1e-07
2025-03-28T06:36:32.3469659Z x = array([ 8.88178420e-16, 1.00000000e+00, 2.00000000e+00, 1.18549798e+00,
2025-03-28T06:36:32.3470005Z 4.06241761e+00, 1.00000000e+00, 2...5767e+00, 2.00000000e+00, -2.79936287e-02, -8.90642835e-01,
2025-03-28T06:36:32.3470354Z -8.00809991e-01, 1.00000000e+00, 1.00000000e+00])
2025-03-28T06:36:32.3470648Z y = array([0. , 1. , 2. , 0.94186541, 1.72670876,
2025-03-28T06:36:32.3470947Z 1. , 2. , 2. , 1.8723887 , 2. ,
2025-03-28T06:36:32.3471226Z 1.50877777, 0.76107365, 1.70933257, 1. , 1. ])
2025-03-28T06:36:32.3471533Z _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
2025-03-28T06:36:32.3471702Z
2025-03-28T06:36:32.3472165Z actual = array([ 8.88178420e-16, 1.00000000e+00, 2.00000000e+00, 1.18549798e+00,
2025-03-28T06:36:32.3472656Z 4.06241761e+00, 1.00000000e+00, 2...5767e+00, 2.00000000e+00, -2.79936287e-02, -8.90642835e-01,
2025-03-28T06:36:32.3473239Z -8.00809991e-01, 1.00000000e+00, 1.00000000e+00])
2025-03-28T06:36:32.3473561Z desired = array([0. , 1. , 2. , 0.94186541, 1.72670876,
2025-03-28T06:36:32.3473863Z 1. , 2. , 2. , 1.8723887 , 2. ,
2025-03-28T06:36:32.3474355Z 1.50877777, 0.76107365, 1.70933257, 1. , 1. ])
2025-03-28T06:36:32.3474896Z rtol = 1e-07, atol = 1e-09, equal_nan = True
2025-03-28T06:36:32.3475293Z err_msg = 'Comparing the output of LinearRegression.predict revealed that fitting with `sample_weight` is not equivalent to fitting with removed or repeated data points.'
2025-03-28T06:36:32.3475668Z verbose = True
2025-03-28T06:36:32.3475847Z
2025-03-28T06:36:32.3476109Z def assert_allclose(
2025-03-28T06:36:32.3476425Z actual, desired, rtol=None, atol=0.0, equal_nan=True, err_msg="", verbose=True
2025-03-28T06:36:32.3476851Z ):
2025-03-28T06:36:32.3477457Z """dtype-aware variant of numpy.testing.assert_allclose
2025-03-28T06:36:32.3477743Z
2025-03-28T06:36:32.3478184Z This variant introspects the least precise floating point dtype
2025-03-28T06:36:32.3478502Z in the input argument and automatically sets the relative tolerance
2025-03-28T06:36:32.3478839Z parameter to 1e-4 float32 and use 1e-7 otherwise (typically float64
2025-03-28T06:36:32.3479134Z in scikit-learn).
2025-03-28T06:36:32.3479381Z
2025-03-28T06:36:32.3479837Z `atol` is always left to 0. by default. It should be adjusted manually
2025-03-28T06:36:32.3480176Z to an assertion-specific value in case there are null values expected
2025-03-28T06:36:32.3480467Z in `desired`.
2025-03-28T06:36:32.3480718Z
2025-03-28T06:36:32.3480995Z The aggregate tolerance is `atol + rtol * abs(desired)`.
2025-03-28T06:36:32.3481285Z
2025-03-28T06:36:32.3481704Z Parameters
2025-03-28T06:36:32.3481965Z ----------
2025-03-28T06:36:32.3482420Z actual : array_like
2025-03-28T06:36:32.3483027Z Array obtained.
2025-03-28T06:36:32.3483323Z desired : array_like
2025-03-28T06:36:32.3483598Z Array desired.
2025-03-28T06:36:32.3483881Z rtol : float, optional, default=None
2025-03-28T06:36:32.3484165Z Relative tolerance.
2025-03-28T06:36:32.3484462Z If None, it is set based on the provided arrays' dtypes.
2025-03-28T06:36:32.3484791Z atol : float, optional, default=0.
2025-03-28T06:36:32.3485072Z Absolute tolerance.
2025-03-28T06:36:32.3485449Z equal_nan : bool, optional, default=True
2025-03-28T06:36:32.3485916Z If True, NaNs will compare equal.
2025-03-28T06:36:32.3486379Z err_msg : str, optional, default=''
2025-03-28T06:36:32.3486669Z The error message to be printed in case of failure.
2025-03-28T06:36:32.3486959Z verbose : bool, optional, default=True
2025-03-28T06:36:32.3487445Z If True, the conflicting values are appended to the error message.
2025-03-28T06:36:32.3487721Z
2025-03-28T06:36:32.3487982Z Raises
2025-03-28T06:36:32.3488229Z ------
2025-03-28T06:36:32.3488486Z AssertionError
2025-03-28T06:36:32.3490170Z If actual and desired are not equal up to specified precision.
2025-03-28T06:36:32.3490710Z
2025-03-28T06:36:32.3491058Z See Also
2025-03-28T06:36:32.3491386Z --------
2025-03-28T06:36:32.3491708Z numpy.testing.assert_allclose
2025-03-28T06:36:32.3492016Z
2025-03-28T06:36:32.3492308Z Examples
2025-03-28T06:36:32.3493161Z --------
2025-03-28T06:36:32.3493425Z >>> import numpy as np
2025-03-28T06:36:32.3493712Z >>> from sklearn.utils._testing import assert_allclose
2025-03-28T06:36:32.3493988Z >>> x = [1e-5, 1e-3, 1e-1]
2025-03-28T06:36:32.3494270Z >>> y = np.arccos(np.cos(x))
2025-03-28T06:36:32.3494725Z >>> assert_allclose(x, y, rtol=1e-5, atol=0)
2025-03-28T06:36:32.3495044Z >>> a = np.full(shape=10, fill_value=1e-5, dtype=np.float32)
2025-03-28T06:36:32.3495339Z >>> assert_allclose(a, 1e-5)
2025-03-28T06:36:32.3495595Z """
2025-03-28T06:36:32.3496358Z dtypes = []
2025-03-28T06:36:32.3496662Z
2025-03-28T06:36:32.3496955Z actual, desired = np.asanyarray(actual), np.asanyarray(desired)
2025-03-28T06:36:32.3497629Z dtypes = [actual.dtype, desired.dtype]
2025-03-28T06:36:32.3497899Z
2025-03-28T06:36:32.3498178Z if rtol is None:
2025-03-28T06:36:32.3498471Z rtols = [1e-4 if dtype == np.float32 else 1e-7 for dtype in dtypes]
2025-03-28T06:36:32.3498764Z rtol = max(rtols)
2025-03-28T06:36:32.3499011Z
2025-03-28T06:36:32.3604475Z > np_assert_allclose(
2025-03-28T06:36:32.3607071Z actual,
2025-03-28T06:36:32.3608138Z desired,
2025-03-28T06:36:32.3608886Z rtol=rtol,
2025-03-28T06:36:32.3609217Z atol=atol,
2025-03-28T06:36:32.3625173Z equal_nan=equal_nan,
2025-03-28T06:36:32.3639658Z err_msg=err_msg,
2025-03-28T06:36:32.3640151Z verbose=verbose,
2025-03-28T06:36:32.3640425Z )
2025-03-28T06:36:32.3640827Z E AssertionError:
2025-03-28T06:36:32.3641237Z E Not equal to tolerance rtol=1e-07, atol=1e-09
2025-03-28T06:36:32.3641778Z E Comparing the output of LinearRegression.predict revealed that fitting with `sample_weight` is not equivalent to fitting with removed or repeated data points.
2025-03-28T06:36:32.3642255Z E Mismatched elements: 6 / 15 (40%)
2025-03-28T06:36:32.3642821Z E Max absolute difference among violations: 2.51014256
2025-03-28T06:36:32.3643306Z E Max relative difference among violations: 2.17024526
2025-03-28T06:36:32.3643773Z E ACTUAL: array([ 8.881784e-16, 1.000000e+00, 2.000000e+00, 1.185498e+00,
2025-03-28T06:36:32.3644416Z E 4.062418e+00, 1.000000e+00, 2.000000e+00, 2.000000e+00,
2025-03-28T06:36:32.3644878Z E 4.105658e+00, 2.000000e+00, -2.799363e-02, -8.906428e-01,
2025-03-28T06:36:32.3645289Z E -8.008100e-01, 1.000000e+00, 1.000000e+00])
2025-03-28T06:36:32.3645729Z E DESIRED: array([0. , 1. , 2. , 0.941865, 1.726709, 1. ,
2025-03-28T06:36:32.3646166Z E 2. , 2. , 1.872389, 2. , 1.508778, 0.761074,
2025-03-28T06:36:32.3646542Z E 1.709333, 1. , 1. ])
2025-03-28T06:36:32.3646727Z
2025-03-28T06:36:32.3647013Z actual = array([ 8.88178420e-16, 1.00000000e+00, 2.00000000e+00, 1.18549798e+00,
2025-03-28T06:36:32.3647353Z 4.06241761e+00, 1.00000000e+00, 2...5767e+00, 2.00000000e+00, -2.79936287e-02, -8.90642835e-01,
2025-03-28T06:36:32.3647675Z -8.00809991e-01, 1.00000000e+00, 1.00000000e+00])
2025-03-28T06:36:32.3647970Z atol = 1e-09
2025-03-28T06:36:32.3648247Z desired = array([0. , 1. , 2. , 0.94186541, 1.72670876,
2025-03-28T06:36:32.3648536Z 1. , 2. , 2. , 1.8723887 , 2. ,
2025-03-28T06:36:32.3648814Z 1.50877777, 0.76107365, 1.70933257, 1. , 1. ])
2025-03-28T06:36:32.3649119Z dtypes = [dtype('float64'), dtype('float64')]
2025-03-28T06:36:32.3650674Z equal_nan = True
2025-03-28T06:36:32.3664015Z err_msg = 'Comparing the output of LinearRegression.predict revealed that fitting with `sample_weight` is not equivalent to fitting with removed or repeated data points.'
2025-03-28T06:36:32.3664453Z rtol = 1e-07
2025-03-28T06:36:32.3664740Z verbose = True
2025-03-28T06:36:32.3664894Z
2025-03-28T06:36:32.3665331Z /io/sklearn/utils/_testing.py:237: AssertionError
2025-03-28T06:36:32.3665827Z _________________________ test_check_estimator_clones __________________________
2025-03-28T06:36:32.3666186Z
2025-03-28T06:36:32.3666476Z def test_check_estimator_clones():
2025-03-28T06:36:32.3666804Z # check that check_estimator doesn't modify the estimator it receives
2025-03-28T06:36:32.3667086Z
2025-03-28T06:36:32.3667500Z iris = load_iris()
2025-03-28T06:36:32.3667744Z
2025-03-28T06:36:32.3667984Z for Estimator in [
2025-03-28T06:36:32.3668245Z GaussianMixture,
2025-03-28T06:36:32.3668775Z LinearRegression,
2025-03-28T06:36:32.3669031Z SGDClassifier,
2025-03-28T06:36:32.3669267Z PCA,
2025-03-28T06:36:32.3669511Z MiniBatchKMeans,
2025-03-28T06:36:32.3669766Z ]:
2025-03-28T06:36:32.3670005Z # without fitting
2025-03-28T06:36:32.3670276Z with ignore_warnings(category=ConvergenceWarning):
2025-03-28T06:36:32.3670550Z est = Estimator()
2025-03-28T06:36:32.3670803Z set_random_state(est)
2025-03-28T06:36:32.3671081Z old_hash = joblib.hash(est)
2025-03-28T06:36:32.3671475Z > check_estimator(
2025-03-28T06:36:32.3671766Z est, expected_failed_checks=_get_expected_failed_checks(est)
2025-03-28T06:36:32.3672037Z )
2025-03-28T06:36:32.3672196Z
2025-03-28T06:36:32.3672459Z Estimator = <class 'sklearn.linear_model._base.LinearRegression'>
2025-03-28T06:36:32.3672931Z est = LinearRegression()
2025-03-28T06:36:32.3673224Z iris = {'data': array([[5.1, 3.5, 1.4, 0.2],
2025-03-28T06:36:32.3673509Z [4.9, 3. , 1.4, 0.2],
2025-03-28T06:36:32.3673763Z [4.7, 3.2, 1.3, 0.2],
2025-03-28T06:36:32.3674269Z [4.6, 3.1, 1.5,... width (cm)', 'petal length (cm)', 'petal width (cm)'], 'filename': 'iris.csv', 'data_module': 'sklearn.datasets.data'}
2025-03-28T06:36:32.3674627Z old_hash = 'fdcbee8ed611695d1e19a9bdabd615ac'
2025-03-28T06:36:32.3674814Z
2025-03-28T06:36:32.3675204Z /io/sklearn/utils/tests/test_estimator_checks.py:919:
2025-03-28T06:36:32.3675540Z _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
2025-03-28T06:36:32.3675945Z /io/sklearn/utils/_param_validation.py:218: in wrapper
2025-03-28T06:36:32.3676248Z return func(*args, **kwargs)
2025-03-28T06:36:32.3676516Z args = (LinearRegression(),)
2025-03-28T06:36:32.3676790Z func = <function check_estimator at 0xd8591668>
2025-03-28T06:36:32.3677211Z func_sig = <Signature (estimator=None, generate_only=False, *, legacy: 'bool' = True, expected_failed_checks: 'dict[str, str] | N...al['warn'] | None" = 'warn', on_fail: "Literal['raise', 'warn'] | None" = 'raise', callback: 'Callable | None' = None)>
2025-03-28T06:36:32.3677607Z global_skip_validation = False
2025-03-28T06:36:32.3677881Z kwargs = {'expected_failed_checks': {}}
2025-03-28T06:36:32.3678234Z parameter_constraints = {'callback': [<built-in function callable>, None], 'expected_failed_checks': [<class 'dict'>, None], 'generate_only': ['boolean'], 'legacy': ['boolean'], ...}
2025-03-28T06:36:32.3678659Z params = {'callback': None, 'estimator': LinearRegression(), 'expected_failed_checks': {}, 'generate_only': False, ...}
2025-03-28T06:36:32.3678985Z prefer_skip_nested_validation = False
2025-03-28T06:36:32.3679252Z to_ignore = ['self', 'cls']
2025-03-28T06:36:32.3679647Z /io/sklearn/utils/estimator_checks.py:856: in check_estimator
2025-03-28T06:36:32.3679964Z check(estimator)
2025-03-28T06:36:32.3680221Z callback = None
2025-03-28T06:36:32.3680536Z check = functools.partial(<function check_sample_weight_equivalence_on_dense_data at 0xd8591e88>, 'LinearRegression')
2025-03-28T06:36:32.3680953Z check_result = {'check_name': 'check_sample_weight_equivalence_on_dense_data', 'estimator': LinearRegression(), 'exception': None, 'expected_to_fail': False, ...}
2025-03-28T06:36:32.3681402Z estimator = LinearRegression(positive=True)
2025-03-28T06:36:32.3681697Z expected_failed_checks = {}
2025-03-28T06:36:32.3681899Z generate_only = False
2025-03-28T06:36:32.3682090Z legacy = True
2025-03-28T06:36:32.3682266Z name = 'LinearRegression'
2025-03-28T06:36:32.3682442Z on_fail = 'raise'
2025-03-28T06:36:32.3682732Z on_skip = 'warn'
2025-03-28T06:36:32.3682922Z reason = 'Check is not expected to fail'
2025-03-28T06:36:32.3683274Z test_can_fail = False
2025-03-28T06:36:32.3683596Z test_results = [{'check_name': 'check_estimator_cloneable', 'estimator': LinearRegression(), 'exception': None, 'expected_to_fail': F...k_no_attributes_set_in_init', 'estimator': LinearRegression(), 'exception': None, 'expected_to_fail': False, ...}, ...]
2025-03-28T06:36:32.3684106Z /io/sklearn/utils/estimator_checks.py:1570: in check_sample_weight_equivalence_on_dense_data
2025-03-28T06:36:32.3684400Z _check_sample_weight_equivalence(name, estimator_orig, sparse_container=None)
2025-03-28T06:36:32.3684731Z estimator_orig = LinearRegression(positive=True)
2025-03-28T06:36:32.3684930Z name = 'LinearRegression'
2025-03-28T06:36:32.3685217Z /io/sklearn/utils/_testing.py:145: in wrapper
2025-03-28T06:36:32.3685439Z return fn(*args, **kwargs)
2025-03-28T06:36:32.3685650Z args = ('LinearRegression', LinearRegression(positive=True))
2025-03-28T06:36:32.3685891Z fn = <function _check_sample_weight_equivalence at 0xd8591de8>
2025-03-28T06:36:32.3686105Z kwargs = {'sparse_container': None}
2025-03-28T06:36:32.3686316Z self = _IgnoreWarnings(record=True)
2025-03-28T06:36:32.3686643Z /io/sklearn/utils/estimator_checks.py:1566: in _check_sample_weight_equivalence
2025-03-28T06:36:32.3686902Z assert_allclose_dense_sparse(X_pred1, X_pred2, err_msg=err_msg)
2025-03-28T06:36:32.3687138Z X = array([[0.37454012, 0.95071431, 0.73199394, 0.59865848, 0.15601864,
2025-03-28T06:36:32.3687418Z 0.15599452, 0.05808361, 0.86617615, 0.6011..., 0.98663958, 0.3742708 , 0.37064215, 0.81279957,
2025-03-28T06:36:32.3687667Z 0.94724858, 0.98600106, 0.75337819, 0.37625959, 0.08350072]])
2025-03-28T06:36:32.3687906Z X_pred1 = array([ 8.88178420e-16, 1.00000000e+00, 2.00000000e+00, 1.18549798e+00,
2025-03-28T06:36:32.3688173Z 4.06241761e+00, 1.00000000e+00, 2...5767e+00, 2.00000000e+00, -2.79936287e-02, -8.90642835e-01,
2025-03-28T06:36:32.3688439Z -8.00809991e-01, 1.00000000e+00, 1.00000000e+00])
2025-03-28T06:36:32.3688661Z X_pred2 = array([0. , 1. , 2. , 0.94186541, 1.72670876,
2025-03-28T06:36:32.3688871Z 1. , 2. , 2. , 1.8723887 , 2. ,
2025-03-28T06:36:32.3689077Z 1.50877777, 0.76107365, 1.70933257, 1. , 1. ])
2025-03-28T06:36:32.3689323Z X_repeated = array([[0.37454012, 0.95071431, 0.73199394, 0.59865848, 0.15601864,
2025-03-28T06:36:32.3689588Z 0.15599452, 0.05808361, 0.86617615, 0.6011..., 0.98663958, 0.3742708 , 0.37064215, 0.81279957,
2025-03-28T06:36:32.3689834Z 0.94724858, 0.98600106, 0.75337819, 0.37625959, 0.08350072]])
2025-03-28T06:36:32.3690073Z X_weighted = array([[0.60754485, 0.17052412, 0.06505159, 0.94888554, 0.96563203,
2025-03-28T06:36:32.3690355Z 0.80839735, 0.30461377, 0.09767211, 0.6842..., 0.69673717, 0.62894285, 0.87747201, 0.73507104,
2025-03-28T06:36:32.3690606Z 0.80348093, 0.28203457, 0.17743954, 0.75061475, 0.80683474]])
2025-03-28T06:36:32.3690894Z err_msg = 'Comparing the output of LinearRegression.predict revealed that fitting with `sample_weight` is not equivalent to fitting with removed or repeated data points.'
2025-03-28T06:36:32.3691216Z estimator_orig = LinearRegression(positive=True)
2025-03-28T06:36:32.3691435Z estimator_repeated = LinearRegression(positive=True)
2025-03-28T06:36:32.3691653Z estimator_weighted = LinearRegression(positive=True)
2025-03-28T06:36:32.3691853Z method = 'predict'
2025-03-28T06:36:32.3692029Z n_samples = 15
2025-03-28T06:36:32.3692223Z name = 'LinearRegression'
2025-03-28T06:36:32.3692420Z rng = RandomState(MT19937) at 0xCD5304A8
2025-03-28T06:36:32.3692720Z sparse_container = None
2025-03-28T06:36:32.3692923Z sw = array([3, 4, 0, 3, 1, 0, 4, 4, 0, 3, 0, 0, 3, 2, 0])
2025-03-28T06:36:32.3693289Z y = array([0, 1, 2, 2, 1, 1, 2, 2, 1, 2, 0, 0, 1, 1, 1])
2025-03-28T06:36:32.3693514Z y_repeated = array([0, 0, 0, 1, 1, 1, 2, 2, 2, 2, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2025-03-28T06:36:32.4389210Z 1, 1, 1, 1, 1])
2025-03-28T06:36:32.4392227Z y_weighted = array([1, 2, 1, 2, 1, 1, 2, 1, 0, 2, 0, 2, 0, 1, 1])
2025-03-28T06:36:32.4394205Z /io/sklearn/utils/_testing.py:283: in assert_allclose_dense_sparse
2025-03-28T06:36:32.4395241Z assert_allclose(x, y, rtol=rtol, atol=atol, err_msg=err_msg)
2025-03-28T06:36:32.4395958Z atol = 1e-09
2025-03-28T06:36:32.4396339Z err_msg = 'Comparing the output of LinearRegression.predict revealed that fitting with `sample_weight` is not equivalent to fitting with removed or repeated data points.'
2025-03-28T06:36:32.4396710Z rtol = 1e-07
2025-03-28T06:36:32.4397020Z x = array([ 8.88178420e-16, 1.00000000e+00, 2.00000000e+00, 1.18549798e+00,
2025-03-28T06:36:32.4397420Z 4.06241761e+00, 1.00000000e+00, 2...5767e+00, 2.00000000e+00, -2.79936287e-02, -8.90642835e-01,
2025-03-28T06:36:32.4397769Z -8.00809991e-01, 1.00000000e+00, 1.00000000e+00])
2025-03-28T06:36:32.4398083Z y = array([0. , 1. , 2. , 0.94186541, 1.72670876,
2025-03-28T06:36:32.4398404Z 1. , 2. , 2. , 1.8723887 , 2. ,
2025-03-28T06:36:32.4398711Z 1.50877777, 0.76107365, 1.70933257, 1. , 1. ])
2025-03-28T06:36:32.4399020Z _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
2025-03-28T06:36:32.4399360Z
2025-03-28T06:36:32.4399684Z actual = array([ 8.88178420e-16, 1.00000000e+00, 2.00000000e+00, 1.18549798e+00,
2025-03-28T06:36:32.4400047Z 4.06241761e+00, 1.00000000e+00, 2...5767e+00, 2.00000000e+00, -2.79936287e-02, -8.90642835e-01,
2025-03-28T06:36:32.4400383Z -8.00809991e-01, 1.00000000e+00, 1.00000000e+00])
2025-03-28T06:36:32.4400717Z desired = array([0. , 1. , 2. , 0.94186541, 1.72670876,
2025-03-28T06:36:32.4401016Z 1. , 2. , 2. , 1.8723887 , 2. ,
2025-03-28T06:36:32.4401305Z 1.50877777, 0.76107365, 1.70933257, 1. , 1. ])
2025-03-28T06:36:32.4401599Z rtol = 1e-07, atol = 1e-09, equal_nan = True
2025-03-28T06:36:32.4401994Z err_msg = 'Comparing the output of LinearRegression.predict revealed that fitting with `sample_weight` is not equivalent to fitting with removed or repeated data points.'
2025-03-28T06:36:32.4402357Z verbose = True
2025-03-28T06:36:32.4402649Z
2025-03-28T06:36:32.4403197Z def assert_allclose(
2025-03-28T06:36:32.4403533Z actual, desired, rtol=None, atol=0.0, equal_nan=True, err_msg="", verbose=True
2025-03-28T06:36:32.4403835Z ):
2025-03-28T06:36:32.4404125Z """dtype-aware variant of numpy.testing.assert_allclose
2025-03-28T06:36:32.4404412Z
2025-03-28T06:36:32.4404713Z This variant introspects the least precise floating point dtype
2025-03-28T06:36:32.4405098Z in the input argument and automatically sets the relative tolerance
2025-03-28T06:36:32.4405430Z parameter to 1e-4 float32 and use 1e-7 otherwise (typically float64
2025-03-28T06:36:32.4405731Z in scikit-learn).
2025-03-28T06:36:32.4406156Z
2025-03-28T06:36:32.4406440Z `atol` is always left to 0. by default. It should be adjusted manually
2025-03-28T06:36:32.4406762Z to an assertion-specific value in case there are null values expected
2025-03-28T06:36:32.4407464Z in `desired`.
2025-03-28T06:36:32.4407766Z
2025-03-28T06:36:32.4408043Z The aggregate tolerance is `atol + rtol * abs(desired)`.
2025-03-28T06:36:32.4408313Z
2025-03-28T06:36:32.4408563Z Parameters
2025-03-28T06:36:32.4408818Z ----------
2025-03-28T06:36:32.4409095Z actual : array_like
2025-03-28T06:36:32.4409358Z Array obtained.
2025-03-28T06:36:32.4409820Z desired : array_like
2025-03-28T06:36:32.4410093Z Array desired.
2025-03-28T06:36:32.4410364Z rtol : float, optional, default=None
2025-03-28T06:36:32.4410653Z Relative tolerance.
2025-03-28T06:36:32.4410943Z If None, it is set based on the provided arrays' dtypes.
2025-03-28T06:36:32.4411236Z atol : float, optional, default=0.
2025-03-28T06:36:32.4411512Z Absolute tolerance.
2025-03-28T06:36:32.4411809Z equal_nan : bool, optional, default=True
2025-03-28T06:36:32.4412101Z If True, NaNs will compare equal.
2025-03-28T06:36:32.4412731Z err_msg : str, optional, default=''
2025-03-28T06:36:32.4413109Z The error message to be printed in case of failure.
2025-03-28T06:36:32.4413412Z verbose : bool, optional, default=True
2025-03-28T06:36:32.4413734Z If True, the conflicting values are appended to the error message.
2025-03-28T06:36:32.4414013Z
2025-03-28T06:36:32.4414256Z Raises
2025-03-28T06:36:32.4414514Z ------
2025-03-28T06:36:32.4414771Z AssertionError
2025-03-28T06:36:32.4415083Z If actual and desired are not equal up to specified precision.
2025-03-28T06:36:32.4415491Z
2025-03-28T06:36:32.4415753Z See Also
2025-03-28T06:36:32.4416185Z --------
2025-03-28T06:36:32.4416466Z numpy.testing.assert_allclose
2025-03-28T06:36:32.4416900Z
2025-03-28T06:36:32.4417154Z Examples
2025-03-28T06:36:32.4417419Z --------
2025-03-28T06:36:32.4417685Z >>> import numpy as np
2025-03-28T06:36:32.4418011Z >>> from sklearn.utils._testing import assert_allclose
2025-03-28T06:36:32.4418314Z >>> x = [1e-5, 1e-3, 1e-1]
2025-03-28T06:36:32.4418599Z >>> y = np.arccos(np.cos(x))
2025-03-28T06:36:32.4418899Z >>> assert_allclose(x, y, rtol=1e-5, atol=0)
2025-03-28T06:36:32.4419379Z >>> a = np.full(shape=10, fill_value=1e-5, dtype=np.float32)
2025-03-28T06:36:32.4420007Z >>> assert_allclose(a, 1e-5)
2025-03-28T06:36:32.4420262Z """
2025-03-28T06:36:32.4420502Z dtypes = []
2025-03-28T06:36:32.4420743Z
2025-03-28T06:36:32.4421010Z actual, desired = np.asanyarray(actual), np.asanyarray(desired)
2025-03-28T06:36:32.4421324Z dtypes = [actual.dtype, desired.dtype]
2025-03-28T06:36:32.4421576Z
2025-03-28T06:36:32.4421816Z if rtol is None:
2025-03-28T06:36:32.4422102Z rtols = [1e-4 if dtype == np.float32 else 1e-7 for dtype in dtypes]
2025-03-28T06:36:32.4422404Z rtol = max(rtols)
2025-03-28T06:36:32.4422867Z
2025-03-28T06:36:32.4423151Z > np_assert_allclose(
2025-03-28T06:36:32.4423412Z actual,
2025-03-28T06:36:32.4423660Z desired,
2025-03-28T06:36:32.4423929Z rtol=rtol,
2025-03-28T06:36:32.4424278Z atol=atol,
2025-03-28T06:36:32.4424545Z equal_nan=equal_nan,
2025-03-28T06:36:32.4424804Z err_msg=err_msg,
2025-03-28T06:36:32.4425069Z verbose=verbose,
2025-03-28T06:36:32.4425335Z )
2025-03-28T06:36:32.4425695Z E AssertionError:
2025-03-28T06:36:32.4426095Z E Not equal to tolerance rtol=1e-07, atol=1e-09
2025-03-28T06:36:32.4426645Z E Comparing the output of LinearRegression.predict revealed that fitting with `sample_weight` is not equivalent to fitting with removed or repeated data points.
2025-03-28T06:36:32.4427542Z E Mismatched elements: 6 / 15 (40%)
2025-03-28T06:36:32.4427985Z E Max absolute difference among violations: 2.51014256
2025-03-28T06:36:32.4428409Z E Max relative difference among violations: 2.17024526
2025-03-28T06:36:32.4428858Z E ACTUAL: array([ 8.881784e-16, 1.000000e+00, 2.000000e+00, 1.185498e+00,
2025-03-28T06:36:32.4429327Z E 4.062418e+00, 1.000000e+00, 2.000000e+00, 2.000000e+00,
2025-03-28T06:36:32.4429765Z E 4.105658e+00, 2.000000e+00, -2.799363e-02, -8.906428e-01,
2025-03-28T06:36:32.4430365Z E -8.008100e-01, 1.000000e+00, 1.000000e+00])
2025-03-28T06:36:32.4430805Z E DESIRED: array([0. , 1. , 2. , 0.941865, 1.726709, 1. ,
2025-03-28T06:36:32.4431250Z E 2. , 2. , 1.872389, 2. , 1.508778, 0.761074,
2025-03-28T06:36:32.4431622Z E 1.709333, 1. , 1. ])
2025-03-28T06:36:32.4431810Z
2025-03-28T06:36:32.4432416Z actual = array([ 8.88178420e-16, 1.00000000e+00, 2.00000000e+00, 1.18549798e+00,
2025-03-28T06:36:32.4433326Z 4.06241761e+00, 1.00000000e+00, 2...5767e+00, 2.00000000e+00, -2.79936287e-02, -8.90642835e-01,
2025-03-28T06:36:32.4433865Z -8.00809991e-01, 1.00000000e+00, 1.00000000e+00])
2025-03-28T06:36:32.4434473Z atol = 1e-09
2025-03-28T06:36:32.4434795Z desired = array([0. , 1. , 2. , 0.94186541, 1.72670876,
2025-03-28T06:36:32.4435122Z 1. , 2. , 2. , 1.8723887 , 2. ,
2025-03-28T06:36:32.4435605Z 1.50877777, 0.76107365, 1.70933257, 1. , 1. ])
2025-03-28T06:36:32.4435932Z dtypes = [dtype('float64'), dtype('float64')]
2025-03-28T06:36:32.4436253Z equal_nan = True
2025-03-28T06:36:32.4436639Z err_msg = 'Comparing the output of LinearRegression.predict revealed that fitting with `sample_weight` is not equivalent to fitting with removed or repeated data points.'
2025-03-28T06:36:32.4437185Z rtol = 1e-07
2025-03-28T06:36:32.4439658Z verbose = True
2025-03-28T06:36:32.4439971Z
2025-03-28T06:36:32.4440407Z /io/sklearn/utils/_testing.py:237: AssertionError
Looking at the software runtime info of each run, I only see two differences:
- the pip version;
- the CPU model.
All other dependencies seem to match, including the OpenBLAS version inspected by threadpoolctl (see the snippet below for one way to dump this info).
EDIT: this is wrong; the scipy version is not the same and I missed it.
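For reference, a minimal sketch of how the runtime info mentioned above can be dumped on each worker using scikit-learn's built-in helper; the comment about `scipy.optimize.nnls` reflects my understanding of why the scipy version is relevant for `positive=True`:

```python
# Print Python/dependency versions plus the threadpool/BLAS information
# gathered by threadpoolctl, so the two CI runs can be diffed side by side.
import sklearn

sklearn.show_versions()

# Note: LinearRegression(positive=True) solves a non-negative least squares
# problem (via scipy.optimize.nnls, to my understanding), so a scipy version
# difference between the runs is a plausible culprit for the changed results.
```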