[flake8]
./dask_ml/linear_model/glm.py:94:1624: W605 invalid escape sequence '\l'
./dask_ml/model_selection/_search.py:1148:17: W504 line break after binary operator
./tests/test_pca.py:376:30: W605 invalid escape sequence '\('
./tests/test_pca.py:378:25: W605 invalid escape sequence '\)'
./tests/test_pca.py:390:21: W605 invalid escape sequence '\('
./tests/test_pca.py:391:28: W605 invalid escape sequence '\)'
Exited with code 1
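For context on the lint errors above: W605 fires because sequences like '\l' and '\(' are not valid Python escape sequences, and W504 flags a line break placed after a binary operator. A minimal sketch of the usual fixes (the names below are illustrative, not the actual dask_ml code):

import re

ok_docstring = r"\lambda is the regularization strength"  # raw string keeps the backslash literal (fixes W605)
pattern = re.compile(r"noise variance \(sigma2\)")         # escape parens inside a raw string (fixes W605)

# W504: put the break before the binary operator, not after it.
first_term, second_term = 1, 2
total = (first_term
         + second_term)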
=================================== FAILURES ===================================
_________________________ test_pipeline_feature_union __________________________
    def test_pipeline_feature_union():
        iris = load_iris()
        X, y = iris.data, iris.target
        pca = PCA(random_state=0)
        kbest = SelectKBest()
        empty_union = FeatureUnion([("first", None), ("second", None)])
        empty_pipeline = Pipeline([("first", None), ("second", None)])
        scaling = Pipeline([("transform", ScalingTransformer())])
        svc = SVC(kernel="linear", random_state=0)
        pipe = Pipeline(
            [
                ("empty_pipeline", empty_pipeline),
                ("scaling", scaling),
                ("missing", None),
                (
                    "union",
                    FeatureUnion(
                        [
                            ("pca", pca),
                            ("missing", None),
                            ("kbest", kbest),
                            ("empty_union", empty_union),
                        ],
                        transformer_weights={"pca": 0.5},
                    ),
                ),
                ("svc", svc),
            ]
        )
        param_grid = dict(
            scaling__transform__factor=[1, 2],
            union__pca__n_components=[1, 2, 3],
            union__kbest__k=[1, 2],
            svc__C=[0.1, 1, 10],
        )
        gs = GridSearchCV(pipe, param_grid=param_grid, cv=3, iid=True)
>       gs.fit(X, y)
tests/model_selection/dask_searchcv/test_model_selection.py:367:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
/opt/conda/envs/dask-ml-test/lib/python3.6/site-packages/sklearn/model_selection/_search.py:686: in fit
self._run_search(evaluate_candidates)
/opt/conda/envs/dask-ml-test/lib/python3.6/site-packages/sklearn/model_selection/_search.py:1130: in _run_search
evaluate_candidates(ParameterGrid(self.param_grid))
/opt/conda/envs/dask-ml-test/lib/python3.6/site-packages/sklearn/model_selection/_search.py:675: in evaluate_candidates
cv.split(X, y, groups)))
/opt/conda/envs/dask-ml-test/lib/python3.6/site-packages/sklearn/externals/joblib/parallel.py:983: in __call__
if self.dispatch_one_batch(iterator):
/opt/conda/envs/dask-ml-test/lib/python3.6/site-packages/sklearn/externals/joblib/parallel.py:825: in dispatch_one_batch
self._dispatch(tasks)
/opt/conda/envs/dask-ml-test/lib/python3.6/site-packages/sklearn/externals/joblib/parallel.py:782: in _dispatch
job = self._backend.apply_async(batch, callback=cb)
/opt/conda/envs/dask-ml-test/lib/python3.6/site-packages/sklearn/externals/joblib/_parallel_backends.py:182: in apply_async
result = ImmediateResult(func)
/opt/conda/envs/dask-ml-test/lib/python3.6/site-packages/sklearn/externals/joblib/_parallel_backends.py:545: in __init__
self.results = batch()
/opt/conda/envs/dask-ml-test/lib/python3.6/site-packages/sklearn/externals/joblib/parallel.py:261: in __call__
for func, args, kwargs in self.items]
/opt/conda/envs/dask-ml-test/lib/python3.6/site-packages/sklearn/externals/joblib/parallel.py:261: in <listcomp>
for func, args, kwargs in self.items]
/opt/conda/envs/dask-ml-test/lib/python3.6/site-packages/sklearn/model_selection/_validation.py:552: in _fit_and_score
test_scores = _score(estimator, X_test, y_test, scorer, is_multimetric)
/opt/conda/envs/dask-ml-test/lib/python3.6/site-packages/sklearn/model_selection/_validation.py:589: in _score
return _multimetric_score(estimator, X_test, y_test, scorer)
/opt/conda/envs/dask-ml-test/lib/python3.6/site-packages/sklearn/model_selection/_validation.py:619: in _multimetric_score
score = scorer(estimator, X_test, y_test)
/opt/conda/envs/dask-ml-test/lib/python3.6/site-packages/sklearn/metrics/scorer.py:228: in _passthrough_scorer
return estimator.score(*args, **kwargs)
/opt/conda/envs/dask-ml-test/lib/python3.6/site-packages/sklearn/utils/metaestimators.py:118: in <lambda>
out = lambda *args, **kwargs: self.fn(obj, *args, **kwargs)
/opt/conda/envs/dask-ml-test/lib/python3.6/site-packages/sklearn/pipeline.py:519: in score
Xt = transform.transform(Xt)
/opt/conda/envs/dask-ml-test/lib/python3.6/site-packages/sklearn/pipeline.py:834: in transform
for name, trans, weight in self._iter())
/opt/conda/envs/dask-ml-test/lib/python3.6/site-packages/sklearn/externals/joblib/parallel.py:983: in __call__
if self.dispatch_one_batch(iterator):
/opt/conda/envs/dask-ml-test/lib/python3.6/site-packages/sklearn/externals/joblib/parallel.py:825: in dispatch_one_batch
self._dispatch(tasks)
/opt/conda/envs/dask-ml-test/lib/python3.6/site-packages/sklearn/externals/joblib/parallel.py:782: in _dispatch
job = self._backend.apply_async(batch, callback=cb)
/opt/conda/envs/dask-ml-test/lib/python3.6/site-packages/sklearn/externals/joblib/_parallel_backends.py:182: in apply_async
result = ImmediateResult(func)
/opt/conda/envs/dask-ml-test/lib/python3.6/site-packages/sklearn/externals/joblib/_parallel_backends.py:545: in __init__
self.results = batch()
/opt/conda/envs/dask-ml-test/lib/python3.6/site-packages/sklearn/externals/joblib/parallel.py:261: in __call__
for func, args, kwargs in self.items]
/opt/conda/envs/dask-ml-test/lib/python3.6/site-packages/sklearn/externals/joblib/parallel.py:261: in <listcomp>
for func, args, kwargs in self.items]
/opt/conda/envs/dask-ml-test/lib/python3.6/site-packages/sklearn/pipeline.py:617: in _transform_one
res = transformer.transform(X)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = PCA(copy=True, iterated_power='auto', n_components=1, random_state=0,
svd_solver='auto', tol=0.0, whiten=False)
X = array([[-1.34536609, 1.4 ],
[-1.36108599, 1.4 ],
[-1.44888676, 1.3 ],
[-1.37... 5. ],
[ 0.78648496, 5.1 ],
[ 0.94745888, 5.3 ],
[ 0.97076437, 5.5 ]])
    def transform(self, X):
        """Apply dimensionality reduction to X.
        X is projected on the first principal components previously extracted
        from a training set.
        Parameters
        ----------
        X : array-like, shape (n_samples, n_features)
            New data, where n_samples is the number of samples
            and n_features is the number of features.
        Returns
        -------
        X_new : array-like, shape (n_samples, n_components)
        Examples
        --------
        >>> import numpy as np
        >>> from sklearn.decomposition import IncrementalPCA
        >>> X = np.array([[-1, -1], [-2, -1], [-3, -2], [1, 1], [2, 1], [3, 2]])
        >>> ipca = IncrementalPCA(n_components=2, batch_size=3)
        >>> ipca.fit(X)
        IncrementalPCA(batch_size=3, copy=True, n_components=2, whiten=False)
        >>> ipca.transform(X)  # doctest: +SKIP
        """
        check_is_fitted(self, ['mean_', 'components_'], all_or_any=all)
        X = check_array(X)
        if self.mean_ is not None:
>           X = X - self.mean_
E           ValueError: operands could not be broadcast together with shapes (51,2) (4,)
/opt/conda/envs/dask-ml-test/lib/python3.6/site-packages/sklearn/decomposition/base.py:130: ValueError
--------------------------- Captured stderr teardown ---------------------------
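A note on the mechanism of the ValueError above: the PCA instance inside the FeatureUnion was fitted on 4 features (its mean_ has shape (4,)), but at score time its transform receives an array with only 2 columns, so X - self.mean_ cannot broadcast. A minimal sketch that reproduces the same error with synthetic data (under the scikit-learn version in this traceback; newer releases raise a clearer feature-count error instead):

import numpy as np
from sklearn.decomposition import PCA

rng = np.random.RandomState(0)
pca = PCA(n_components=1, random_state=0)
pca.fit(rng.rand(10, 4))        # fitted mean_ has shape (4,)
pca.transform(rng.rand(51, 2))  # 2 columns vs. a 4-feature mean_ -> broadcast ValueError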