diff --git a/.cirrus.star b/.cirrus.star index 495924019b265..8b3de0d10c532 100644 --- a/.cirrus.star +++ b/.cirrus.star @@ -30,6 +30,6 @@ def main(ctx): return [] if "[cd build]" in commit_msg or "[cd build cirrus]" in commit_msg: - return fs.read(arm_wheel_yaml) + return fs.read(arm_wheel_yaml) + fs.read(arm_tests_yaml) return fs.read(arm_tests_yaml) diff --git a/.github/workflows/wheels.yml b/.github/workflows/wheels.yml index b623e28d25979..a1caae9fd5e08 100644 --- a/.github/workflows/wheels.yml +++ b/.github/workflows/wheels.yml @@ -103,20 +103,6 @@ jobs: python: 311 platform_id: macosx_x86_64 - # MacOS arm64 - - os: macos-latest - python: 38 - platform_id: macosx_arm64 - - os: macos-latest - python: 39 - platform_id: macosx_arm64 - - os: macos-latest - python: 310 - platform_id: macosx_arm64 - - os: macos-latest - python: 311 - platform_id: macosx_arm64 - steps: - name: Checkout scikit-learn uses: actions/checkout@v3 @@ -221,5 +207,6 @@ jobs: # Secret variables need to be mapped to environment variables explicitly SCIKIT_LEARN_NIGHTLY_UPLOAD_TOKEN: ${{ secrets.SCIKIT_LEARN_NIGHTLY_UPLOAD_TOKEN }} SCIKIT_LEARN_STAGING_UPLOAD_TOKEN: ${{ secrets.SCIKIT_LEARN_STAGING_UPLOAD_TOKEN }} + ARTIFACTS_PATH: dist/artifact # Force a replacement if the remote file already exists run: bash build_tools/github/upload_anaconda.sh diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index 4f0bd8def013e..0000000000000 --- a/.travis.yml +++ /dev/null @@ -1,93 +0,0 @@ -# Make it explicit that we favor the -# new container-based Travis workers -language: python -dist: xenial -# Only used to install cibuildwheel, CIBW_BUILD determines the python version being -# built in the docker image itself. Also: travis does not have 3.10 yet. 
-python: 3.9 - -cache: - apt: true - directories: - - $HOME/.cache/pip - - $HOME/.ccache - -env: - global: - - CPU_COUNT=3 - - TEST_DIR=/tmp/sklearn # Test directory for continuous integration jobs - - PYTEST_VERSION=latest - - OMP_NUM_THREADS=2 - - OPENBLAS_NUM_THREADS=2 - - SKLEARN_BUILD_PARALLEL=3 - - SKLEARN_SKIP_NETWORK_TESTS=1 - - PYTHONUNBUFFERED=1 - # Custom environment variables for the ARM wheel builder - - CIBW_BUILD_VERBOSITY=1 - - CIBW_TEST_COMMAND="bash {project}/build_tools/travis/test_wheels.sh" - - CIBW_ENVIRONMENT="CPU_COUNT=4 - OMP_NUM_THREADS=2 - OPENBLAS_NUM_THREADS=2 - SKLEARN_BUILD_PARALLEL=10 - SKLEARN_SKIP_NETWORK_TESTS=1 - PYTHONUNBUFFERED=1" - -jobs: - include: - # Linux environments to build the scikit-learn wheels for the ARM64 - # architecture and Python 3.8 and newer. This is used both at release time - # with the manual trigger in the commit message in the release branch and as - # a scheduled task to build the weekly dev build on the main branch. The - # weekly frequency is meant to avoid depleting the Travis CI credits too - # fast. 
- - os: linux - arch: arm64-graviton2 - dist: focal - virt: vm - group: edge - if: type = cron or commit_message =~ /\[cd build\]/ - env: - - CIBW_BUILD=cp38-manylinux_aarch64 - - BUILD_WHEEL=true - - - os: linux - arch: arm64-graviton2 - dist: focal - virt: vm - group: edge - if: type = cron or commit_message =~ /\[cd build\]/ - env: - - CIBW_BUILD=cp39-manylinux_aarch64 - - BUILD_WHEEL=true - - - os: linux - arch: arm64-graviton2 - dist: focal - virt: vm - group: edge - if: type = cron or commit_message =~ /\[cd build\]/ - env: - - CIBW_BUILD=cp310-manylinux_aarch64 - - BUILD_WHEEL=true - - - os: linux - arch: arm64-graviton2 - dist: focal - virt: vm - group: edge - if: type = cron or commit_message =~ /\[cd build\]/ - env: - - CIBW_BUILD=cp311-manylinux_aarch64 - - BUILD_WHEEL=true - -install: source build_tools/travis/install.sh || travis_terminate 1 -script: source build_tools/travis/script.sh || travis_terminate 1 -after_success: source build_tools/travis/after_success.sh || travis_terminate 1 - -notifications: - webhooks: - urls: - - https://webhooks.gitter.im/e/4ffabb4df010b70cd624 - on_success: change - on_failure: always - on_start: never diff --git a/README.rst b/README.rst index 364d45866636e..108cb331821cd 100644 --- a/README.rst +++ b/README.rst @@ -1,6 +1,6 @@ .. -*- mode: rst -*- -|Azure|_ |Travis|_ |Codecov|_ |CircleCI|_ |Nightly wheels|_ |Black|_ |PythonVersion|_ |PyPi|_ |DOI|_ |Benchmark|_ +|Azure|_ |CirrusCI|_ |Codecov|_ |CircleCI|_ |Nightly wheels|_ |Black|_ |PythonVersion|_ |PyPi|_ |DOI|_ |Benchmark|_ .. |Azure| image:: https://dev.azure.com/scikit-learn/scikit-learn/_apis/build/status/scikit-learn.scikit-learn?branchName=main .. _Azure: https://dev.azure.com/scikit-learn/scikit-learn/_build/latest?definitionId=1&branchName=main @@ -8,8 +8,8 @@ .. |CircleCI| image:: https://circleci.com/gh/scikit-learn/scikit-learn/tree/main.svg?style=shield&circle-token=:circle-token .. _CircleCI: https://circleci.com/gh/scikit-learn/scikit-learn -.. 
|Travis| image:: https://api.travis-ci.com/scikit-learn/scikit-learn.svg?branch=main -.. _Travis: https://app.travis-ci.com/github/scikit-learn/scikit-learn +.. |CirrusCI| image:: https://img.shields.io/cirrus/github/scikit-learn/scikit-learn/main?label=Cirrus%20CI +.. _CirrusCI: https://cirrus-ci.com/github/scikit-learn/scikit-learn/main .. |Codecov| image:: https://codecov.io/gh/scikit-learn/scikit-learn/branch/main/graph/badge.svg?token=Pk8G9gg3y9 .. _Codecov: https://codecov.io/gh/scikit-learn/scikit-learn diff --git a/build_tools/cirrus/arm_tests.yml b/build_tools/cirrus/arm_tests.yml index 2a15753cb3b30..319c727954ea4 100644 --- a/build_tools/cirrus/arm_tests.yml +++ b/build_tools/cirrus/arm_tests.yml @@ -12,6 +12,7 @@ linux_aarch64_test_task: OPENBLAS_NUM_THREADS: 2 LOCK_FILE: build_tools/cirrus/py39_conda_forge_linux-aarch64_conda.lock CONDA_PKGS_DIRS: /root/.conda/pkgs + HOME: / # $HOME is not defined in image and is required to install mambaforge ccache_cache: folder: /root/.cache/ccache conda_cache: diff --git a/build_tools/cirrus/arm_wheel.yml b/build_tools/cirrus/arm_wheel.yml index c5a644291c1c3..0829ea340a01c 100644 --- a/build_tools/cirrus/arm_wheel.yml +++ b/build_tools/cirrus/arm_wheel.yml @@ -69,3 +69,32 @@ linux_arm64_wheel_task: wheels_artifacts: path: "wheelhouse/*" + + +wheels_upload_task: + depends_on: + - macos_arm64_wheel + - linux_arm64_wheel + container: + image: continuumio/miniconda3:22.11.1 + # Artifacts are not uploaded on PRs + only_if: $CIRRUS_PR == "" + env: + # Upload tokens have been encrypted via the CirrusCI interface: + # https://cirrus-ci.org/guide/writing-tasks/#encrypted-variables + SCIKIT_LEARN_NIGHTLY_UPLOAD_TOKEN: ENCRYPTED[8f20120b18a07d8a11192b98bff1f562883558e1f4c53f8ead1577113785a4105ee6f14ad9b5dacf1803c19c4913fe1c] + SCIKIT_LEARN_STAGING_UPLOAD_TOKEN: ENCRYPTED[8fade46af37fa645e57bd1ee21683337aa369ba56f6307ce13889f1e74df94e5bdd21d323baac21e332fd87b8949659a] + ARTIFACTS_PATH: wheelhouse + upload_script: | + conda 
install curl unzip -y + + if [[ "$CIRRUS_CRON" == "nightly" ]]; then + export GITHUB_EVENT_NAME="schedule" + fi + + # Download and show wheels + curl https://api.cirrus-ci.com/v1/artifact/build/$CIRRUS_BUILD_ID/wheels.zip --output wheels.zip + unzip wheels.zip + ls wheelhouse + + bash build_tools/github/upload_anaconda.sh diff --git a/build_tools/github/check_wheels.py b/build_tools/github/check_wheels.py index ef9bd77254fb5..99d319cba4dc5 100644 --- a/build_tools/github/check_wheels.py +++ b/build_tools/github/check_wheels.py @@ -14,14 +14,13 @@ # plus one more for the sdist n_wheels += 1 -# aarch64 builds from travis -travis_config_path = Path.cwd() / ".travis.yml" -with travis_config_path.open("r") as f: - travis_config = yaml.safe_load(f) - -jobs = travis_config["jobs"]["include"] -travis_builds = [j for j in jobs if any("CIBW_BUILD" in env for env in j["env"])] -n_wheels += len(travis_builds) +# arm64 builds from cirrus +cirrus_path = Path.cwd() / "build_tools" / "cirrus" / "arm_wheel.yml" +with cirrus_path.open("r") as f: + cirrus_config = yaml.safe_load(f) + +n_wheels += len(cirrus_config["macos_arm64_wheel_task"]["matrix"]) +n_wheels += len(cirrus_config["linux_arm64_wheel_task"]["matrix"]) dist_files = list(Path("dist").glob("**/*")) n_dist_files = len(dist_files) diff --git a/build_tools/github/upload_anaconda.sh b/build_tools/github/upload_anaconda.sh index 13e8420e3cc5a..60cab7f8dcf4a 100755 --- a/build_tools/github/upload_anaconda.sh +++ b/build_tools/github/upload_anaconda.sh @@ -18,5 +18,5 @@ source activate upload conda install -y anaconda-client # Force a replacement if the remote file already exists -anaconda -t $ANACONDA_TOKEN upload --force -u $ANACONDA_ORG dist/artifact/* +anaconda -t $ANACONDA_TOKEN upload --force -u $ANACONDA_ORG $ARTIFACTS_PATH/* echo "Index: https://pypi.anaconda.org/$ANACONDA_ORG/simple" diff --git a/build_tools/travis/after_success.sh b/build_tools/travis/after_success.sh deleted file mode 100755 index 
a09a4013ed946..0000000000000 --- a/build_tools/travis/after_success.sh +++ /dev/null @@ -1,35 +0,0 @@ -#!/bin/bash - -# This script is meant to be called by the "after_success" step -# defined in ".travis.yml". In particular, we upload the wheels -# of the ARM64 architecture for the continuous deployment jobs. - -set -e - -# The wheels cannot be uploaded on PRs -if [[ $BUILD_WHEEL == true && $TRAVIS_EVENT_TYPE != pull_request ]]; then - # Nightly upload token and staging upload token are set in - # Travis settings (originally generated at Anaconda cloud) - if [[ $TRAVIS_EVENT_TYPE == cron ]]; then - ANACONDA_ORG="scipy-wheels-nightly" - ANACONDA_TOKEN="$SCIKIT_LEARN_NIGHTLY_UPLOAD_TOKEN" - else - ANACONDA_ORG="scikit-learn-wheels-staging" - ANACONDA_TOKEN="$SCIKIT_LEARN_STAGING_UPLOAD_TOKEN" - fi - - MINICONDA_URL="https://github.com/conda-forge/miniforge/releases/latest/download/Miniforge3-Linux-aarch64.sh" - wget $MINICONDA_URL -O miniconda.sh - MINICONDA_PATH=$HOME/miniconda - chmod +x miniconda.sh && ./miniconda.sh -b -p $MINICONDA_PATH - - # Install Python 3.8 because of a bug with Python 3.9 - export PATH=$MINICONDA_PATH/bin:$PATH - conda create -n upload -y python=3.8 - source activate upload - conda install -y anaconda-client - - # Force a replacement if the remote file already exists - anaconda -t $ANACONDA_TOKEN upload --force -u $ANACONDA_ORG wheelhouse/*.whl - echo "Index: https://pypi.anaconda.org/$ANACONDA_ORG/simple" -fi diff --git a/build_tools/travis/install.sh b/build_tools/travis/install.sh deleted file mode 100755 index 178260c8dabcb..0000000000000 --- a/build_tools/travis/install.sh +++ /dev/null @@ -1,11 +0,0 @@ -#!/bin/bash - -# This script is meant to be called by the "install" step -# defined in the ".travis.yml" file. In particular, it is -# important that we call to the right installation script. 
- -if [[ $BUILD_WHEEL == true ]]; then - source build_tools/travis/install_wheels.sh || travis_terminate 1 -else - source build_tools/travis/install_main.sh || travis_terminate 1 -fi diff --git a/build_tools/travis/install_main.sh b/build_tools/travis/install_main.sh deleted file mode 100755 index c0795139859bb..0000000000000 --- a/build_tools/travis/install_main.sh +++ /dev/null @@ -1,66 +0,0 @@ -#!/bin/bash - -# Travis clone "scikit-learn/scikit-learn" repository into -# a local repository. We use a cached directory with three -# scikit-learn repositories (one for each matrix entry for -# non continuous deployment jobs) from which we pull local -# Travis repository. This allows us to keep build artifact -# for GCC + Cython, and gain time. - -set -e - -echo "CPU Arch: $TRAVIS_CPU_ARCH." - -# Import "get_dep" -source build_tools/shared.sh - -echo "List files from cached directories." -echo "pip:" -ls $HOME/.cache/pip - -export CC=/usr/lib/ccache/gcc -export CXX=/usr/lib/ccache/g++ - -# Useful for debugging how ccache is used -# export CCACHE_LOGFILE=/tmp/ccache.log - -# 60MB are (more or less) used by .ccache, when -# compiling from scratch at the time of writing -ccache --max-size 100M --show-stats - -# Deactivate the default virtual environment -# to setup a conda-based environment instead -deactivate - -MINICONDA_URL="https://github.com/conda-forge/miniforge/releases/latest/download/Miniforge3-Linux-aarch64.sh" - -# Install Miniconda -wget $MINICONDA_URL -O miniconda.sh -MINICONDA_PATH=$HOME/miniconda -chmod +x miniconda.sh && ./miniconda.sh -b -p $MINICONDA_PATH -export PATH=$MINICONDA_PATH/bin:$PATH -conda update --yes conda - -# Create environment and install dependencies -conda create -n testenv --yes python=3.7 - -source activate testenv -conda install -y scipy numpy pandas cython -pip install joblib threadpoolctl - -pip install $(get_dep pytest $PYTEST_VERSION) pytest-xdist - -# Build scikit-learn in this script to collapse the -# verbose build output in 
the Travis output when it -# succeeds -python --version -python -c "import numpy; print(f'numpy {numpy.__version__}')" -python -c "import scipy; print(f'scipy {scipy.__version__}')" - -pip install -e . -python setup.py develop - -ccache --show-stats - -# Useful for debugging how ccache is used -# cat $CCACHE_LOGFILE diff --git a/build_tools/travis/install_wheels.sh b/build_tools/travis/install_wheels.sh deleted file mode 100755 index 0f6cdf256e71b..0000000000000 --- a/build_tools/travis/install_wheels.sh +++ /dev/null @@ -1,4 +0,0 @@ -#!/bin/bash - -python -m pip install cibuildwheel || travis_terminate $? -python -m cibuildwheel --output-dir wheelhouse || travis_terminate $? diff --git a/build_tools/travis/script.sh b/build_tools/travis/script.sh deleted file mode 100755 index 6e8b7e3deaee1..0000000000000 --- a/build_tools/travis/script.sh +++ /dev/null @@ -1,12 +0,0 @@ -#!/bin/bash - -# This script is meant to be called by the "script" step defined -# in the ".travis.yml" file. While this step is forbidden by the -# continuous deployment jobs, we have to execute the scripts for -# testing the continuous integration jobs. 
- -if [[ $BUILD_WHEEL != true ]]; then - # This trick will make Travis terminate the continuation of the pipeline - bash build_tools/travis/test_script.sh || travis_terminate 1 - bash build_tools/travis/test_docs.sh || travis_terminate 1 -fi diff --git a/build_tools/travis/test_docs.sh b/build_tools/travis/test_docs.sh deleted file mode 100755 index 4907dee1c9789..0000000000000 --- a/build_tools/travis/test_docs.sh +++ /dev/null @@ -1,8 +0,0 @@ -#!/bin/bash - -set -e - -if [[ $TRAVIS_CPU_ARCH != arm64 ]]; then - # Faster run of the documentation tests - PYTEST="pytest -n $CPU_COUNT" make test-doc -fi diff --git a/build_tools/travis/test_script.sh b/build_tools/travis/test_script.sh deleted file mode 100755 index 1551ed858d1a1..0000000000000 --- a/build_tools/travis/test_script.sh +++ /dev/null @@ -1,39 +0,0 @@ -#!/bin/bash - -set -e - -python --version -python -c "import numpy; print(f'numpy {numpy.__version__}')" -python -c "import scipy; print(f'scipy {scipy.__version__}')" -python -c "\ -try: - import pandas - print(f'pandas {pandas.__version__}') -except ImportError: - pass -" -python -c "import joblib; print(f'{joblib.cpu_count()} CPUs')" -python -c "import platform; print(f'{platform.machine()}')" - -TEST_CMD="pytest --showlocals --durations=20 --pyargs" - -# Run the tests on the installed version -mkdir -p $TEST_DIR - -# Copy "setup.cfg" for the test settings -cp setup.cfg $TEST_DIR -cd $TEST_DIR - -if [[ $TRAVIS_CPU_ARCH == arm64 ]]; then - # Faster run of the source code tests - TEST_CMD="$TEST_CMD -n $CPU_COUNT" - - # Remove the option to test the docstring - sed -i -e 's/--doctest-modules//g' setup.cfg -fi - -if [[ -n $CHECK_WARNINGS ]]; then - TEST_CMD="$TEST_CMD -Werror::DeprecationWarning -Werror::FutureWarning -Werror::numpy.VisibleDeprecationWarning" -fi - -$TEST_CMD sklearn diff --git a/build_tools/travis/test_wheels.sh b/build_tools/travis/test_wheels.sh deleted file mode 100755 index 11d4bd73cedd7..0000000000000 --- 
a/build_tools/travis/test_wheels.sh +++ /dev/null @@ -1,9 +0,0 @@ -#!/bin/bash - -pip install --upgrade pip || travis_terminate $? -pip install pytest pytest-xdist || travis_terminate $? - -# Test that there are no links to system libraries in the threadpoolctl -# section of the show_versions output. -python -c "import sklearn; sklearn.show_versions()" || travis_terminate $? -python -m pytest -n $CPU_COUNT --pyargs sklearn || travis_terminate $? diff --git a/doc/about.rst b/doc/about.rst index a5c5bcea1ea69..0eca85ac1fa44 100644 --- a/doc/about.rst +++ b/doc/about.rst @@ -619,7 +619,7 @@ Infrastructure support ---------------------- - We would also like to thank `Microsoft Azure - `_, `Travis Cl `_, + `_, `Cirrus CI `_, `CircleCl `_ for free CPU time on their Continuous Integration servers, and `Anaconda Inc. `_ for the storage they provide for our staging and nightly builds. diff --git a/doc/developers/maintainer.rst b/doc/developers/maintainer.rst index 4337f2b5e0545..ebd832b698ea9 100644 --- a/doc/developers/maintainer.rst +++ b/doc/developers/maintainer.rst @@ -342,30 +342,6 @@ updates can be made by pushing to master (for /dev) or a release branch like 0.99.X, from which Circle CI builds and uploads the documentation automatically. -Travis Cron jobs ----------------- - -From ``_: Travis CI cron jobs work -similarly to the cron utility, they run builds at regular scheduled intervals -independently of whether any commits were pushed to the repository. Cron jobs -always fetch the most recent commit on a particular branch and build the project -at that state. Cron jobs can run daily, weekly or monthly, which in practice -means up to an hour after the selected time span, and you cannot set them to run -at a specific time. - -For scikit-learn, Cron jobs are used for builds that we do not want to run in -each PR. As an example the build with the dev versions of numpy and scipy is -run as a Cron job. 
Most of the time when this numpy-dev build fail, it is -related to a numpy change and not a scikit-learn one, so it would not make sense -to blame the PR author for the Travis failure. - -The definition of what gets run in the Cron job is done in the .travis.yml -config file, exactly the same way as the other Travis jobs. We use a ``if: type -= cron`` filter in order for the build to be run only in Cron jobs. - -The branch targeted by the Cron job and the frequency of the Cron job is set -via the web UI at https://www.travis-ci.org/scikit-learn/scikit-learn/settings. - Experimental features --------------------- diff --git a/sklearn/datasets/tests/test_20news.py b/sklearn/datasets/tests/test_20news.py index 4244dd7865945..e30348c894559 100644 --- a/sklearn/datasets/tests/test_20news.py +++ b/sklearn/datasets/tests/test_20news.py @@ -1,6 +1,6 @@ """Test the 20news downloader, if the data is available, or if specifically requested via environment variable -(e.g. for travis cron job).""" +(e.g. for CI jobs).""" from functools import partial from unittest.mock import patch diff --git a/sklearn/datasets/tests/test_california_housing.py b/sklearn/datasets/tests/test_california_housing.py index 82a321e96a8d6..495becccd820f 100644 --- a/sklearn/datasets/tests/test_california_housing.py +++ b/sklearn/datasets/tests/test_california_housing.py @@ -1,6 +1,6 @@ """Test the california_housing loader, if the data is available, or if specifically requested via environment variable -(e.g. for travis cron job).""" +(e.g. for CI jobs).""" import pytest from sklearn.datasets.tests.test_common import check_return_X_y diff --git a/sklearn/datasets/tests/test_covtype.py b/sklearn/datasets/tests/test_covtype.py index bbdd395a847f4..aa2bd30a2ee8a 100644 --- a/sklearn/datasets/tests/test_covtype.py +++ b/sklearn/datasets/tests/test_covtype.py @@ -1,6 +1,6 @@ """Test the covtype loader, if the data is available, or if specifically requested via environment variable -(e.g. 
for travis cron job).""" +(e.g. for CI jobs).""" from functools import partial import pytest from sklearn.datasets.tests.test_common import check_return_X_y diff --git a/sklearn/datasets/tests/test_kddcup99.py b/sklearn/datasets/tests/test_kddcup99.py index b935da3a26add..b3ee779aed675 100644 --- a/sklearn/datasets/tests/test_kddcup99.py +++ b/sklearn/datasets/tests/test_kddcup99.py @@ -1,6 +1,6 @@ """Test kddcup99 loader, if the data is available, or if specifically requested via environment variable -(e.g. for travis cron job). +(e.g. for CI jobs). Only 'percent10' mode is tested, as the full data is too big to use in unit-testing. diff --git a/sklearn/datasets/tests/test_olivetti_faces.py b/sklearn/datasets/tests/test_olivetti_faces.py index 7d11516b0426c..18fceb0ed8b0e 100644 --- a/sklearn/datasets/tests/test_olivetti_faces.py +++ b/sklearn/datasets/tests/test_olivetti_faces.py @@ -1,6 +1,6 @@ """Test Olivetti faces fetcher, if the data is available, or if specifically requested via environment variable -(e.g. for travis cron job).""" +(e.g. for CI jobs).""" import numpy as np diff --git a/sklearn/datasets/tests/test_rcv1.py b/sklearn/datasets/tests/test_rcv1.py index cdc9f02c010c5..ac5c29e19cd25 100644 --- a/sklearn/datasets/tests/test_rcv1.py +++ b/sklearn/datasets/tests/test_rcv1.py @@ -1,6 +1,6 @@ """Test the rcv1 loader, if the data is available, or if specifically requested via environment variable -(e.g. for travis cron job).""" +(e.g. 
for CI jobs).""" import scipy.sparse as sp import numpy as np diff --git a/sklearn/utils/_testing.py b/sklearn/utils/_testing.py index 21d1352439ccb..482d3ea818563 100644 --- a/sklearn/utils/_testing.py +++ b/sklearn/utils/_testing.py @@ -392,9 +392,6 @@ def set_random_state(estimator, random_state=0): import pytest skip_if_32bit = pytest.mark.skipif(_IS_32BIT, reason="skipped on 32bit platforms") - skip_travis = pytest.mark.skipif( - os.environ.get("TRAVIS") == "true", reason="skip on travis" - ) fails_if_pypy = pytest.mark.xfail(IS_PYPY, reason="not compatible with PyPy") fails_if_unstable_openblas = pytest.mark.xfail( _in_unstable_openblas_configuration(),