
Commit 95128d3

CI Build wheels for the ARM64 architecture (#18782)
1 parent 58f9143 commit 95128d3

11 files changed (+255 −238 lines)

.travis.yml

Lines changed: 70 additions & 27 deletions
@@ -1,58 +1,101 @@
-# make it explicit that we favor the new container-based travis workers
+# Make it explicit that we favor the
+# new container-based Travis workers
 language: python
+dist: xenial
 
 cache:
   apt: true
   directories:
-  - $HOME/.cache/pip
-  - $HOME/.ccache
-
-dist: xenial
+    - $HOME/.cache/pip
+    - $HOME/.ccache
 
 env:
   global:
-    # Directory where tests are run from
-    - TEST_DIR=/tmp/sklearn
+    - CPU_COUNT=3
+    - TEST_DIR=/tmp/sklearn  # Test directory for continuous integration jobs
+    - PYTEST_VERSION=latest
     - OMP_NUM_THREADS=2
     - OPENBLAS_NUM_THREADS=2
-    - PYTEST_VERSION=latest
+    - SKLEARN_BUILD_PARALLEL=3
+    - SKLEARN_SKIP_NETWORK_TESTS=1
+    # Custom environment variables for the ARM wheel builder
+    - CIBW_BUILD_VERBOSITY=1
+    - CIBW_TEST_REQUIRES="pytest pytest-xdist threadpoolctl"
+    - CIBW_TEST_COMMAND="bash {project}/build_tools/travis/test_wheels.sh"
+    - CIBW_ENVIRONMENT="CPU_COUNT=8
+                        OMP_NUM_THREADS=2
+                        OPENBLAS_NUM_THREADS=2
+                        SKLEARN_BUILD_PARALLEL=8
+                        SKLEARN_SKIP_NETWORK_TESTS=1"
+    # Nightly upload token and staging upload token are set in Travis settings
+    - SCIKIT_LEARN_NIGHTLY_UPLOAD_TOKEN=__token__
+    - SCIKIT_LEARN_STAGING_UPLOAD_TOKEN=__token__
 
-matrix:
+jobs:
   include:
-    # Linux environment to test scikit-learn against numpy and scipy master
-    # installed from their CI wheels in a virtualenv with the Python
-    # interpreter provided by travis.
+    # Linux environment to test scikit-learn against NumPy and SciPy
+    # master installed from their continuous integration wheels in a
+    # virtual environment with Python interpreter provided by Travis.
     - python: 3.7
       env:
-        - CHECK_WARNINGS="true"
-        - CI_CPU_COUNT="3"
+        - CHECK_WARNINGS=true
      if: type = cron OR commit_message =~ /\[scipy-dev\]/
 
-    # As above but build scikit-learn with Intel C compiler (ICC).
     - python: 3.7
       env:
-        - CHECK_WARNING="true"
-        - BUILD_WITH_ICC="true"
-        - CI_CPU_COUNT="3"
+        - CHECK_WARNING=true
+        - BUILD_WITH_ICC=true
      if: type = cron OR commit_message =~ /\[icc-build\]/
 
     - python: 3.7
-      env:
-        - CI_CPU_COUNT="8"
       os: linux
       arch: arm64
       if: type = cron OR commit_message =~ /\[arm64\]/
+      env:
+        - CPU_COUNT=8
+
+    # Linux environments to build the scikit-learn wheels
+    # for the ARM64 arquitecture and Python 3.6 and newer
+    - python: 3.6
+      os: linux
+      arch: arm64
+      if: type = cron or commit_message =~ /\[cd build\]/
+      env:
+        - BUILD_WHEEL=true
+        - CIBW_BUILD=cp36-manylinux_aarch64
+
+    - python: 3.7
+      os: linux
+      arch: arm64
+      if: type = cron or commit_message =~ /\[cd build\]/
+      env:
+        - BUILD_WHEEL=true
+        - CIBW_BUILD=cp37-manylinux_aarch64
+
+    - python: 3.8
+      os: linux
+      arch: arm64
+      if: type = cron or commit_message =~ /\[cd build\]/
+      env:
+        - BUILD_WHEEL=true
+        - CIBW_BUILD=cp38-manylinux_aarch64
+
+    - python: 3.9
+      os: linux
+      arch: arm64
+      if: type = cron or commit_message =~ /\[cd build\]/
+      env:
+        - BUILD_WHEEL=true
+        - CIBW_BUILD=cp39-manylinux_aarch64
 
 install: source build_tools/travis/install.sh
-script:
-  - bash build_tools/travis/test_script.sh || travis_terminate 1
-  - bash build_tools/travis/test_docs.sh || travis_terminate 1
-  - bash build_tools/travis/test_pytest_soft_dependency.sh || travis_terminate 1
+script: source build_tools/travis/script.sh
 after_success: source build_tools/travis/after_success.sh
+
 notifications:
   webhooks:
     urls:
       - https://webhooks.gitter.im/e/4ffabb4df010b70cd624
-    on_success: change  # options: [always|never|change] default: always
-    on_failure: always  # options: [always|never|change] default: always
-    on_start: never  # options: [always|never|change] default: always
+    on_success: change
+    on_failure: always
+    on_start: never
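The wheel-building jobs added above run only on cron builds or when the commit message matches /\[cd build\]/ (the ARM64 smoke-test job uses /\[arm64\]/ the same way). A minimal sketch of how these jobs could be triggered manually on a branch, assuming push access; the branch name here is hypothetical:

# Empty commit whose message matches the [cd build] condition in .travis.yml,
# so the four ARM64 wheel jobs are scheduled on the next Travis build.
git commit --allow-empty -m "Trigger ARM64 wheel builds [cd build]"
git push origin arm64-wheel-test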

build_tools/travis/after_success.sh

Lines changed: 17 additions & 12 deletions
@@ -1,19 +1,24 @@
 #!/bin/bash
-# This script is meant to be called by the "after_success" step defined in
-# .travis.yml. See https://docs.travis-ci.com/ for more details.
 
-# License: 3-clause BSD
+# This script is meant to be called by the "after_success" step
+# defined in ".travis.yml". In particular, we upload the wheels
+# of the ARM64 architecture for the continuous deployment jobs.
 
 set -e
 
-if [[ "$COVERAGE" == "true" ]]; then
-    # Need to run codecov from a git checkout, so we copy .coverage
-    # from TEST_DIR where pytest has been run
-    cp $TEST_DIR/.coverage $TRAVIS_BUILD_DIR
+# The wheels cannot be uploaded on PRs
+if [[ $BUILD_WHEEL == true && $TRAVIS_EVENT_TYPE != pull_request ]]; then
+    if [ $TRAVIS_EVENT_TYPE == cron ]; then
+        ANACONDA_ORG="scipy-wheels-nightly"
+        ANACONDA_TOKEN="$SCIKIT_LEARN_NIGHTLY_UPLOAD_TOKEN"
+    else
+        ANACONDA_ORG="scikit-learn-wheels-staging"
+        ANACONDA_TOKEN="$SCIKIT_LEARN_STAGING_UPLOAD_TOKEN"
+    fi
 
-    # Ignore codecov failures as the codecov server is not
-    # very reliable but we don't want travis to report a failure
-    # in the github UI just because the coverage report failed to
-    # be published.
-    codecov --root $TRAVIS_BUILD_DIR || echo "codecov upload failed"
+    pip install git+https://github.com/Anaconda-Server/anaconda-client
+
+    # Force a replacement if the remote file already exists
+    anaconda -t $ANACONDA_TOKEN upload --force -u $ANACONDA_ORG wheelhouse/*.whl
+    echo "Index: https://pypi.anaconda.org/$ANACONDA_ORG/simple"
 fi
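Depending on the event type, the wheels land either in the "scipy-wheels-nightly" organization (cron builds) or in "scikit-learn-wheels-staging" (other non-PR builds), and the script echoes the resulting index URL. A hedged sketch of how a user could later install a nightly ARM64 wheel from that index, using standard pip flags rather than anything this script runs itself:

# Pull the most recent nightly scikit-learn wheel from the anaconda.org index
# printed by after_success.sh above.
pip install --pre --extra-index-url https://pypi.anaconda.org/scipy-wheels-nightly/simple scikit-learn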

build_tools/travis/install.sh

File mode changed: 100755 → 100644
Lines changed: 6 additions & 104 deletions
@@ -1,111 +1,13 @@
 #!/bin/bash
-# This script is meant to be called by the "install" step defined in
-# .travis.yml. See https://docs.travis-ci.com/ for more details.
-# The behavior of the script is controlled by environment variabled defined
-# in the .travis.yml in the top level folder of the project.
 
-# License: 3-clause BSD
-
-# Travis clone scikit-learn/scikit-learn repository in to a local repository.
-# We use a cached directory with three scikit-learn repositories (one for each
-# matrix entry) from which we pull from local Travis repository. This allows
-# us to keep build artefact for gcc + cython, and gain time
+# This script is meant to be called by the "install" step
+# defined in the ".travis.yml" file. In particular, it is
+# important that we call to the right installation script.
 
 set -e
 
-# Fail fast
-echo "CPU Arch: ${TRAVIS_CPU_ARCH}"
-
-# jq is used in travis_fastfail.sh, it's already pre-installed in non arm64
-# environments
-sudo apt-get install jq
-
-build_tools/travis/travis_fastfail.sh
-
-# Imports get_dep
-source build_tools/shared.sh
-
-echo "List files from cached directories"
-echo "pip:"
-ls $HOME/.cache/pip
-
-export CC=/usr/lib/ccache/gcc
-export CXX=/usr/lib/ccache/g++
-# Useful for debugging how ccache is used
-# export CCACHE_LOGFILE=/tmp/ccache.log
-# ~60M is used by .ccache when compiling from scratch at the time of writing
-ccache --max-size 100M --show-stats
-
-# Deactivate the travis-provided virtual environment and setup a
-# conda-based environment instead
-# If Travvis has language=generic, deactivate does not exist. `|| :` will pass.
-deactivate || :
-
-
-# Install miniconda
-if [[ "$TRAVIS_CPU_ARCH" == "arm64" ]]; then
-    wget https://github.com/conda-forge/miniforge/releases/latest/download/Miniforge3-Linux-aarch64.sh -O miniconda.sh
+if [[ $BUILD_WHEEL == true ]]; then
+    source build_tools/travis/install_wheels.sh
 else
-    fname=Miniconda3-latest-Linux-x86_64.sh
-    wget https://repo.continuum.io/miniconda/$fname -O miniconda.sh
+    source build_tools/travis/install_master.sh
 fi
-MINICONDA_PATH=$HOME/miniconda
-chmod +x miniconda.sh && ./miniconda.sh -b -p $MINICONDA_PATH
-export PATH=$MINICONDA_PATH/bin:$PATH
-conda update --yes conda
-
-# Create environment and install dependencies
-conda create -n testenv --yes python=3.7
-
-source activate testenv
-
-if [[ "$TRAVIS_CPU_ARCH" == "amd64" ]]; then
-    pip install --upgrade pip setuptools
-    echo "Installing numpy and scipy master wheels"
-    dev_anaconda_url=https://pypi.anaconda.org/scipy-wheels-nightly/simple
-    pip install --pre --upgrade --timeout=60 --extra-index $dev_anaconda_url numpy scipy pandas
-    pip install --pre cython
-    echo "Installing joblib master"
-    pip install https://github.com/joblib/joblib/archive/master.zip
-    echo "Installing pillow master"
-    pip install https://github.com/python-pillow/Pillow/archive/master.zip
-else
-    conda install -y scipy numpy pandas cython
-    pip install joblib threadpoolctl
-fi
-
-pip install $(get_dep pytest $PYTEST_VERSION) pytest-cov pytest-xdist
-
-# Build scikit-learn in the install.sh script to collapse the verbose
-# build output in the travis output when it succeeds.
-python --version
-python -c "import numpy; print('numpy %s' % numpy.__version__)"
-python -c "import scipy; print('scipy %s' % scipy.__version__)"
-
-if [[ "$BUILD_WITH_ICC" == "true" ]]; then
-    wget https://apt.repos.intel.com/intel-gpg-keys/GPG-PUB-KEY-INTEL-SW-PRODUCTS-2023.PUB
-    sudo apt-key add GPG-PUB-KEY-INTEL-SW-PRODUCTS-2023.PUB
-    rm GPG-PUB-KEY-INTEL-SW-PRODUCTS-2023.PUB
-    sudo add-apt-repository "deb https://apt.repos.intel.com/oneapi all main"
-    sudo apt-get update
-    sudo apt-get install intel-oneapi-icc
-    source /opt/intel/oneapi/setvars.sh
-
-    # The build_clib command is implicitly used to build libsvm-skl. To compile
-    # with a different compiler we also need to specify the compiler for this
-    # command.
-    python setup.py build_ext --compiler=intelem -i -j "${CI_CPU_COUNT}" build_clib --compiler=intelem
-else
-    # Use setup.py instead of `pip install -e .` to be able to pass the -j flag
-    # to speed-up the building multicore CI machines.
-    python setup.py build_ext --inplace -j "${CI_CPU_COUNT}"
-fi
-
-python setup.py develop
-
-ccache --show-stats
-# Useful for debugging how ccache is used
-# cat $CCACHE_LOGFILE
-
-# fast fail
-build_tools/travis/travis_fastfail.sh

build_tools/travis/install_master.sh

Lines changed: 102 additions & 0 deletions
@@ -0,0 +1,102 @@
+#!/bin/bash
+
+# Travis clone "scikit-learn/scikit-learn" repository into
+# a local repository. We use a cached directory with three
+# scikit-learn repositories (one for each matrix entry for
+# non continuous deployment jobs) from which we pull local
+# Travis repository. This allows us to keep build artifact
+# for GCC + Cython, and gain time.
+
+set -e
+
+echo "CPU Arch: $TRAVIS_CPU_ARCH."
+
+# Import "get_dep"
+source build_tools/shared.sh
+
+echo "List files from cached directories."
+echo "pip:"
+ls $HOME/.cache/pip
+
+export CC=/usr/lib/ccache/gcc
+export CXX=/usr/lib/ccache/g++
+
+# Useful for debugging how ccache is used
+# export CCACHE_LOGFILE=/tmp/ccache.log
+
+# 60MB are (more or less) used by .ccache, when
+# compiling from scratch at the time of writing
+ccache --max-size 100M --show-stats
+
+# Deactivate the default virtual environment
+# to setup a conda-based environment instead
+deactivate
+
+if [[ $TRAVIS_CPU_ARCH == arm64 ]]; then
+    # Different Miniconda URL for ARM64 architectures
+    MINICONDA_URL="https://github.com/conda-forge/miniforge/releases/latest/download/Miniforge3-Linux-aarch64.sh"
+else
+    MINICONDA_URL="https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh"
+fi
+
+# Install Miniconda
+wget $MINICONDA_URL -O miniconda.sh
+MINICONDA_PATH=$HOME/miniconda
+chmod +x miniconda.sh && ./miniconda.sh -b -p $MINICONDA_PATH
+export PATH=$MINICONDA_PATH/bin:$PATH
+conda update --yes conda
+
+# Create environment and install dependencies
+conda create -n testenv --yes python=3.7
+
+source activate testenv
+
+if [[ $TRAVIS_CPU_ARCH == amd64 ]]; then
+    echo "Upgrading pip and setuptools."
+    pip install --upgrade pip setuptools
+    echo "Installing numpy, scipy and pandas master wheels."
+    dev_anaconda_url=https://pypi.anaconda.org/scipy-wheels-nightly/simple
+    pip install --pre --upgrade --timeout=60 --extra-index $dev_anaconda_url numpy scipy pandas
+    echo "Installing cython pre-release wheels."
+    pip install --pre cython
+    echo "Installing joblib master."
+    pip install https://github.com/joblib/joblib/archive/master.zip
+    echo "Installing pillow master."
+    pip install https://github.com/python-pillow/Pillow/archive/master.zip
+else
+    conda install -y scipy numpy pandas cython
+    pip install joblib threadpoolctl
+fi
+
+pip install $(get_dep pytest $PYTEST_VERSION) pytest-xdist
+
+# Build scikit-learn in this script to collapse the
+# verbose build output in the Travis output when it
+# succeeds
+python --version
+python -c "import numpy; print(f'numpy {numpy.__version__}')"
+python -c "import scipy; print(f'scipy {scipy.__version__}')"
+
+if [[ $BUILD_WITH_ICC == true ]]; then
+    wget https://apt.repos.intel.com/intel-gpg-keys/GPG-PUB-KEY-INTEL-SW-PRODUCTS-2023.PUB
+    sudo apt-key add GPG-PUB-KEY-INTEL-SW-PRODUCTS-2023.PUB
+    rm GPG-PUB-KEY-INTEL-SW-PRODUCTS-2023.PUB
+    sudo add-apt-repository "deb https://apt.repos.intel.com/oneapi all main"
+    sudo apt-get update
+    sudo apt-get install intel-oneapi-icc
+    source /opt/intel/oneapi/setvars.sh
+
+    # The "build_clib" command is implicitly used to build "libsvm-skl".
+    # To compile with a different compiler, we also need to specify the
+    # compiler for this command
+    python setup.py build_ext --compiler=intelem -i build_clib --compiler=intelem
+else
+    pip install -e .
+fi
+
+python setup.py develop
+
+ccache --show-stats
+
+# Useful for debugging how ccache is used
+# cat $CCACHE_LOGFILE
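Compared with the old install.sh, the non-ICC branch now uses a plain "pip install -e ." with no explicit "-j" flag; the build parallelism instead comes from the SKLEARN_BUILD_PARALLEL variable exported in .travis.yml. A minimal local sketch of that path, assuming scikit-learn's setup.py reads SKLEARN_BUILD_PARALLEL as its build documentation describes:

# Build the Cython extensions in parallel and install in editable mode,
# mirroring the non-ICC branch of install_master.sh.
export SKLEARN_BUILD_PARALLEL=3
pip install --editable .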

build_tools/travis/install_wheels.sh

Lines changed: 6 additions & 0 deletions
@@ -0,0 +1,6 @@
+#!/bin/bash
+
+set -e
+
+python -m pip install cibuildwheel
+python -m cibuildwheel --output-dir wheelhouse
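This script only installs and invokes cibuildwheel; which CPython version is built and how the resulting wheel is tested is driven entirely by the CIBW_* variables exported in .travis.yml. A hedged sketch of an equivalent stand-alone run — the values are copied from the Travis config, and an ARM64 Linux host with Docker (as on the Travis arm64 workers) is assumed:

# Select a single Python 3.8 manylinux aarch64 wheel and reuse the same test
# requirements and test command as the CI jobs.
export CIBW_BUILD=cp38-manylinux_aarch64
export CIBW_BUILD_VERBOSITY=1
export CIBW_TEST_REQUIRES="pytest pytest-xdist threadpoolctl"
export CIBW_TEST_COMMAND="bash {project}/build_tools/travis/test_wheels.sh"
python -m pip install cibuildwheel
python -m cibuildwheel --output-dir wheelhouse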
