CI: move Linux and MacOS Azure builds to conda lock files (#22448) · scikit-learn/scikit-learn@f862129 · GitHub

Commit f862129

lesteve, ogrisel, and thomasjpfan authored
CI: move Linux and MacOS Azure builds to conda lock files (#22448)
Co-authored-by: Olivier Grisel <olivier.grisel@ensta.org>
Co-authored-by: Thomas J. Fan <thomasjpfan@gmail.com>
1 parent a7bbba1 commit f862129

29 files changed: +1796 −167 lines
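In short, the builds stop resolving dependency versions at install time (via per-package *_VERSION variables) and instead install pre-resolved environments from lock files committed to the repository. A minimal sketch of the conda path, following the install.sh changes below (the environment name and lock-file path are illustrative):

    # Install a pinned conda-lock, then create the test environment from a
    # platform-specific lock file checked into the repository.
    conda update -n base conda -y
    conda install -c conda-forge conda-lock==1.0.5 -y
    conda-lock install --name testvenv \
        ./build_tools/azure/pylatest_conda_forge_mkl_linux-64_conda.lock
    source activate testvenv

The pip-based builds follow the same idea, with pip install -r pointed at a pip-compile generated lock file.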

azure-pipelines.yml

Lines changed: 15 additions & 33 deletions

@@ -63,7 +63,7 @@ jobs:
     matrix:
       pylatest_pip_scipy_dev:
         DISTRIB: 'conda-pip-scipy-dev'
-        PYTHON_VERSION: '*'
+        LOCK_FILE: './build_tools/azure/pylatest_pip_scipy_dev_linux-64_conda.lock'
         CHECK_WARNINGS: 'true'
         CHECK_PYTEST_SOFT_DEPENDENCY: 'true'
         TEST_DOCSTRINGS: 'true'
@@ -103,6 +103,7 @@ jobs:
     matrix:
       pylatest_pip_nogil:
         DISTRIB: 'pip-nogil'
+        LOCK_FILE: './build_tools/azure/python_nogil_lock.txt'
         COVERAGE: 'false'
 
 # Check compilation with intel C++ compiler (ICC)
@@ -122,9 +123,7 @@ jobs:
     matrix:
       pylatest_conda_forge_mkl:
         DISTRIB: 'conda'
-        CONDA_CHANNEL: 'conda-forge'
-        PYTHON_VERSION: '*'
-        BLAS: 'mkl'
+        LOCK_FILE: 'build_tools/azure/pylatest_conda_forge_mkl_no_coverage_linux-64_conda.lock'
         COVERAGE: 'false'
         BUILD_WITH_ICC: 'true'
 
@@ -144,10 +143,9 @@ jobs:
     )
     matrix:
       pypy3:
-        DISTRIB: 'conda-mamba-pypy3'
-        DOCKER_CONTAINER: 'condaforge/mambaforge-pypy3:4.10.3-5'
-        PILLOW_VERSION: 'none'
-        PANDAS_VERSION: 'none'
+        DOCKER_CONTAINER: 'condaforge/miniforge3:4.10.3-5'
+        DISTRIB: 'conda-pypy3'
+        LOCK_FILE: './build_tools/azure/pypy3_linux-64_conda.lock'
 
 # Will run all the time regardless of linting outcome.
 - template: build_tools/azure/posix.yml
@@ -163,9 +161,7 @@ jobs:
     matrix:
       pylatest_conda_forge_mkl:
         DISTRIB: 'conda'
-        CONDA_CHANNEL: 'conda-forge'
-        PYTHON_VERSION: '*'
-        BLAS: 'mkl'
+        LOCK_FILE: './build_tools/azure/pylatest_conda_forge_mkl_linux-64_conda.lock'
         COVERAGE: 'true'
         SHOW_SHORT_SUMMARY: 'true'
         SKLEARN_TESTS_GLOBAL_RANDOM_SEED: '42' # default global random seed
@@ -184,9 +180,7 @@ jobs:
     matrix:
       py38_conda_forge_openblas_ubuntu_1804:
         DISTRIB: 'conda'
-        CONDA_CHANNEL: 'conda-forge'
-        PYTHON_VERSION: '3.8'
-        BLAS: 'openblas'
+        LOCK_FILE: './build_tools/azure/py38_conda_forge_openblas_ubuntu_1804_linux-64_conda.lock'
         COVERAGE: 'false'
         BUILD_WITH_ICC: 'false'
         SKLEARN_TESTS_GLOBAL_RANDOM_SEED: '0' # non-default seed
@@ -207,30 +201,21 @@ jobs:
       # i.e. numpy 1.17.4 and scipy 1.3.3
       ubuntu_atlas:
         DISTRIB: 'ubuntu'
-        JOBLIB_VERSION: 'min'
-        PANDAS_VERSION: 'none'
-        THREADPOOLCTL_VERSION: 'min'
+        LOCK_FILE: './build_tools/azure/ubuntu_atlas_lock.txt'
         COVERAGE: 'false'
         SKLEARN_TESTS_GLOBAL_RANDOM_SEED: '1' # non-default seed
       # Linux + Python 3.8 build with OpenBLAS
       py38_conda_defaults_openblas:
         DISTRIB: 'conda'
-        CONDA_CHANNEL: 'defaults' # Anaconda main channel
-        PYTHON_VERSION: '3.8'
-        BLAS: 'openblas'
-        NUMPY_VERSION: 'min'
-        SCIPY_VERSION: 'min'
-        MATPLOTLIB_VERSION: 'min'
-        THREADPOOLCTL_VERSION: '2.2.0'
+        LOCK_FILE: './build_tools/azure/py38_conda_defaults_openblas_linux-64_conda.lock'
         SKLEARN_ENABLE_DEBUG_CYTHON_DIRECTIVES: '1'
         SKLEARN_RUN_FLOAT32_TESTS: '1'
         SKLEARN_TESTS_GLOBAL_RANDOM_SEED: '2' # non-default seed
       # Linux environment to test the latest available dependencies.
       # It runs tests requiring lightgbm, pandas and PyAMG.
       pylatest_pip_openblas_pandas:
         DISTRIB: 'conda-pip-latest'
-        PYTHON_VERSION: '3.9'
-        PYTEST_VERSION: '6.2.5'
+        LOCK_FILE: './build_tools/azure/pylatest_pip_openblas_pandas_linux-64_conda.lock'
         CHECK_PYTEST_SOFT_DEPENDENCY: 'true'
         TEST_DOCSTRINGS: 'true'
         CHECK_WARNINGS: 'true'
@@ -248,13 +233,11 @@ jobs:
     )
     matrix:
       debian_atlas_32bit:
-        DISTRIB: 'debian-32'
         DOCKER_CONTAINER: 'i386/debian:11.2'
-        JOBLIB_VERSION: 'min'
+        DISTRIB: 'debian-32'
+        LOCK_FILE: './build_tools/azure/debian_atlas_32bit_lock.txt'
         # disable pytest xdist due to unknown bug with 32-bit container
         PYTEST_XDIST_VERSION: 'none'
-        PYTEST_VERSION: 'min'
-        THREADPOOLCTL_VERSION: '2.2.0'
         SKLEARN_TESTS_GLOBAL_RANDOM_SEED: '4' # non-default seed
 
 - template: build_tools/azure/posix.yml
@@ -270,12 +253,11 @@ jobs:
     matrix:
       pylatest_conda_forge_mkl:
         DISTRIB: 'conda'
-        BLAS: 'mkl'
-        CONDA_CHANNEL: 'conda-forge'
+        LOCK_FILE: './build_tools/azure/pylatest_conda_forge_mkl_osx-64_conda.lock'
         SKLEARN_TESTS_GLOBAL_RANDOM_SEED: '5' # non-default seed
       pylatest_conda_mkl_no_openmp:
         DISTRIB: 'conda'
-        BLAS: 'mkl'
+        LOCK_FILE: './build_tools/azure/pylatest_conda_mkl_no_openmp_osx-64_conda.lock'
         SKLEARN_TEST_NO_OPENMP: 'true'
         SKLEARN_SKIP_OPENMP_TEST: 'true'
         SKLEARN_TESTS_GLOBAL_RANDOM_SEED: '6' # non-default seed
build_tools/azure/debian_atlas_32bit_lock.txt

Lines changed: 34 additions & 0 deletions

@@ -0,0 +1,34 @@
+#
+# This file is autogenerated by pip-compile with python 3.9
+# To update, run:
+#
+#    pip-compile --output-file=build_tools/azure/debian_atlas_32bit_lock.txt build_tools/azure/debian_atlas_32bit_requirements.txt
+#
+atomicwrites==1.4.0
+    # via pytest
+attrs==21.4.0
+    # via pytest
+cython==0.29.28
+    # via -r build_tools/azure/debian_atlas_32bit_requirements.txt
+importlib-metadata==4.11.3
+    # via pytest
+joblib==1.0.0
+    # via -r build_tools/azure/debian_atlas_32bit_requirements.txt
+more-itertools==8.13.0
+    # via pytest
+packaging==21.3
+    # via pytest
+pluggy==0.13.1
+    # via pytest
+py==1.11.0
+    # via pytest
+pyparsing==3.0.9
+    # via packaging
+pytest==5.0.1
+    # via -r build_tools/azure/debian_atlas_32bit_requirements.txt
+threadpoolctl==2.2.0
+    # via -r build_tools/azure/debian_atlas_32bit_requirements.txt
+wcwidth==0.2.5
+    # via pytest
+zipp==3.8.0
+    # via importlib-metadata
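Inside the 32-bit Debian container this lock file is consumed directly by pip; per the install.sh changes further down, the setup is roughly as follows (the virtualenv name is illustrative):

    # Debian/Ubuntu builds reuse system site-packages and pin everything else
    # from the committed lock file.
    python3 -m virtualenv --system-site-packages --python=python3 testvenv
    source testvenv/bin/activate
    pip install -r ./build_tools/azure/debian_atlas_32bit_lock.txt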
build_tools/azure/debian_atlas_32bit_requirements.txt

Lines changed: 7 additions & 0 deletions

@@ -0,0 +1,7 @@
+# DO NOT EDIT: this file is generated from the specification found in the
+# following script to centralize the configuration for all Azure CI builds:
+# build_tools/azure/update_environments_and_lock_files.py
+cython
+joblib==1.0.0 # min
+threadpoolctl==2.2.0
+pytest==5.0.1 # min
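The pip lock file above is regenerated from this short requirements spec with pip-compile (part of pip-tools). A hedged sketch, assuming a Python 3.9 environment with pip-tools installed; in the repository the specifications are centralized in build_tools/azure/update_environments_and_lock_files.py:

    # The exact pip-compile invocation is recorded in the header of the
    # generated lock file.
    pip install pip-tools
    pip-compile --output-file=build_tools/azure/debian_atlas_32bit_lock.txt \
        build_tools/azure/debian_atlas_32bit_requirements.txt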

build_tools/azure/install.sh

Lines changed: 29 additions & 102 deletions

@@ -7,21 +7,8 @@ set -x
 source build_tools/shared.sh
 
 UNAMESTR=`uname`
-
 CCACHE_LINKS_DIR="/tmp/ccache"
 
-
-make_conda() {
-    TO_INSTALL="$@"
-    if [[ "$DISTRIB" == *"mamba"* ]]; then
-        mamba create -n $VIRTUALENV --yes $TO_INSTALL
-    else
-        conda config --show
-        conda create -n $VIRTUALENV --yes $TO_INSTALL
-    fi
-    source activate $VIRTUALENV
-}
-
 setup_ccache() {
     CCACHE_BIN=`which ccache || echo ""`
     if [[ "${CCACHE_BIN}" == "" ]]; then
@@ -53,8 +40,8 @@ pre_python_environment_install() {
             python3-matplotlib libatlas3-base libatlas-base-dev \
             python3-virtualenv python3-pandas ccache
 
-    elif [[ "$DISTRIB" == "conda-mamba-pypy3" ]]; then
-        # condaforge/mambaforge-pypy3 needs compilers
+    elif [[ "$DISTRIB" == "conda-pypy3" ]]; then
+        # need compilers
         apt-get -yq update
         apt-get -yq install build-essential
 
@@ -63,6 +50,14 @@ pre_python_environment_install() {
         sudo apt-get -yq update
         sudo apt-get install -yq ccache
         sudo apt-get build-dep -yq python3 python3-dev
+        setup_ccache # speed-up the build of CPython itself
+        # build Python nogil
+        PYTHON_NOGIL_CLONE_PATH=../nogil
+        git clone --depth 1 https://github.com/colesbury/nogil $PYTHON_NOGIL_CLONE_PATH
+        cd $PYTHON_NOGIL_CLONE_PATH
+        ./configure && make -j 2
+        export PYTHON_NOGIL_PATH="${PYTHON_NOGIL_CLONE_PATH}/python"
+        cd $OLDPWD
 
     elif [[ "$BUILD_WITH_ICC" == "true" ]]; then
         wget https://apt.repos.intel.com/intel-gpg-keys/GPG-PUB-KEY-INTEL-SW-PRODUCTS.PUB
@@ -76,103 +71,35 @@ pre_python_environment_install() {
     fi
 }
 
-python_environment_install() {
-    if [[ "$DISTRIB" == "conda" || "$DISTRIB" == *"mamba"* ]]; then
-
-        if [[ "$CONDA_CHANNEL" != "" ]]; then
-            TO_INSTALL="--override-channels -c $CONDA_CHANNEL"
-        else
-            TO_INSTALL=""
-        fi
+python_environment_install_and_activate() {
+    if [[ "$DISTRIB" == "conda"* ]]; then
+        conda update -n base conda -y
+        # pin conda-lock to latest released version (needs manual update from time to time)
+        conda install -c conda-forge conda-lock==1.0.5 -y
+        conda-lock install --name $VIRTUALENV $LOCK_FILE
+        source activate $VIRTUALENV
 
-        if [[ "$DISTRIB" == *"pypy"* ]]; then
-            TO_INSTALL="$TO_INSTALL pypy"
-        else
-            TO_INSTALL="$TO_INSTALL python=$PYTHON_VERSION"
-        fi
-
-        TO_INSTALL="$TO_INSTALL ccache pip blas[build=$BLAS]"
-
-        TO_INSTALL="$TO_INSTALL $(get_dep numpy $NUMPY_VERSION)"
-        TO_INSTALL="$TO_INSTALL $(get_dep scipy $SCIPY_VERSION)"
-        TO_INSTALL="$TO_INSTALL $(get_dep cython $CYTHON_VERSION)"
-        TO_INSTALL="$TO_INSTALL $(get_dep joblib $JOBLIB_VERSION)"
-        TO_INSTALL="$TO_INSTALL $(get_dep pandas $PANDAS_VERSION)"
-        TO_INSTALL="$TO_INSTALL $(get_dep pyamg $PYAMG_VERSION)"
-        TO_INSTALL="$TO_INSTALL $(get_dep Pillow $PILLOW_VERSION)"
-        TO_INSTALL="$TO_INSTALL $(get_dep matplotlib $MATPLOTLIB_VERSION)"
-
-        if [[ "$UNAMESTR" == "Darwin" ]] && [[ "$SKLEARN_TEST_NO_OPENMP" != "true" ]]; then
-            TO_INSTALL="$TO_INSTALL compilers llvm-openmp"
-        fi
-
-        make_conda $TO_INSTALL
-
-    elif [[ "$DISTRIB" == "ubuntu" ]] || [[ "$DISTRIB" == "debian-32" ]]; then
+    elif [[ "$DISTRIB" == "ubuntu" || "$DISTRIB" == "debian-32" ]]; then
         python3 -m virtualenv --system-site-packages --python=python3 $VIRTUALENV
         source $VIRTUALENV/bin/activate
+        pip install -r "${LOCK_FILE}"
 
-        python -m pip install $(get_dep cython $CYTHON_VERSION) \
-                              $(get_dep joblib $JOBLIB_VERSION)
-
-    elif [[ "$DISTRIB" == "conda-pip-latest" ]]; then
-        # Since conda main channel usually lacks behind on the latest releases,
-        # we use pypi to test against the latest releases of the dependencies.
-        # conda is still used as a convenient way to install Python and pip.
-        make_conda "ccache python=$PYTHON_VERSION"
-        python -m pip install -U pip
-
-        python -m pip install pandas matplotlib scikit-image pyamg
-        # do not install dependencies for lightgbm since it requires scikit-learn.
-        python -m pip install "lightgbm>=3.0.0" --no-deps
+    elif [[ "$DISTRIB" == "pip-nogil" ]]; then
+        ${PYTHON_NOGIL_PATH} -m venv $VIRTUALENV
+        source $VIRTUALENV/bin/activate
+        pip install -r "${LOCK_FILE}"
+    fi
 
-    elif [[ "$DISTRIB" == "conda-pip-scipy-dev" ]]; then
-        make_conda "ccache python=$PYTHON_VERSION"
-        python -m pip install -U pip
-        echo "Installing numpy and scipy master wheels"
+    if [[ "$DISTRIB" == "conda-pip-scipy-dev" ]]; then
+        echo "Installing development dependency wheels"
        dev_anaconda_url=https://pypi.anaconda.org/scipy-wheels-nightly/simple
        pip install --pre --upgrade --timeout=60 --extra-index $dev_anaconda_url numpy pandas scipy
+        echo "Installing Cython from PyPI enabling pre-releases"
        pip install --pre cython
        echo "Installing joblib master"
        pip install https://github.com/joblib/joblib/archive/master.zip
        echo "Installing pillow master"
        pip install https://github.com/python-pillow/Pillow/archive/main.zip
-
-    elif [[ "$DISTRIB" == "pip-nogil" ]]; then
-        setup_ccache # speed-up the build of CPython it-self
-        ORIGINAL_FOLDER=`pwd`
-        cd ..
-        git clone --depth 1 https://github.com/colesbury/nogil
-        cd nogil
-        ./configure && make -j 2
-        ./python -m venv $ORIGINAL_FOLDER/$VIRTUALENV
-        cd $ORIGINAL_FOLDER
-        source $VIRTUALENV/bin/activate
-
-        python -m pip install -U pip
-        # The pip version that comes with the nogil branch of CPython
-        # automatically uses the custom nogil index as its highest priority
-        # index to fetch patched versions of libraries with native code that
-        # would otherwise depend on the GIL.
-        echo "Installing build dependencies with pip from the nogil repository: https://d1yxz45j0ypngg.cloudfront.net/"
-        pip install numpy scipy cython joblib threadpoolctl
-
-    fi
-
-    python -m pip install $(get_dep threadpoolctl $THREADPOOLCTL_VERSION) \
-                          $(get_dep pytest $PYTEST_VERSION) \
-                          $(get_dep pytest-xdist $PYTEST_XDIST_VERSION)
-
-    if [[ "$COVERAGE" == "true" ]]; then
-        # XXX: coverage is temporary pinned to 6.2 because 6.3 is not fork-safe
-        # cf. https://github.com/nedbat/coveragepy/issues/1310
-        python -m pip install codecov pytest-cov coverage==6.2
-    fi
-
-    if [[ "$TEST_DOCSTRINGS" == "true" ]]; then
-        # numpydoc requires sphinx
-        python -m pip install sphinx
-        python -m pip install numpydoc
     fi
 }
 
@@ -184,7 +111,7 @@ scikit_learn_install() {
     # workers with 2 cores when building the compiled extensions of scikit-learn.
     export SKLEARN_BUILD_PARALLEL=3
 
-    if [[ "$UNAMESTR" == "Darwin" ]] && [[ "$SKLEARN_TEST_NO_OPENMP" == "true" ]]; then
+    if [[ "$UNAMESTR" == "Darwin" && "$SKLEARN_TEST_NO_OPENMP" == "true" ]]; then
         # Without openmp, we use the system clang. Here we use /usr/bin/ar
         # instead because llvm-ar errors
        export AR=/usr/bin/ar
@@ -220,7 +147,7 @@ scikit_learn_install() {
 
 main() {
     pre_python_environment_install
-    python_environment_install
+    python_environment_install_and_activate
     scikit_learn_install
 }
 
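The *_conda.lock files consumed above are themselves produced by conda-lock from per-build environment specifications; this diff only shows how they are consumed. As a rough, assumed illustration of generating one explicit lock file by hand (the environment file name and the exact conda-lock flags are assumptions; the environment specifications are centralized in build_tools/azure/update_environments_and_lock_files.py):

    # Assumed invocation: resolve an environment spec once and write an
    # explicit, platform-specific lock file.
    conda install -c conda-forge conda-lock==1.0.5 -y
    conda-lock lock --kind explicit --platform linux-64 \
        --file environment.yml  # hypothetical environment spec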
