33set -e
44set -x
55
6- UNAMESTR=` uname`
6+ # defines the get_dep and show_installed_libraries functions
7+ source build_tools/shared.sh
78
8- if [[ " $DISTRIB " == " conda-mamba-pypy3" ]]; then
9- # condaforge/mambaforge-pypy3 needs compilers
10- apt-get -yq update
11- apt-get -yq install build-essential
12- fi
9+ UNAMESTR=` uname`
1310
1411make_conda () {
1512 TO_INSTALL=" $@ "
@@ -33,155 +30,161 @@ setup_ccache() {
3330 ccache -M 256M
3431}
3532
36- # defines the get_dep and show_installed_libraries functions
37- source build_tools/shared.sh
33+ pre_python_environment_install () {
34+ if [[ " $DISTRIB " == " ubuntu" ]]; then
35+ sudo add-apt-repository --remove ppa:ubuntu-toolchain-r/test
36+ sudo apt-get update
37+ sudo apt-get install python3-scipy python3-matplotlib \
38+ libatlas3-base libatlas-base-dev python3-virtualenv ccache
3839
39- if [[ " $DISTRIB " == " conda" || " $DISTRIB " == * " mamba" * ]]; then
40+ elif [[ " $DISTRIB " == " debian-32" ]]; then
41+ apt-get update
42+ apt-get install -y python3-dev python3-numpy python3-scipy \
43+ python3-matplotlib libatlas3-base libatlas-base-dev \
44+ python3-virtualenv python3-pandas ccache
4045
41- if [[ " $CONDA_CHANNEL " != " " ]]; then
42- TO_INSTALL=" --override-channels -c $CONDA_CHANNEL "
43- else
44- TO_INSTALL=" "
45- fi
46+ elif [[ " $DISTRIB " == " conda-mamba-pypy3" ]]; then
47+ # condaforge/mambaforge-pypy3 needs compilers
48+ apt-get -yq update
49+ apt-get -yq install build-essential
4650
47- if [[ " $DISTRIB " == * " pypy" * ]]; then
48- TO_INSTALL=" $TO_INSTALL pypy"
49- else
50- TO_INSTALL=" $TO_INSTALL python=$PYTHON_VERSION "
51+ elif [[ " $BUILD_WITH_ICC " == " true" ]]; then
52+ wget https://apt.repos.intel.com/intel-gpg-keys/GPG-PUB-KEY-INTEL-SW-PRODUCTS.PUB
53+ sudo apt-key add GPG-PUB-KEY-INTEL-SW-PRODUCTS.PUB
54+ rm GPG-PUB-KEY-INTEL-SW-PRODUCTS.PUB
55+ sudo add-apt-repository " deb https://apt.repos.intel.com/oneapi all main"
56+ sudo apt-get update
57+ sudo apt-get install intel-oneapi-compiler-dpcpp-cpp-and-cpp-classic
58+ source /opt/intel/oneapi/setvars.sh
5159 fi
60+ }
5261
53- TO_INSTALL=" $TO_INSTALL ccache pip blas[build=$BLAS ]"
62+ python_environment_install () {
63+ if [[ " $DISTRIB " == " conda" || " $DISTRIB " == * " mamba" * ]]; then
5464
55- TO_INSTALL=" $TO_INSTALL $( get_dep numpy $NUMPY_VERSION ) "
56- TO_INSTALL=" $TO_INSTALL $( get_dep scipy $SCIPY_VERSION ) "
57- TO_INSTALL=" $TO_INSTALL $( get_dep cython $CYTHON_VERSION ) "
58- TO_INSTALL=" $TO_INSTALL $( get_dep joblib $JOBLIB_VERSION ) "
59- TO_INSTALL=" $TO_INSTALL $( get_dep pandas $PANDAS_VERSION ) "
60- TO_INSTALL=" $TO_INSTALL $( get_dep pyamg $PYAMG_VERSION ) "
61- TO_INSTALL=" $TO_INSTALL $( get_dep Pillow $PILLOW_VERSION ) "
62- TO_INSTALL=" $TO_INSTALL $( get_dep matplotlib $MATPLOTLIB_VERSION ) "
65+ if [[ " $CONDA_CHANNEL " != " " ]]; then
66+ TO_INSTALL=" --override-channels -c $CONDA_CHANNEL "
67+ else
68+ TO_INSTALL=" "
69+ fi
6370
64- if [[ " $UNAMESTR " == " Darwin" ]]; then
65- if [[ " $SKLEARN_TEST_NO_OPENMP " != " true" ]]; then
66- TO_INSTALL=" $TO_INSTALL compilers llvm-openmp"
71+ if [[ " $DISTRIB " == * " pypy" * ]]; then
72+ TO_INSTALL=" $TO_INSTALL pypy"
6773 else
68- # Without openmp, we use the system clang. Here we use /usr/bin/ar
69- # instead because llvm-ar errors
70- export AR=/usr/bin/ar
74+ TO_INSTALL=" $TO_INSTALL python=$PYTHON_VERSION "
7175 fi
72- else
73- # FIXME: temporary fix to link against system libraries on linux
74- export LDFLAGS=" $LDFLAGS -Wl,--sysroot=/"
76+
77+ TO_INSTALL=" $TO_INSTALL ccache pip blas[build=$BLAS ]"
78+
79+ TO_INSTALL=" $TO_INSTALL $( get_dep numpy $NUMPY_VERSION ) "
80+ TO_INSTALL=" $TO_INSTALL $( get_dep scipy $SCIPY_VERSION ) "
81+ TO_INSTALL=" $TO_INSTALL $( get_dep cython $CYTHON_VERSION ) "
82+ TO_INSTALL=" $TO_INSTALL $( get_dep joblib $JOBLIB_VERSION ) "
83+ TO_INSTALL=" $TO_INSTALL $( get_dep pandas $PANDAS_VERSION ) "
84+ TO_INSTALL=" $TO_INSTALL $( get_dep pyamg $PYAMG_VERSION ) "
85+ TO_INSTALL=" $TO_INSTALL $( get_dep Pillow $PILLOW_VERSION ) "
86+ TO_INSTALL=" $TO_INSTALL $( get_dep matplotlib $MATPLOTLIB_VERSION ) "
87+
88+ if [[ " $UNAMESTR " == " Darwin" ]] && [[ " $SKLEARN_TEST_NO_OPENMP " != " true" ]]; then
89+ TO_INSTALL=" $TO_INSTALL compilers llvm-openmp"
90+ fi
91+
92+ make_conda $TO_INSTALL
93+
94+ elif [[ " $DISTRIB " == " ubuntu" ]] || [[ " $DISTRIB " == " debian-32" ]]; then
95+ python3 -m virtualenv --system-site-packages --python=python3 $VIRTUALENV
96+ source $VIRTUALENV /bin/activate
97+
98+ python -m pip install $( get_dep cython $CYTHON_VERSION ) \
99+ $( get_dep joblib $JOBLIB_VERSION )
100+
101+ elif [[ " $DISTRIB " == " conda-pip-latest" ]]; then
102+ # Since conda main channel usually lacks behind on the latest releases,
103+ # we use pypi to test against the latest releases of the dependencies.
104+ # conda is still used as a convenient way to install Python and pip.
105+ make_conda " ccache python=$PYTHON_VERSION "
106+ python -m pip install -U pip
107+
108+ python -m pip install pandas matplotlib scikit-image pyamg
109+ # do not install dependencies for lightgbm since it requires scikit-learn.
110+ python -m pip install " lightgbm>=3.0.0" --no-deps
111+
112+ elif [[ " $DISTRIB " == " conda-pip-scipy-dev" ]]; then
113+ make_conda " ccache python=$PYTHON_VERSION "
114+ python -m pip install -U pip
115+ echo " Installing numpy and scipy master wheels"
116+ dev_anaconda_url=https://pypi.anaconda.org/scipy-wheels-nightly/simple
117+ pip install --pre --upgrade --timeout=60 --extra-index $dev_anaconda_url numpy pandas scipy
118+ pip install --pre cython
119+ echo " Installing joblib master"
120+ pip install https://github.com/joblib/joblib/archive/master.zip
121+ echo " Installing pillow master"
122+ pip install https://github.com/python-pillow/Pillow/archive/main.zip
75123 fi
76- make_conda $TO_INSTALL
77- setup_ccache
78124
79- elif [[ " $DISTRIB " == " ubuntu" ]]; then
80- sudo add-apt-repository --remove ppa:ubuntu-toolchain-r/test
81- sudo apt-get update
82- sudo apt-get install python3-scipy python3-matplotlib libatlas3-base libatlas-base-dev python3-virtualenv ccache
83- python3 -m virtualenv --system-site-packages --python=python3 $VIRTUALENV
84- source $VIRTUALENV /bin/activate
85- setup_ccache
86- python -m pip install $( get_dep cython $CYTHON_VERSION ) \
87- $( get_dep joblib $JOBLIB_VERSION )
125+ python -m pip install $( get_dep threadpoolctl $THREADPOOLCTL_VERSION ) \
126+ $( get_dep pytest $PYTEST_VERSION ) \
127+ $( get_dep pytest-xdist $PYTEST_XDIST_VERSION )
88128
89- elif [[ " $DISTRIB " == " debian-32" ]]; then
90- apt-get update
91- apt-get install -y python3-dev python3-numpy python3-scipy python3-matplotlib libatlas3-base libatlas-base-dev python3-virtualenv python3-pandas ccache
129+ if [[ " $COVERAGE " == " true" ]]; then
130+ # XXX: coverage is temporary pinned to 6.2 because 6.3 is not fork-safe
131+ # cf. https://github.com/nedbat/coveragepy/issues/1310
132+ python -m pip install codecov pytest-cov coverage==6.2
133+ fi
92134
93- python3 -m virtualenv --system-site-packages --python=python3 $VIRTUALENV
94- source $VIRTUALENV /bin/activate
95- setup_ccache
96- python -m pip install $( get_dep cython $CYTHON_VERSION ) \
97- $( get_dep joblib $JOBLIB_VERSION )
98-
99- elif [[ " $DISTRIB " == " conda-pip-latest" ]]; then
100- # FIXME: temporary fix to link against system libraries on linux
101- export LDFLAGS=" $LDFLAGS -Wl,--sysroot=/"
102- # Since conda main channel usually lacks behind on the latest releases,
103- # we use pypi to test against the latest releases of the dependencies.
104- # conda is still used as a convenient way to install Python and pip.
105- make_conda " ccache python=$PYTHON_VERSION "
106- setup_ccache
107- python -m pip install -U pip
108-
109- # Do not build scikit-image from source because it is an optional dependency
110- python -m pip install --only-binary :all: scikit-image || true
111-
112- python -m pip install pandas matplotlib pyamg
113- # do not install dependencies for lightgbm since it requires scikit-learn.
114- python -m pip install " lightgbm>=3.0.0" --no-deps
115- elif [[ " $DISTRIB " == " conda-pip-scipy-dev" ]]; then
116- # FIXME: temporary fix to link against system libraries on linux
117- export LDFLAGS=" $LDFLAGS -Wl,--sysroot=/"
118- make_conda " ccache python=$PYTHON_VERSION "
119- python -m pip install -U pip
120- echo " Installing numpy and scipy master wheels"
121- dev_anaconda_url=https://pypi.anaconda.org/scipy-wheels-nightly/simple
122- pip install --pre --upgrade --timeout=60 --extra-index $dev_anaconda_url numpy pandas scipy
123- pip install --pre cython
135+ if [[ " $TEST_DOCSTRINGS " == " true" ]]; then
136+ # numpydoc requires sphinx
137+ python -m pip install sphinx
138+ python -m pip install numpydoc
139+ fi
140+ }
141+
142+ scikit_learn_install () {
124143 setup_ccache
125- echo " Installing joblib master"
126- pip install https://github.com/joblib/joblib/archive/master.zip
127- echo " Installing pillow master"
128- pip install https://github.com/python-pillow/Pillow/archive/main.zip
129- fi
130-
131- python -m pip install $( get_dep threadpoolctl $THREADPOOLCTL_VERSION ) \
132- $( get_dep pytest $PYTEST_VERSION ) \
133- $( get_dep pytest-xdist $PYTEST_XDIST_VERSION )
134-
135- if [[ " $COVERAGE " == " true" ]]; then
136- # XXX: coverage is temporary pinned to 6.2 because 6.3 is not fork-safe
137- # cf. https://github.com/nedbat/coveragepy/issues/1310
138- python -m pip install codecov pytest-cov coverage==6.2
139- fi
140-
141- if [[ " $TEST_DOCSTRINGS " == " true" ]]; then
142- # numpydoc requires sphinx
143- python -m pip install sphinx
144- python -m pip install numpydoc
145- fi
146-
147- python --version
148- python -c " import numpy; print('numpy %s' % numpy.__version__)"
149- python -c " import scipy; print('scipy %s' % scipy.__version__)"
150- python -c " \
151- try:
152- import pandas
153- print('pandas %s' % pandas.__version__)
154- except ImportError:
155- print('pandas not installed')
156- "
157- # Set parallelism to 3 to overlap IO bound tasks with CPU bound tasks on CI
158- # workers with 2 cores when building the compiled extensions of scikit-learn.
159- export SKLEARN_BUILD_PARALLEL=3
160-
161- show_installed_libraries
162-
163- if [[ " $DISTRIB " == " conda-pip-latest" ]]; then
164- # Check that pip can automatically build scikit-learn with the build
165- # dependencies specified in pyproject.toml using an isolated build
166- # environment:
167- pip install --verbose --editable .
168- else
169- if [[ " $BUILD_WITH_ICC " == " true" ]]; then
170- wget https://apt.repos.intel.com/intel-gpg-keys/GPG-PUB-KEY-INTEL-SW-PRODUCTS.PUB
171- sudo apt-key add GPG-PUB-KEY-INTEL-SW-PRODUCTS.PUB
172- rm GPG-PUB-KEY-INTEL-SW-PRODUCTS.PUB
173- sudo add-apt-repository " deb https://apt.repos.intel.com/oneapi all main"
174- sudo apt-get update
175- sudo apt-get install intel-oneapi-compiler-dpcpp-cpp-and-cpp-classic
176- source /opt/intel/oneapi/setvars.sh
144+ show_installed_libraries
145+
146+ # Set parallelism to 3 to overlap IO bound tasks with CPU bound tasks on CI
147+ # workers with 2 cores when building the compiled extensions of scikit-learn.
148+ export SKLEARN_BUILD_PARALLEL=3
177149
150+ if [[ " $UNAMESTR " == " Darwin" ]] && [[ " $SKLEARN_TEST_NO_OPENMP " == " true" ]]; then
151+ # Without openmp, we use the system clang. Here we use /usr/bin/ar
152+ # instead because llvm-ar errors
153+ export AR=/usr/bin/ar
154+ fi
155+
156+ if [[ " $UNAMESTR " == " Linux" ]]; then
157+ # FIXME: temporary fix to link against system libraries on linux
158+ # https://github.com/scikit-learn/scikit-learn/issues/20640
159+ export LDFLAGS=" $LDFLAGS -Wl,--sysroot=/"
160+ fi
161+
162+ if [[ " $BUILD_WITH_ICC " == " true" ]]; then
178163 # The "build_clib" command is implicitly used to build "libsvm-skl".
179164 # To compile with a different compiler, we also need to specify the
180165 # compiler for this command
181166 python setup.py build_ext --compiler=intelem -i build_clib --compiler=intelem
182167 fi
183- # Use the pre-installed build dependencies and build directly in the
184- # current environment.
185- python setup.py develop
186- fi
187- ccache -s
168+
169+ # TODO use a specific variable for this rather than using a particular build ...
170+ if [[ " $DISTRIB " == " conda-pip-latest" ]]; then
171+ # Check that pip can automatically build scikit-learn with the build
172+ # dependencies specified in pyproject.toml using an isolated build
173+ # environment:
174+ pip install --verbose --editable .
175+ else
176+ # Use the pre-installed build dependencies and build directly in the
177+ # current environment.
178+ python setup.py develop
179+ fi
180+
181+ ccache -s
182+ }
183+
184+ main () {
185+ pre_python_environment_install
186+ python_environment_install
187+ scikit_learn_install
188+ }
189+
190+ main