7
7
source build_tools/shared.sh
8
8
9
9
UNAMESTR=` uname`
10
-
11
10
CCACHE_LINKS_DIR=" /tmp/ccache"
12
11
13
-
14
- make_conda () {
15
- TO_INSTALL=" $@ "
16
- if [[ " $DISTRIB " == * " mamba" * ]]; then
17
- mamba create -n $VIRTUALENV --yes $TO_INSTALL
18
- else
19
- conda config --show
20
- conda create -n $VIRTUALENV --yes $TO_INSTALL
21
- fi
22
- source activate $VIRTUALENV
23
- }
24
-
25
12
setup_ccache () {
26
13
CCACHE_BIN=` which ccache || echo " " `
27
14
if [[ " ${CCACHE_BIN} " == " " ]]; then
@@ -53,8 +40,8 @@ pre_python_environment_install() {
53
40
python3-matplotlib libatlas3-base libatlas-base-dev \
54
41
python3-virtualenv python3-pandas ccache
55
42
56
- elif [[ " $DISTRIB " == " conda-mamba- pypy3" ]]; then
57
- # condaforge/mambaforge-pypy3 needs compilers
43
+ elif [[ " $DISTRIB " == " conda-pypy3" ]]; then
44
+ # need compilers
58
45
apt-get -yq update
59
46
apt-get -yq install build-essential
60
47
@@ -63,6 +50,14 @@ pre_python_environment_install() {
63
50
sudo apt-get -yq update
64
51
sudo apt-get install -yq ccache
65
52
sudo apt-get build-dep -yq python3 python3-dev
53
+ setup_ccache # speed-up the build of CPython itself
54
+ # build Python nogil
55
+ PYTHON_NOGIL_CLONE_PATH=../nogil
56
+ git clone --depth 1 https://github.com/colesbury/nogil $PYTHON_NOGIL_CLONE_PATH
57
+ cd $PYTHON_NOGIL_CLONE_PATH
58
+ ./configure && make -j 2
59
+ export PYTHON_NOGIL_PATH=" ${PYTHON_NOGIL_CLONE_PATH} /python"
60
+ cd $OLDPWD
66
61
67
62
elif [[ " $BUILD_WITH_ICC " == " true" ]]; then
68
63
wget https://apt.repos.intel.com/intel-gpg-keys/GPG-PUB-KEY-INTEL-SW-PRODUCTS.PUB
@@ -76,103 +71,35 @@ pre_python_environment_install() {
76
71
fi
77
72
}
78
73
79
- python_environment_install () {
80
- if [[ " $DISTRIB " == " conda" || " $DISTRIB " == * " mamba" * ]]; then
81
-
82
- if [[ " $CONDA_CHANNEL " != " " ]]; then
83
- TO_INSTALL=" --override-channels -c $CONDA_CHANNEL "
84
- else
85
- TO_INSTALL=" "
86
- fi
74
+ python_environment_install_and_activate () {
75
+ if [[ " $DISTRIB " == " conda" * ]]; then
76
+ conda update -n base conda -y
77
+ # pin conda-lock to latest released version (needs manual update from time to time)
78
+ conda install -c conda-forge conda-lock==1.0.5 -y
79
+ conda-lock install --name $VIRTUALENV $LOCK_FILE
80
+ source activate $VIRTUALENV
87
81
88
- if [[ " $DISTRIB " == * " pypy" * ]]; then
89
- TO_INSTALL=" $TO_INSTALL pypy"
90
- else
91
- TO_INSTALL=" $TO_INSTALL python=$PYTHON_VERSION "
92
- fi
93
-
94
- TO_INSTALL=" $TO_INSTALL ccache pip blas[build=$BLAS ]"
95
-
96
- TO_INSTALL=" $TO_INSTALL $( get_dep numpy $NUMPY_VERSION ) "
97
- TO_INSTALL=" $TO_INSTALL $( get_dep scipy $SCIPY_VERSION ) "
98
- TO_INSTALL=" $TO_INSTALL $( get_dep cython $CYTHON_VERSION ) "
99
- TO_INSTALL=" $TO_INSTALL $( get_dep joblib $JOBLIB_VERSION ) "
100
- TO_INSTALL=" $TO_INSTALL $( get_dep pandas $PANDAS_VERSION ) "
101
- TO_INSTALL=" $TO_INSTALL $( get_dep pyamg $PYAMG_VERSION ) "
102
- TO_INSTALL=" $TO_INSTALL $( get_dep Pillow $PILLOW_VERSION ) "
103
- TO_INSTALL=" $TO_INSTALL $( get_dep matplotlib $MATPLOTLIB_VERSION ) "
104
-
105
- if [[ " $UNAMESTR " == " Darwin" ]] && [[ " $SKLEARN_TEST_NO_OPENMP " != " true" ]]; then
106
- TO_INSTALL=" $TO_INSTALL compilers llvm-openmp"
107
- fi
108
-
109
- make_conda $TO_INSTALL
110
-
111
- elif [[ " $DISTRIB " == " ubuntu" ]] || [[ " $DISTRIB " == " debian-32" ]]; then
82
+ elif [[ " $DISTRIB " == " ubuntu" || " $DISTRIB " == " debian-32" ]]; then
112
83
python3 -m virtualenv --system-site-packages --python=python3 $VIRTUALENV
113
84
source $VIRTUALENV /bin/activate
85
+ pip install -r " ${LOCK_FILE} "
114
86
115
- python -m pip install $( get_dep cython $CYTHON_VERSION ) \
116
- $( get_dep joblib $JOBLIB_VERSION )
117
-
118
- elif [[ " $DISTRIB " == " conda-pip-latest" ]]; then
119
- # Since conda main channel usually lacks behind on the latest releases,
120
- # we use pypi to test against the latest releases of the dependencies.
121
- # conda is still used as a convenient way to install Python and pip.
122
- make_conda " ccache python=$PYTHON_VERSION "
123
- python -m pip install -U pip
124
-
125
- python -m pip install pandas matplotlib scikit-image pyamg
126
- # do not install dependencies for lightgbm since it requires scikit-learn.
127
- python -m pip install " lightgbm>=3.0.0" --no-deps
87
+ elif [[ " $DISTRIB " == " pip-nogil" ]]; then
88
+ ${PYTHON_NOGIL_PATH} -m venv $VIRTUALENV
89
+ source $VIRTUALENV /bin/activate
90
+ pip install -r " ${LOCK_FILE} "
91
+ fi
128
92
129
- elif [[ " $DISTRIB " == " conda-pip-scipy-dev" ]]; then
130
- make_conda " ccache python=$PYTHON_VERSION "
131
- python -m pip install -U pip
132
- echo " Installing numpy and scipy master wheels"
93
+ if [[ " $DISTRIB " == " conda-pip-scipy-dev" ]]; then
94
+ echo " Installing development dependency wheels"
133
95
dev_anaconda_url=https://pypi.anaconda.org/scipy-wheels-nightly/simple
134
96
pip install --pre --upgrade --timeout=60 --extra-index $dev_anaconda_url numpy pandas scipy
97
+ echo " Installing Cython from PyPI enabling pre-releases"
135
98
pip install --pre cython
136
99
echo " Installing joblib master"
137
100
pip install https://github.com/joblib/joblib/archive/master.zip
138
101
echo " Installing pillow master"
139
102
pip install https://github.com/python-pillow/Pillow/archive/main.zip
140
-
141
- elif [[ " $DISTRIB " == " pip-nogil" ]]; then
142
- setup_ccache # speed-up the build of CPython it-self
143
- ORIGINAL_FOLDER=` pwd`
144
- cd ..
145
- git clone --depth 1 https://github.com/colesbury/nogil
146
- cd nogil
147
- ./configure && make -j 2
148
- ./python -m venv $ORIGINAL_FOLDER /$VIRTUALENV
149
- cd $ORIGINAL_FOLDER
150
- source $VIRTUALENV /bin/activate
151
-
152
- python -m pip install -U pip
153
- # The pip version that comes with the nogil branch of CPython
154
- # automatically uses the custom nogil index as its highest priority
155
- # index to fetch patched versions of libraries with native code that
156
- # would otherwise depend on the GIL.
157
- echo " Installing build dependencies with pip from the nogil repository: https://d1yxz45j0ypngg.cloudfront.net/"
158
- pip install numpy scipy cython joblib threadpoolctl
159
-
160
- fi
161
-
162
- python -m pip install $( get_dep threadpoolctl $THREADPOOLCTL_VERSION ) \
163
- $( get_dep pytest $PYTEST_VERSION ) \
164
- $( get_dep pytest-xdist $PYTEST_XDIST_VERSION )
165
-
166
- if [[ " $COVERAGE " == " true" ]]; then
167
- # XXX: coverage is temporary pinned to 6.2 because 6.3 is not fork-safe
168
- # cf. https://github.com/nedbat/coveragepy/issues/1310
169
- python -m pip install codecov pytest-cov coverage==6.2
170
- fi
171
-
172
- if [[ " $TEST_DOCSTRINGS " == " true" ]]; then
173
- # numpydoc requires sphinx
174
- python -m pip install sphinx
175
- python -m pip install numpydoc
176
103
fi
177
104
}
178
105
@@ -184,7 +111,7 @@ scikit_learn_install() {
184
111
# workers with 2 cores when building the compiled extensions of scikit-learn.
185
112
export SKLEARN_BUILD_PARALLEL=3
186
113
187
- if [[ " $UNAMESTR " == " Darwin" ]] && [[ " $SKLEARN_TEST_NO_OPENMP " == " true" ]]; then
114
+ if [[ " $UNAMESTR " == " Darwin" && " $SKLEARN_TEST_NO_OPENMP " == " true" ]]; then
188
115
# Without openmp, we use the system clang. Here we use /usr/bin/ar
189
116
# instead because llvm-ar errors
190
117
export AR=/usr/bin/ar
@@ -220,7 +147,7 @@ scikit_learn_install() {
220
147
221
148
main () {
222
149
pre_python_environment_install
223
- python_environment_install
150
+ python_environment_install_and_activate
224
151
scikit_learn_install
225
152
}
226
153
# (scraped page footer — not part of the script): 0 commit comments