From 771573ec4272e6049778f9ccf7407a8afc477a40 Mon Sep 17 00:00:00 2001 From: "Thomas J. Fan" Date: Sun, 2 Oct 2022 17:53:24 -0400 Subject: [PATCH 01/26] BLD Migrate away from distutils and only use setuptools --- MANIFEST.in | 2 +- azure-pipelines.yml | 39 +- doc/developers/advanced_installation.rst | 69 --- setup.cfg | 6 + setup.py | 511 +++++++++++++----- sklearn/__check_build/setup.py | 21 - sklearn/_build_utils/__init__.py | 6 +- sklearn/_build_utils/pre_build_helpers.py | 43 +- sklearn/_loss/setup.py | 25 - sklearn/cluster/setup.py | 68 --- sklearn/datasets/setup.py | 27 - sklearn/decomposition/setup.py | 35 -- sklearn/ensemble/setup.py | 73 --- sklearn/externals/_numpy_compiler_patch.py | 139 ----- sklearn/feature_extraction/setup.py | 22 - sklearn/inspection/setup.py | 18 - sklearn/linear_model/setup.py | 49 -- sklearn/manifold/setup.py | 39 -- .../_pairwise_distances_reduction/setup.py | 55 -- sklearn/metrics/cluster/setup.py | 27 - sklearn/neighbors/setup.py | 44 -- sklearn/preprocessing/setup.py | 22 - sklearn/setup.py | 93 ---- sklearn/svm/setup.py | 134 ----- sklearn/tree/setup.py | 50 -- sklearn/utils/setup.py | 133 ----- 26 files changed, 414 insertions(+), 1336 deletions(-) delete mode 100644 sklearn/__check_build/setup.py delete mode 100644 sklearn/_loss/setup.py delete mode 100644 sklearn/cluster/setup.py delete mode 100644 sklearn/datasets/setup.py delete mode 100644 sklearn/decomposition/setup.py delete mode 100644 sklearn/ensemble/setup.py delete mode 100644 sklearn/externals/_numpy_compiler_patch.py delete mode 100644 sklearn/feature_extraction/setup.py delete mode 100644 sklearn/inspection/setup.py delete mode 100644 sklearn/linear_model/setup.py delete mode 100644 sklearn/manifold/setup.py delete mode 100644 sklearn/metrics/_pairwise_distances_reduction/setup.py delete mode 100644 sklearn/metrics/cluster/setup.py delete mode 100644 sklearn/neighbors/setup.py delete mode 100644 sklearn/preprocessing/setup.py delete mode 100644 sklearn/setup.py delete mode 100644 sklearn/svm/setup.py delete mode 100644 sklearn/tree/setup.py delete mode 100644 sklearn/utils/setup.py diff --git a/MANIFEST.in b/MANIFEST.in index fe346e1f9ab8b..d332c24211b93 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,7 +1,7 @@ include *.rst recursive-include doc * recursive-include examples * -recursive-include sklearn *.c *.h *.pyx *.pxd *.pxi *.tp +recursive-include sklearn *.c *.cpp *.h *.pyx *.pxd *.pxi *.tp recursive-include sklearn/datasets *.csv *.csv.gz *.rst *.jpg *.txt *.arff.gz *.json.gz include COPYING include README.rst diff --git a/azure-pipelines.yml b/azure-pipelines.yml index eee5c150daffb..6addf67382019 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -106,26 +106,27 @@ jobs: LOCK_FILE: './build_tools/azure/python_nogil_lock.txt' COVERAGE: 'false' +# XXX: Intel C++ compiler support removed when migrating away from numpy.distuils # Check compilation with intel C++ compiler (ICC) -- template: build_tools/azure/posix.yml - parameters: - name: Linux_Nightly_ICC - vmImage: ubuntu-20.04 - dependsOn: [git_commit, linting] - condition: | - and( - succeeded(), - not(contains(dependencies['git_commit']['outputs']['commit.message'], '[ci skip]')), - or(eq(variables['Build.Reason'], 'Schedule'), - contains(dependencies['git_commit']['outputs']['commit.message'], '[icc-build]') - ) - ) - matrix: - pylatest_conda_forge_mkl: - DISTRIB: 'conda' - LOCK_FILE: 'build_tools/azure/pylatest_conda_forge_mkl_no_coverage_linux-64_conda.lock' - COVERAGE: 'false' - BUILD_WITH_ICC: 'true' +# - 
template: build_tools/azure/posix.yml +# parameters: +# name: Linux_Nightly_ICC +# vmImage: ubuntu-20.04 +# dependsOn: [git_commit, linting] +# condition: | +# and( +# succeeded(), +# not(contains(dependencies['git_commit']['outputs']['commit.message'], '[ci skip]')), +# or(eq(variables['Build.Reason'], 'Schedule'), +# contains(dependencies['git_commit']['outputs']['commit.message'], '[icc-build]') +# ) +# ) +# matrix: +# pylatest_conda_forge_mkl: +# DISTRIB: 'conda' +# LOCK_FILE: 'build_tools/azure/pylatest_conda_forge_mkl_no_coverage_linux-64_conda.lock' +# COVERAGE: 'false' +# BUILD_WITH_ICC: 'true' - template: build_tools/azure/posix-docker.yml parameters: diff --git a/doc/developers/advanced_installation.rst b/doc/developers/advanced_installation.rst index 658c1ff4c945d..68401c9fe6ec3 100644 --- a/doc/developers/advanced_installation.rst +++ b/doc/developers/advanced_installation.rst @@ -461,75 +461,6 @@ the base system and these steps will not be necessary. .. _conda environment: https://docs.conda.io/projects/conda/en/latest/user-guide/tasks/manage-environments.html .. _Miniforge3: https://github.com/conda-forge/miniforge#miniforge3 -Alternative compilers -===================== - -The command: - -.. prompt:: bash $ - - pip install --verbose --editable . - -will build scikit-learn using your default C/C++ compiler. If you want to build -scikit-learn with another compiler handled by ``distutils`` or by -``numpy.distutils``, use the following command: - -.. prompt:: bash $ - - python setup.py build_ext --compiler= -i build_clib --compiler= - -To see the list of available compilers run: - -.. prompt:: bash $ - - python setup.py build_ext --help-compiler - -If your compiler is not listed here, you can specify it via the ``CC`` and -``LDSHARED`` environment variables (does not work on windows): - -.. prompt:: bash $ - - CC= LDSHARED=" -shared" python setup.py build_ext -i - -Building with Intel C Compiler (ICC) using oneAPI on Linux ----------------------------------------------------------- - -Intel provides access to all of its oneAPI toolkits and packages through a -public APT repository. First you need to get and install the public key of this -repository: - -.. prompt:: bash $ - - wget https://apt.repos.intel.com/intel-gpg-keys/GPG-PUB-KEY-INTEL-SW-PRODUCTS.PUB - sudo apt-key add GPG-PUB-KEY-INTEL-SW-PRODUCTS.PUB - rm GPG-PUB-KEY-INTEL-SW-PRODUCTS.PUB - -Then, add the oneAPI repository to your APT repositories: - -.. prompt:: bash $ - - sudo add-apt-repository "deb https://apt.repos.intel.com/oneapi all main" - sudo apt-get update - -Install ICC, packaged under the name -``intel-oneapi-compiler-dpcpp-cpp-and-cpp-classic``: - -.. prompt:: bash $ - - sudo apt-get install intel-oneapi-compiler-dpcpp-cpp-and-cpp-classic - -Before using ICC, you need to set up environment variables: - -.. prompt:: bash $ - - source /opt/intel/oneapi/setvars.sh - -Finally, you can build scikit-learn. For example on Linux x86_64: - -.. 
prompt:: bash $ - - python setup.py build_ext --compiler=intelem -i build_clib --compiler=intelem - Parallel builds =============== diff --git a/setup.cfg b/setup.cfg index 81fbbffadb233..fc0b5108ea044 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,3 +1,9 @@ +[options] +packages = find_namespace: + +[options.packages.find] +include = sklearn* + [aliases] test = pytest diff --git a/setup.py b/setup.py index a22df6e647a8e..d315998fa0787 100755 --- a/setup.py +++ b/setup.py @@ -6,13 +6,17 @@ import sys import os +from os.path import join import platform import shutil # We need to import setuptools before because it monkey-patches distutils import setuptools # noqa -from distutils.command.clean import clean as Clean -from distutils.command.sdist import sdist + +# from setuptools._distutils.command.clean import clean as Clean +from setuptools import Command, Extension +from setuptools.command.build_ext import build_ext +from setuptools import setup import traceback import importlib @@ -51,6 +55,7 @@ # does not need the compiled code import sklearn # noqa import sklearn._min_dependencies as min_deps # noqa +from sklearn._build_utils import _check_cython_version # noqa from sklearn.externals._packaging.version import parse as parse_version # noqa @@ -113,42 +118,22 @@ "sklearn._isotonic", ) -# For some commands, use setuptools -SETUPTOOLS_COMMANDS = { - "develop", - "release", - "bdist_egg", - "bdist_rpm", - "bdist_wininst", - "install_egg_info", - "build_sphinx", - "egg_info", - "easy_install", - "upload", - "bdist_wheel", - "--single-version-externally-managed", -} -if SETUPTOOLS_COMMANDS.intersection(sys.argv): - extra_setuptools_args = dict( - zip_safe=False, # the package can run out of an .egg file - include_package_data=True, - extras_require={ - key: min_deps.tag_to_packages[key] - for key in ["examples", "docs", "tests", "benchmark"] - }, - ) -else: - extra_setuptools_args = dict() - # Custom clean command to remove build artifacts -class CleanCommand(Clean): +class CleanCommand(Command): description = "Remove build artifacts from the source tree" + user_options = [] + + def initialize_options(self): + pass + + def finalize_options(self): + pass + def run(self): - Clean.run(self) # Remove c files if we are not within a sdist package cwd = os.path.abspath(os.path.dirname(__file__)) remove_c_files = not os.path.exists(os.path.join(cwd, "PKG-INFO")) @@ -174,84 +159,53 @@ def run(self): shutil.rmtree(os.path.join(dirpath, dirname)) -cmdclass = {"clean": CleanCommand, "sdist": sdist} - # Custom build_ext command to set OpenMP compile flags depending on os and # compiler. Also makes it possible to set the parallelism level via # and environment variable (useful for the wheel building CI). 
# build_ext has to be imported after setuptools -try: - from numpy.distutils.command.build_ext import build_ext # noqa - - class build_ext_subclass(build_ext): - def finalize_options(self): - super().finalize_options() - if self.parallel is None: - # Do not override self.parallel if already defined by - # command-line flag (--parallel or -j) - - parallel = os.environ.get("SKLEARN_BUILD_PARALLEL") - if parallel: - self.parallel = int(parallel) - if self.parallel: - print("setting parallel=%d " % self.parallel) - def build_extensions(self): - from sklearn._build_utils.openmp_helpers import get_openmp_flag - for ext in self.extensions: - if ext.name in USE_NEWEST_NUMPY_C_API: - print(f"Using newest NumPy C API for extension {ext.name}") - ext.define_macros.append(DEFINE_MACRO_NUMPY_C_API) - else: - print( - f"Using old NumPy C API (version 1.7) for extension {ext.name}" - ) +class build_ext_subclass(build_ext): + def finalize_options(self): + build_ext.finalize_options(self) + if self.parallel is None: + # Do not override self.parallel if already defined by + # command-line flag (--parallel or -j) - if sklearn._OPENMP_SUPPORTED: - openmp_flag = get_openmp_flag(self.compiler) + parallel = os.environ.get("SKLEARN_BUILD_PARALLEL") + if parallel: + self.parallel = int(parallel) + if self.parallel: + print("setting parallel=%d " % self.parallel) - for e in self.extensions: - e.extra_compile_args += openmp_flag - e.extra_link_args += openmp_flag + def build_extensions(self): + from sklearn._build_utils.openmp_helpers import get_openmp_flag - build_ext.build_extensions(self) + for ext in self.extensions: + if ext.name in USE_NEWEST_NUMPY_C_API: + print(f"Using newest NumPy C API for extension {ext.name}") + ext.define_macros.append(DEFINE_MACRO_NUMPY_C_API) + else: + print(f"Using old NumPy C API (version 1.7) for extension {ext.name}") - cmdclass["build_ext"] = build_ext_subclass + if sklearn._OPENMP_SUPPORTED: + openmp_flag = get_openmp_flag(self.compiler) -except ImportError: - # Numpy should not be a dependency just to be able to introspect - # that python 3.8 is required. - pass - - -def configuration(parent_package="", top_path=None): - if os.path.exists("MANIFEST"): - os.remove("MANIFEST") + for e in self.extensions: + e.extra_compile_args += openmp_flag + e.extra_link_args += openmp_flag - from numpy.distutils.misc_util import Configuration - from sklearn._build_utils import _check_cython_version + build_ext.build_extensions(self) - config = Configuration(None, parent_package, top_path) - - # Avoid useless msg: - # "Ignoring attempt to set 'name' (from ... " - config.set_options( - ignore_setup_xxx_py=True, - assume_default_configuration=True, - delegate_options_to_subpackages=True, - quiet=True, - ) - - # Cython is required by config.add_subpackage for templated extensions - # that need the tempita sub-submodule. So check that we have the correct - # version of Cython so as to be able to raise a more informative error - # message from the start if it's not the case. 
- _check_cython_version() + def run(self): + self.run_command("build_clib") + build_ext.run(self) - config.add_subpackage("sklearn") - return config +cmdclass = { + "clean": CleanCommand, + "build_ext": build_ext_subclass, +} def check_package_status(package, min_version): @@ -294,7 +248,329 @@ def check_package_status(package, min_version): ) -def setup_package(): +extension_config = { + "__check_build": [ + {"sources": ["_check_build.pyx"]}, + ], + "": [ + {"sources": ["_isotonic.pyx"], "include_np": True}, + ], + "_loss": [ + {"sources": ["_loss.pyx.tp"], "include_np": True}, + ], + "cluster": [ + {"sources": ["_dbscan_inner.pyx"], "language": "c++", "include_np": True}, + {"sources": ["_hierarchical_fast.pyx"], "language": "c++", "include_np": True}, + {"sources": ["_k_means_common.pyx"], "include_np": True}, + {"sources": ["_k_means_lloyd.pyx"], "include_np": True}, + {"sources": ["_k_means_elkan.pyx"], "include_np": True}, + {"sources": ["_k_means_minibatch.pyx"], "include_np": True}, + ], + "datasets": [ + { + "sources": ["_svmlight_format_fast.pyx"], + "include_np": True, + "compile_for_pypy": False, + } + ], + "decomposition": [ + {"sources": ["_online_lda_fast.pyx"], "include_np": True}, + {"sources": ["_cdnmf_fast.pyx"], "include_np": True}, + ], + "ensemble": [ + {"sources": ["_gradient_boosting.pyx"], "include_np": True}, + ], + "ensemble._hist_gradient_boosting": [ + {"sources": ["_gradient_boosting.pyx"], "include_np": True}, + {"sources": ["histogram.pyx"], "include_np": True}, + {"sources": ["splitting.pyx"], "include_np": True}, + {"sources": ["_binning.pyx"], "include_np": True}, + {"sources": ["_predictor.pyx"], "include_np": True}, + {"sources": ["_bitset.pyx"], "include_np": True}, + {"sources": ["common.pyx"], "include_np": True}, + {"sources": ["utils.pyx"], "include_np": True}, + ], + "feature_extraction": [ + {"sources": ["_hashing_fast.pyx"], "language": "c++", "include_np": True}, + ], + "linear_model": [ + {"sources": ["_cd_fast.pyx"], "include_np": True}, + {"sources": ["_sgd_fast.pyx"], "include_np": True}, + {"sources": ["_sag_fast.pyx.tp"], "include_np": True}, + ], + "manifold": [ + {"sources": ["_utils.pyx"], "include_np": True}, + {"sources": ["_barnes_hut_tsne.pyx"], "include_np": True}, + ], + "metrics": [ + {"sources": ["_pairwise_fast.pyx"], "include_np": True}, + { + "sources": ["_dist_metrics.pyx.tp", "_dist_metrics.pxd.tp"], + "include_np": True, + }, + ], + "metrics.cluster": [ + {"sources": ["_expected_mutual_info_fast.pyx"], "include_np": True}, + ], + "metrics._pairwise_distances_reduction": [ + { + "sources": ["_datasets_pair.pyx.tp", "_datasets_pair.pxd.tp"], + "language": "c++", + "include_np": True, + "extra_compile_args": ["-std=c++11"], + }, + { + "sources": ["_gemm_term_computer.pyx.tp", "_gemm_term_computer.pxd.tp"], + "language": "c++", + "include_np": True, + "extra_compile_args": ["-std=c++11"], + }, + { + "sources": ["_base.pyx.tp", "_base.pxd.tp"], + "language": "c++", + "include_np": True, + "extra_compile_args": ["-std=c++11"], + }, + { + "sources": ["_argkmin.pyx.tp", "_argkmin.pxd.tp"], + "language": "c++", + "include_np": True, + "extra_compile_args": ["-std=c++11"], + }, + { + "sources": ["_radius_neighborhood.pyx.tp", "_radius_neighborhood.pxd.tp"], + "language": "c++", + "include_np": True, + "extra_compile_args": ["-std=c++11"], + }, + ], + "preprocessing": [ + {"sources": ["_csr_polynomial_expansion.pyx"], "include_np": True}, + ], + "neighbors": [ + {"sources": ["_ball_tree.pyx"], "include_np": True}, + {"sources": 
["_kd_tree.pyx"], "include_np": True}, + {"sources": ["_partition_nodes.pyx"], "language": "c++", "include_np": True}, + {"sources": ["_quad_tree.pyx"], "include_np": True}, + ], + "svm": [ + { + "sources": ["_newrand.pyx"], + "include_np": True, + "include_dirs": [join("src", "newrand")], + "language": "c++", + # Use C++11 random number generator fix + "extra_compile_args": ["-std=c++11"], + }, + { + "sources": ["_libsvm.pyx"], + "depends": [ + join("src", "libsvm", "libsvm_helper.c"), + join("src", "libsvm", "libsvm_template.cpp"), + join("src", "libsvm", "svm.cpp"), + join("src", "libsvm", "svm.h"), + join("src", "newrand", "newrand.h"), + ], + "include_dirs": [ + join("src", "libsvm"), + join("src", "newrand"), + ], + "libraries": ["libsvm-skl"], + "extra_link_args": ["-lstdc++"], + "include_np": True, + }, + { + "sources": ["_liblinear.pyx"], + "libraries": ["liblinear-skl"], + "include_dirs": [ + join("src", "liblinear"), + join("src", "newrand"), + join("..", "utils"), + ], + "include_np": True, + "depends": [ + join("src", "liblinear", "tron.h"), + join("src", "liblinear", "linear.h"), + join("src", "liblinear", "liblinear_helper.c"), + join("src", "newrand", "newrand.h"), + ], + "extra_link_args": ["-lstdc++"], + }, + { + "sources": ["_libsvm_sparse.pyx"], + "libraries": ["libsvm-skl"], + "include_dirs": [ + join("src", "libsvm"), + join("src", "newrand"), + ], + "include_np": True, + "depends": [ + join("src", "libsvm", "svm.h"), + join("src", "newrand", "newrand.h"), + join("src", "libsvm", "libsvm_sparse_helper.c"), + ], + "extra_link_args": ["-lstdc++"], + }, + ], + "tree": [ + {"sources": ["_tree.pyx"], "language": "c++", "include_np": True}, + {"sources": ["_splitter.pyx"], "include_np": True}, + {"sources": ["_criterion.pyx"], "include_np": True}, + {"sources": ["_utils.pyx"], "include_np": True}, + ], + "utils": [ + {"sources": ["sparsefuncs_fast.pyx"], "include_np": True}, + {"sources": ["_cython_blas.pyx"]}, + {"sources": ["arrayfuncs.pyx"], "include_np": True}, + { + "sources": ["murmurhash.pyx", join("src", "MurmurHash3.cpp")], + "include_dirs": ["src"], + "include_np": True, + }, + {"sources": ["_fast_dict.pyx"], "language": "c++", "include_np": True}, + {"sources": ["_fast_dict.pyx"], "language": "c++", "include_np": True}, + {"sources": ["_openmp_helpers.pyx"]}, + {"sources": ["_seq_dataset.pyx.tp", "_seq_dataset.pxd.tp"], "include_np": True}, + { + "sources": ["_weight_vector.pyx.tp", "_weight_vector.pxd.tp"], + "include_np": True, + }, + {"sources": ["_random.pyx"], "include_np": True}, + {"sources": ["_logistic_sigmoid.pyx"], "include_np": True}, + {"sources": ["_readonly_array_wrapper.pyx"], "include_np": True}, + {"sources": ["_typedefs.pyx"], "include_np": True}, + {"sources": ["_heap.pyx"], "include_np": True}, + {"sources": ["_sorting.pyx"], "include_np": True}, + {"sources": ["_vector_sentinel.pyx"], "language": "c++", "include_np": True}, + {"sources": ["_isfinite.pyx"]}, + ], +} + +# Paths in `libraries` must be relative to the root directory because `libraries` is +# passed directly to `setup` +libraries = [ + ( + "libsvm-skl", + { + "sources": [ + join("sklearn", "svm", "src", "libsvm", "libsvm_template.cpp"), + ], + "depends": [ + join("sklearn", "svm", "src", "libsvm", "svm.cpp"), + join("sklearn", "svm", "src", "libsvm", "svm.h"), + join("sklearn", "svm", "src", "newrand", "newrand.h"), + ], + # Use C++11 to use the random number generator fix + "extra_compiler_args": ["-std=c++11"], + "extra_link_args": ["-lstdc++"], + }, + ), + ( + "liblinear-skl", 
+ { + "sources": [ + join("sklearn", "svm", "src", "liblinear", "linear.cpp"), + join("sklearn", "svm", "src", "liblinear", "tron.cpp"), + ], + "depends": [ + join("sklearn", "svm", "src", "liblinear", "linear.h"), + join("sklearn", "svm", "src", "liblinear", "tron.h"), + join("sklearn", "svm", "src", "newrand", "newrand.h"), + ], + # Use C++11 to use the random number generator fix + "extra_compiler_args": ["-std=c++11"], + "extra_link_args": ["-lstdc++"], + }, + ), +] + + +def configure_extension_modules(): + # Skip cythonization as we do not want to include the generated + # C/C++ files in the release tarballs as they are not necessarily + # forward compatible with future versions of Python for instance. + if "sdist" in sys.argv or "--help" in sys.argv: + return [] + + from sklearn._build_utils import cythonize_extensions + from sklearn._build_utils import gen_from_templates + import numpy + + is_pypy = platform.python_implementation() == "PyPy" + np_include = numpy.get_include() + default_libraries = ["m"] if os.name == "posix" else [] + default_extra_compile_args = ["-O3"] + + cython_exts = [] + for submodule, extensions in extension_config.items(): + submodule_parts = submodule.split(".") + parent_dir = join("sklearn", *submodule_parts) + for extension in extensions: + if is_pypy and not extension.get("compile_for_pypy", True): + continue + + # Generate files with Tempita + tempita_sources = [] + sources = [] + for source in extension["sources"]: + source = join(parent_dir, source) + new_source_path, path_ext = os.path.splitext(source) + + if path_ext != ".tp": + sources.append(source) + continue + + # `source` is a source Tempita File + tempita_sources.append(source) + + # Do not include pxd files that were generated by tempita + if os.path.splitext(new_source_path)[-1] == ".pxd": + continue + sources.append(new_source_path) + + gen_from_templates(tempita_sources) + + # By convention, our extensions always use the name of the first source + source_name = os.path.splitext(os.path.basename(sources[0]))[0] + if submodule: + name_parts = ["sklearn", submodule, source_name] + else: + name_parts = ["sklearn", source_name] + name = ".".join(name_parts) + + # Make paths start from the root directory + include_dirs = [ + join(parent_dir, include_dir) + for include_dir in extension.get("include_dirs", []) + ] + if extension.get("include_np", False): + include_dirs.append(np_include) + + depends = [ + join(parent_dir, depend) for depend in extension.get("depends", []) + ] + + extra_compile_args = ( + extension.get("extra_compile_args", []) + default_extra_compile_args + ) + libraries_ext = extension.get("libraries", []) + default_libraries + + new_ext = Extension( + name=name, + sources=sources, + language=extension.get("language", None), + include_dirs=include_dirs, + libraries=libraries_ext, + depends=depends, + extra_link_args=extension.get("extra_link_args", None), + extra_compile_args=extra_compile_args, + ) + cython_exts.append(new_ext) + + return cythonize_extensions(cython_exts) + + +if __name__ == "__main__": python_requires = ">=3.8" required_python_version = (3, 8) @@ -333,23 +609,18 @@ def setup_package(): python_requires=python_requires, install_requires=min_deps.tag_to_packages["install"], package_data={"": ["*.pxd"]}, - **extra_setuptools_args, + zip_safe=False, # the package can run out of an .egg file + include_package_data=True, + extras_require={ + key: min_deps.tag_to_packages[key] + for key in ["examples", "docs", "tests", "benchmark"] + }, ) commands = [arg for arg in 
sys.argv[1:] if not arg.startswith("-")] - if all( + if not all( command in ("egg_info", "dist_info", "clean", "check") for command in commands ): - # These actions are required to succeed without Numpy for example when - # pip is used to install Scikit-learn when Numpy is not yet present in - # the system. - - # These commands use setup from setuptools - from setuptools import setup - - metadata["version"] = VERSION - metadata["packages"] = ["sklearn"] - else: if sys.version_info < required_python_version: required_version = "%d.%d" % required_python_version raise RuntimeError( @@ -359,29 +630,9 @@ def setup_package(): ) check_package_status("numpy", min_deps.NUMPY_MIN_VERSION) - check_package_status("scipy", min_deps.SCIPY_MIN_VERSION) - # These commands require the setup from numpy.distutils because they - # may use numpy.distutils compiler classes. - from numpy.distutils.core import setup - - # Monkeypatches CCompiler.spawn to prevent random wheel build errors on Windows - # The build errors on Windows was because msvccompiler spawn was not threadsafe - # This fixed can be removed when we build with numpy >= 1.22.2 on Windows. - # https://github.com/pypa/distutils/issues/5 - # https://github.com/scikit-learn/scikit-learn/issues/22310 - # https://github.com/numpy/numpy/pull/20640 - from numpy.distutils.ccompiler import replace_method - from distutils.ccompiler import CCompiler - from sklearn.externals._numpy_compiler_patch import CCompiler_spawn - - replace_method(CCompiler, "spawn", CCompiler_spawn) - - metadata["configuration"] = configuration - + _check_cython_version() + metadata["ext_modules"] = configure_extension_modules() + metadata["libraries"] = libraries setup(**metadata) - - -if __name__ == "__main__": - setup_package() diff --git a/sklearn/__check_build/setup.py b/sklearn/__check_build/setup.py deleted file mode 100644 index 2ff5bd24783e1..0000000000000 --- a/sklearn/__check_build/setup.py +++ /dev/null @@ -1,21 +0,0 @@ -# Author: Virgile Fritsch -# License: BSD 3 clause - -import numpy - - -def configuration(parent_package="", top_path=None): - from numpy.distutils.misc_util import Configuration - - config = Configuration("__check_build", parent_package, top_path) - config.add_extension( - "_check_build", sources=["_check_build.pyx"], include_dirs=[numpy.get_include()] - ) - - return config - - -if __name__ == "__main__": - from numpy.distutils.core import setup - - setup(**configuration(top_path="").todict()) diff --git a/sklearn/_build_utils/__init__.py b/sklearn/_build_utils/__init__.py index d8206a3a715f8..0dc7a36db2a36 100644 --- a/sklearn/_build_utils/__init__.py +++ b/sklearn/_build_utils/__init__.py @@ -37,7 +37,7 @@ def _check_cython_version(): raise ValueError(message) -def cythonize_extensions(top_path, config): +def cythonize_extensions(extension): """Check that a recent Cython is available and cythonize extensions""" _check_cython_version() from Cython.Build import cythonize @@ -71,8 +71,8 @@ def cythonize_extensions(top_path, config): os.environ.get("SKLEARN_ENABLE_DEBUG_CYTHON_DIRECTIVES", "0") != "0" ) - config.ext_modules = cythonize( - config.ext_modules, + return cythonize( + extension, nthreads=n_jobs, compile_time_env={ "SKLEARN_OPENMP_PARALLELISM_ENABLED": sklearn._OPENMP_SUPPORTED diff --git a/sklearn/_build_utils/pre_build_helpers.py b/sklearn/_build_utils/pre_build_helpers.py index 0a2a942f7991e..9068390f2afad 100644 --- a/sklearn/_build_utils/pre_build_helpers.py +++ b/sklearn/_build_utils/pre_build_helpers.py @@ -5,52 +5,15 @@ import glob import 
tempfile import textwrap -import setuptools # noqa import subprocess -import warnings - -from distutils.dist import Distribution -from distutils.sysconfig import customize_compiler - -# NumPy 1.23 deprecates numpy.distutils -with warnings.catch_warnings(): - warnings.filterwarnings("ignore", category=DeprecationWarning) - from numpy.distutils.ccompiler import new_compiler - from numpy.distutils.command.config_compiler import config_cc - - -def _get_compiler(): - """Get a compiler equivalent to the one that will be used to build sklearn - - Handles compiler specified as follows: - - python setup.py build_ext --compiler= - - CC= python setup.py build_ext - """ - dist = Distribution( - { - "script_name": os.path.basename(sys.argv[0]), - "script_args": sys.argv[1:], - "cmdclass": {"config_cc": config_cc}, - } - ) - dist.parse_config_files() - dist.parse_command_line() - - cmd_opts = dist.command_options.get("build_ext") - if cmd_opts is not None and "compiler" in cmd_opts: - compiler = cmd_opts["compiler"][1] - else: - compiler = None - ccompiler = new_compiler(compiler=compiler) - customize_compiler(ccompiler) - - return ccompiler +from setuptools.command.build_ext import customize_compiler, new_compiler def compile_test_program(code, extra_preargs=[], extra_postargs=[]): """Check that some C code can be compiled and run""" - ccompiler = _get_compiler() + ccompiler = new_compiler() + customize_compiler(ccompiler) # extra_(pre/post)args can be a callable to make it possible to get its # value from the compiler diff --git a/sklearn/_loss/setup.py b/sklearn/_loss/setup.py deleted file mode 100644 index 2a2d2b5f13b8a..0000000000000 --- a/sklearn/_loss/setup.py +++ /dev/null @@ -1,25 +0,0 @@ -import numpy -from numpy.distutils.misc_util import Configuration -from sklearn._build_utils import gen_from_templates - - -def configuration(parent_package="", top_path=None): - config = Configuration("_loss", parent_package, top_path) - - # generate _loss.pyx from template - templates = ["sklearn/_loss/_loss.pyx.tp"] - gen_from_templates(templates) - - config.add_extension( - "_loss", - sources=["_loss.pyx"], - include_dirs=[numpy.get_include()], - # define_macros=[("NPY_NO_DEPRECATED_API", "NPY_1_7_API_VERSION")], - ) - return config - - -if __name__ == "__main__": - from numpy.distutils.core import setup - - setup(**configuration().todict()) diff --git a/sklearn/cluster/setup.py b/sklearn/cluster/setup.py deleted file mode 100644 index c26872fd750a0..0000000000000 --- a/sklearn/cluster/setup.py +++ /dev/null @@ -1,68 +0,0 @@ -# Author: Alexandre Gramfort -# License: BSD 3 clause -import os - -import numpy - - -def configuration(parent_package="", top_path=None): - from numpy.distutils.misc_util import Configuration - - libraries = [] - if os.name == "posix": - libraries.append("m") - - config = Configuration("cluster", parent_package, top_path) - - config.add_extension( - "_dbscan_inner", - sources=["_dbscan_inner.pyx"], - include_dirs=[numpy.get_include()], - language="c++", - ) - - config.add_extension( - "_hierarchical_fast", - sources=["_hierarchical_fast.pyx"], - language="c++", - include_dirs=[numpy.get_include()], - libraries=libraries, - ) - - config.add_extension( - "_k_means_common", - sources=["_k_means_common.pyx"], - include_dirs=[numpy.get_include()], - libraries=libraries, - ) - - config.add_extension( - "_k_means_lloyd", - sources=["_k_means_lloyd.pyx"], - include_dirs=[numpy.get_include()], - libraries=libraries, - ) - - config.add_extension( - "_k_means_elkan", - 
sources=["_k_means_elkan.pyx"], - include_dirs=[numpy.get_include()], - libraries=libraries, - ) - - config.add_extension( - "_k_means_minibatch", - sources=["_k_means_minibatch.pyx"], - include_dirs=[numpy.get_include()], - libraries=libraries, - ) - - config.add_subpackage("tests") - - return config - - -if __name__ == "__main__": - from numpy.distutils.core import setup - - setup(**configuration(top_path="").todict()) diff --git a/sklearn/datasets/setup.py b/sklearn/datasets/setup.py deleted file mode 100644 index a75f14a083297..0000000000000 --- a/sklearn/datasets/setup.py +++ /dev/null @@ -1,27 +0,0 @@ -import numpy -import os -import platform - - -def configuration(parent_package="", top_path=None): - from numpy.distutils.misc_util import Configuration - - config = Configuration("datasets", parent_package, top_path) - config.add_data_dir("data") - config.add_data_dir("descr") - config.add_data_dir("images") - config.add_data_dir(os.path.join("tests", "data")) - if platform.python_implementation() != "PyPy": - config.add_extension( - "_svmlight_format_fast", - sources=["_svmlight_format_fast.pyx"], - include_dirs=[numpy.get_include()], - ) - config.add_subpackage("tests") - return config - - -if __name__ == "__main__": - from numpy.distutils.core import setup - - setup(**configuration(top_path="").todict()) diff --git a/sklearn/decomposition/setup.py b/sklearn/decomposition/setup.py deleted file mode 100644 index 2937f282b755d..0000000000000 --- a/sklearn/decomposition/setup.py +++ /dev/null @@ -1,35 +0,0 @@ -import os -import numpy -from numpy.distutils.misc_util import Configuration - - -def configuration(parent_package="", top_path=None): - config = Configuration("decomposition", parent_package, top_path) - - libraries = [] - if os.name == "posix": - libraries.append("m") - - config.add_extension( - "_online_lda_fast", - sources=["_online_lda_fast.pyx"], - include_dirs=[numpy.get_include()], - libraries=libraries, - ) - - config.add_extension( - "_cdnmf_fast", - sources=["_cdnmf_fast.pyx"], - include_dirs=[numpy.get_include()], - libraries=libraries, - ) - - config.add_subpackage("tests") - - return config - - -if __name__ == "__main__": - from numpy.distutils.core import setup - - setup(**configuration().todict()) diff --git a/sklearn/ensemble/setup.py b/sklearn/ensemble/setup.py deleted file mode 100644 index a9594757dbeb2..0000000000000 --- a/sklearn/ensemble/setup.py +++ /dev/null @@ -1,73 +0,0 @@ -import numpy -from numpy.distutils.misc_util import Configuration - - -def configuration(parent_package="", top_path=None): - config = Configuration("ensemble", parent_package, top_path) - - config.add_extension( - "_gradient_boosting", - sources=["_gradient_boosting.pyx"], - include_dirs=[numpy.get_include()], - ) - - config.add_subpackage("tests") - - # Histogram-based gradient boosting files - config.add_extension( - "_hist_gradient_boosting._gradient_boosting", - sources=["_hist_gradient_boosting/_gradient_boosting.pyx"], - include_dirs=[numpy.get_include()], - ) - - config.add_extension( - "_hist_gradient_boosting.histogram", - sources=["_hist_gradient_boosting/histogram.pyx"], - include_dirs=[numpy.get_include()], - ) - - config.add_extension( - "_hist_gradient_boosting.splitting", - sources=["_hist_gradient_boosting/splitting.pyx"], - include_dirs=[numpy.get_include()], - ) - - config.add_extension( - "_hist_gradient_boosting._binning", - sources=["_hist_gradient_boosting/_binning.pyx"], - include_dirs=[numpy.get_include()], - ) - - config.add_extension( - 
"_hist_gradient_boosting._predictor", - sources=["_hist_gradient_boosting/_predictor.pyx"], - include_dirs=[numpy.get_include()], - ) - - config.add_extension( - "_hist_gradient_boosting._bitset", - sources=["_hist_gradient_boosting/_bitset.pyx"], - include_dirs=[numpy.get_include()], - ) - - config.add_extension( - "_hist_gradient_boosting.common", - sources=["_hist_gradient_boosting/common.pyx"], - include_dirs=[numpy.get_include()], - ) - - config.add_extension( - "_hist_gradient_boosting.utils", - sources=["_hist_gradient_boosting/utils.pyx"], - include_dirs=[numpy.get_include()], - ) - - config.add_subpackage("_hist_gradient_boosting.tests") - - return config - - -if __name__ == "__main__": - from numpy.distutils.core import setup - - setup(**configuration().todict()) diff --git a/sklearn/externals/_numpy_compiler_patch.py b/sklearn/externals/_numpy_compiler_patch.py deleted file mode 100644 index a424d8e99a8ef..0000000000000 --- a/sklearn/externals/_numpy_compiler_patch.py +++ /dev/null @@ -1,139 +0,0 @@ -# Copyright (c) 2005-2022, NumPy Developers. -# All rights reserved. - -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: - -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. - -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following -# disclaimer in the documentation and/or other materials provided -# with the distribution. - -# * Neither the name of the NumPy Developers nor the names of any -# contributors may be used to endorse or promote products derived -# from this software without specific prior written permission. - -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -import os -import sys -import subprocess -import re -from distutils.errors import DistutilsExecError - -from numpy.distutils import log - - -def is_sequence(seq): - if isinstance(seq, str): - return False - try: - len(seq) - except Exception: - return False - return True - - -def forward_bytes_to_stdout(val): - """ - Forward bytes from a subprocess call to the console, without attempting to - decode them. - - The assumption is that the subprocess call already returned bytes in - a suitable encoding. 
- """ - if hasattr(sys.stdout, "buffer"): - # use the underlying binary output if there is one - sys.stdout.buffer.write(val) - elif hasattr(sys.stdout, "encoding"): - # round-trip the encoding if necessary - sys.stdout.write(val.decode(sys.stdout.encoding)) - else: - # make a best-guess at the encoding - sys.stdout.write(val.decode("utf8", errors="replace")) - - -def CCompiler_spawn(self, cmd, display=None, env=None): - """ - Execute a command in a sub-process. - - Parameters - ---------- - cmd : str - The command to execute. - display : str or sequence of str, optional - The text to add to the log file kept by `numpy.distutils`. - If not given, `display` is equal to `cmd`. - env: a dictionary for environment variables, optional - - Returns - ------- - None - - Raises - ------ - DistutilsExecError - If the command failed, i.e. the exit status was not 0. - - """ - env = env if env is not None else dict(os.environ) - if display is None: - display = cmd - if is_sequence(display): - display = " ".join(list(display)) - log.info(display) - try: - if self.verbose: - subprocess.check_output(cmd, env=env) - else: - subprocess.check_output(cmd, stderr=subprocess.STDOUT, env=env) - except subprocess.CalledProcessError as exc: - o = exc.output - s = exc.returncode - except OSError as e: - # OSError doesn't have the same hooks for the exception - # output, but exec_command() historically would use an - # empty string for EnvironmentError (base class for - # OSError) - # o = b'' - # still that would make the end-user lost in translation! - o = f"\n\n{e}\n\n\n" - try: - o = o.encode(sys.stdout.encoding) - except AttributeError: - o = o.encode("utf8") - # status previously used by exec_command() for parent - # of OSError - s = 127 - else: - # use a convenience return here so that any kind of - # caught exception will execute the default code after the - # try / except block, which handles various exceptions - return None - - if is_sequence(cmd): - cmd = " ".join(list(cmd)) - - if self.verbose: - forward_bytes_to_stdout(o) - - if re.search(b"Too many open files", o): - msg = "\nTry rerunning setup command until build succeeds." 
- else: - msg = "" - raise DistutilsExecError( - 'Command "%s" failed with exit status %d%s' % (cmd, s, msg) - ) diff --git a/sklearn/feature_extraction/setup.py b/sklearn/feature_extraction/setup.py deleted file mode 100644 index a7f2ff0f9dcee..0000000000000 --- a/sklearn/feature_extraction/setup.py +++ /dev/null @@ -1,22 +0,0 @@ -import os - - -def configuration(parent_package="", top_path=None): - import numpy - from numpy.distutils.misc_util import Configuration - - config = Configuration("feature_extraction", parent_package, top_path) - libraries = [] - if os.name == "posix": - libraries.append("m") - - config.add_extension( - "_hashing_fast", - sources=["_hashing_fast.pyx"], - include_dirs=[numpy.get_include()], - language="c++", - libraries=libraries, - ) - config.add_subpackage("tests") - - return config diff --git a/sklearn/inspection/setup.py b/sklearn/inspection/setup.py deleted file mode 100644 index d869e4aefa1b2..0000000000000 --- a/sklearn/inspection/setup.py +++ /dev/null @@ -1,18 +0,0 @@ -from numpy.distutils.misc_util import Configuration - - -def configuration(parent_package="", top_path=None): - config = Configuration("inspection", parent_package, top_path) - - config.add_subpackage("_plot") - config.add_subpackage("_plot.tests") - - config.add_subpackage("tests") - - return config - - -if __name__ == "__main__": - from numpy.distutils.core import setup - - setup(**configuration().todict()) diff --git a/sklearn/linear_model/setup.py b/sklearn/linear_model/setup.py deleted file mode 100644 index 74d7d9e2b05ea..0000000000000 --- a/sklearn/linear_model/setup.py +++ /dev/null @@ -1,49 +0,0 @@ -import os -import numpy - -from sklearn._build_utils import gen_from_templates - - -def configuration(parent_package="", top_path=None): - from numpy.distutils.misc_util import Configuration - - config = Configuration("linear_model", parent_package, top_path) - - libraries = [] - if os.name == "posix": - libraries.append("m") - - config.add_extension( - "_cd_fast", - sources=["_cd_fast.pyx"], - include_dirs=numpy.get_include(), - libraries=libraries, - ) - - config.add_extension( - "_sgd_fast", - sources=["_sgd_fast.pyx"], - include_dirs=numpy.get_include(), - libraries=libraries, - ) - - # generate sag_fast from template - templates = ["sklearn/linear_model/_sag_fast.pyx.tp"] - gen_from_templates(templates) - - config.add_extension( - "_sag_fast", sources=["_sag_fast.pyx"], include_dirs=numpy.get_include() - ) - - # add other directories - config.add_subpackage("tests") - config.add_subpackage("_glm") - config.add_subpackage("_glm/tests") - - return config - - -if __name__ == "__main__": - from numpy.distutils.core import setup - - setup(**configuration(top_path="").todict()) diff --git a/sklearn/manifold/setup.py b/sklearn/manifold/setup.py deleted file mode 100644 index b20484ea64c99..0000000000000 --- a/sklearn/manifold/setup.py +++ /dev/null @@ -1,39 +0,0 @@ -import os - -import numpy - - -def configuration(parent_package="", top_path=None): - from numpy.distutils.misc_util import Configuration - - config = Configuration("manifold", parent_package, top_path) - - libraries = [] - if os.name == "posix": - libraries.append("m") - - config.add_extension( - "_utils", - sources=["_utils.pyx"], - include_dirs=[numpy.get_include()], - libraries=libraries, - extra_compile_args=["-O3"], - ) - - config.add_extension( - "_barnes_hut_tsne", - sources=["_barnes_hut_tsne.pyx"], - include_dirs=[numpy.get_include()], - libraries=libraries, - extra_compile_args=["-O3"], - ) - - 
config.add_subpackage("tests") - - return config - - -if __name__ == "__main__": - from numpy.distutils.core import setup - - setup(**configuration().todict()) diff --git a/sklearn/metrics/_pairwise_distances_reduction/setup.py b/sklearn/metrics/_pairwise_distances_reduction/setup.py deleted file mode 100644 index f55ec659b5821..0000000000000 --- a/sklearn/metrics/_pairwise_distances_reduction/setup.py +++ /dev/null @@ -1,55 +0,0 @@ -import os - -import numpy as np -from numpy.distutils.misc_util import Configuration - -from sklearn._build_utils import gen_from_templates - - -def configuration(parent_package="", top_path=None): - config = Configuration("_pairwise_distances_reduction", parent_package, top_path) - libraries = [] - if os.name == "posix": - libraries.append("m") - - templates = [ - "sklearn/metrics/_pairwise_distances_reduction/_datasets_pair.pyx.tp", - "sklearn/metrics/_pairwise_distances_reduction/_datasets_pair.pxd.tp", - "sklearn/metrics/_pairwise_distances_reduction/_gemm_term_computer.pyx.tp", - "sklearn/metrics/_pairwise_distances_reduction/_gemm_term_computer.pxd.tp", - "sklearn/metrics/_pairwise_distances_reduction/_base.pyx.tp", - "sklearn/metrics/_pairwise_distances_reduction/_base.pxd.tp", - "sklearn/metrics/_pairwise_distances_reduction/_argkmin.pyx.tp", - "sklearn/metrics/_pairwise_distances_reduction/_argkmin.pxd.tp", - "sklearn/metrics/_pairwise_distances_reduction/_radius_neighborhood.pyx.tp", - "sklearn/metrics/_pairwise_distances_reduction/_radius_neighborhood.pxd.tp", - ] - - gen_from_templates(templates) - - cython_sources = [ - "_datasets_pair.pyx", - "_gemm_term_computer.pyx", - "_base.pyx", - "_argkmin.pyx", - "_radius_neighborhood.pyx", - ] - - for source_file in cython_sources: - private_extension_name = source_file.replace(".pyx", "") - config.add_extension( - name=private_extension_name, - sources=[source_file], - include_dirs=[np.get_include()], - language="c++", - libraries=libraries, - extra_compile_args=["-std=c++11"], - ) - - return config - - -if __name__ == "__main__": - from numpy.distutils.core import setup - - setup(**configuration().todict()) diff --git a/sklearn/metrics/cluster/setup.py b/sklearn/metrics/cluster/setup.py deleted file mode 100644 index 1d2b0b497aa4e..0000000000000 --- a/sklearn/metrics/cluster/setup.py +++ /dev/null @@ -1,27 +0,0 @@ -import os - -import numpy -from numpy.distutils.misc_util import Configuration - - -def configuration(parent_package="", top_path=None): - config = Configuration("cluster", parent_package, top_path) - libraries = [] - if os.name == "posix": - libraries.append("m") - config.add_extension( - "_expected_mutual_info_fast", - sources=["_expected_mutual_info_fast.pyx"], - include_dirs=[numpy.get_include()], - libraries=libraries, - ) - - config.add_subpackage("tests") - - return config - - -if __name__ == "__main__": - from numpy.distutils.core import setup - - setup(**configuration().todict()) diff --git a/sklearn/neighbors/setup.py b/sklearn/neighbors/setup.py deleted file mode 100644 index aa19ba501b18d..0000000000000 --- a/sklearn/neighbors/setup.py +++ /dev/null @@ -1,44 +0,0 @@ -import os - - -def configuration(parent_package="", top_path=None): - import numpy - from numpy.distutils.misc_util import Configuration - - config = Configuration("neighbors", parent_package, top_path) - libraries = [] - if os.name == "posix": - libraries.append("m") - - config.add_extension( - "_ball_tree", - sources=["_ball_tree.pyx"], - include_dirs=[numpy.get_include()], - libraries=libraries, - ) - - 
config.add_extension( - "_kd_tree", - sources=["_kd_tree.pyx"], - include_dirs=[numpy.get_include()], - libraries=libraries, - ) - - config.add_extension( - "_partition_nodes", - sources=["_partition_nodes.pyx"], - include_dirs=[numpy.get_include()], - language="c++", - libraries=libraries, - ) - - config.add_extension( - "_quad_tree", - sources=["_quad_tree.pyx"], - include_dirs=[numpy.get_include()], - libraries=libraries, - ) - - config.add_subpackage("tests") - - return config diff --git a/sklearn/preprocessing/setup.py b/sklearn/preprocessing/setup.py deleted file mode 100644 index a9053bd0b97f9..0000000000000 --- a/sklearn/preprocessing/setup.py +++ /dev/null @@ -1,22 +0,0 @@ -import os - - -def configuration(parent_package="", top_path=None): - import numpy - from numpy.distutils.misc_util import Configuration - - config = Configuration("preprocessing", parent_package, top_path) - libraries = [] - if os.name == "posix": - libraries.append("m") - - config.add_extension( - "_csr_polynomial_expansion", - sources=["_csr_polynomial_expansion.pyx"], - include_dirs=[numpy.get_include()], - libraries=libraries, - ) - - config.add_subpackage("tests") - - return config diff --git a/sklearn/setup.py b/sklearn/setup.py deleted file mode 100644 index 874bdbbcbed43..0000000000000 --- a/sklearn/setup.py +++ /dev/null @@ -1,93 +0,0 @@ -import sys -import os - -from sklearn._build_utils import cythonize_extensions - - -def configuration(parent_package="", top_path=None): - from numpy.distutils.misc_util import Configuration - import numpy - - libraries = [] - if os.name == "posix": - libraries.append("m") - - config = Configuration("sklearn", parent_package, top_path) - - # submodules with build utilities - config.add_subpackage("__check_build") - config.add_subpackage("_build_utils") - - # submodules which do not have their own setup.py - # we must manually add sub-submodules & tests - config.add_subpackage("compose") - config.add_subpackage("compose/tests") - config.add_subpackage("covariance") - config.add_subpackage("covariance/tests") - config.add_subpackage("cross_decomposition") - config.add_subpackage("cross_decomposition/tests") - config.add_subpackage("feature_selection") - config.add_subpackage("feature_selection/tests") - config.add_subpackage("gaussian_process") - config.add_subpackage("gaussian_process/tests") - config.add_subpackage("impute") - config.add_subpackage("impute/tests") - config.add_subpackage("inspection") - config.add_subpackage("inspection/tests") - config.add_subpackage("mixture") - config.add_subpackage("mixture/tests") - config.add_subpackage("model_selection") - config.add_subpackage("model_selection/tests") - config.add_subpackage("neural_network") - config.add_subpackage("neural_network/tests") - config.add_subpackage("preprocessing") - config.add_subpackage("preprocessing/tests") - config.add_subpackage("semi_supervised") - config.add_subpackage("semi_supervised/tests") - config.add_subpackage("experimental") - config.add_subpackage("experimental/tests") - config.add_subpackage("ensemble/_hist_gradient_boosting") - config.add_subpackage("ensemble/_hist_gradient_boosting/tests") - config.add_subpackage("externals") - config.add_subpackage("externals/_packaging") - - # submodules which have their own setup.py - config.add_subpackage("_loss") - config.add_subpackage("_loss/tests") - config.add_subpackage("cluster") - config.add_subpackage("datasets") - config.add_subpackage("decomposition") - config.add_subpackage("ensemble") - 
config.add_subpackage("feature_extraction") - config.add_subpackage("manifold") - config.add_subpackage("metrics") - config.add_subpackage("neighbors") - config.add_subpackage("tree") - config.add_subpackage("utils") - config.add_subpackage("svm") - config.add_subpackage("linear_model") - - # add cython extension module for isotonic regression - config.add_extension( - "_isotonic", - sources=["_isotonic.pyx"], - include_dirs=[numpy.get_include()], - libraries=libraries, - ) - - # add the test directory - config.add_subpackage("tests") - - # Skip cythonization as we do not want to include the generated - # C/C++ files in the release tarballs as they are not necessarily - # forward compatible with future versions of Python for instance. - if "sdist" not in sys.argv: - cythonize_extensions(top_path, config) - - return config - - -if __name__ == "__main__": - from numpy.distutils.core import setup - - setup(**configuration(top_path="").todict()) diff --git a/sklearn/svm/setup.py b/sklearn/svm/setup.py deleted file mode 100644 index d5f94d8a11181..0000000000000 --- a/sklearn/svm/setup.py +++ /dev/null @@ -1,134 +0,0 @@ -import os -from os.path import join -import numpy - - -def configuration(parent_package="", top_path=None): - from numpy.distutils.misc_util import Configuration - - config = Configuration("svm", parent_package, top_path) - - config.add_subpackage("tests") - - # newrand wrappers - config.add_extension( - "_newrand", - sources=["_newrand.pyx"], - include_dirs=[numpy.get_include(), join("src", "newrand")], - depends=[join("src", "newrand", "newrand.h")], - language="c++", - # Use C++11 random number generator fix - extra_compile_args=["-std=c++11"], - ) - - # Section LibSVM - - # we compile both libsvm and libsvm_sparse - config.add_library( - "libsvm-skl", - sources=[join("src", "libsvm", "libsvm_template.cpp")], - depends=[ - join("src", "libsvm", "svm.cpp"), - join("src", "libsvm", "svm.h"), - join("src", "newrand", "newrand.h"), - ], - # Force C++ linking in case gcc is picked up instead - # of g++ under windows with some versions of MinGW - extra_link_args=["-lstdc++"], - # Use C++11 to use the random number generator fix - extra_compiler_args=["-std=c++11"], - ) - - libsvm_sources = ["_libsvm.pyx"] - libsvm_depends = [ - join("src", "libsvm", "libsvm_helper.c"), - join("src", "libsvm", "libsvm_template.cpp"), - join("src", "libsvm", "svm.cpp"), - join("src", "libsvm", "svm.h"), - join("src", "newrand", "newrand.h"), - ] - - config.add_extension( - "_libsvm", - sources=libsvm_sources, - include_dirs=[ - numpy.get_include(), - join("src", "libsvm"), - join("src", "newrand"), - ], - libraries=["libsvm-skl"], - depends=libsvm_depends, - ) - - # liblinear module - libraries = [] - if os.name == "posix": - libraries.append("m") - - # precompile liblinear to use C++11 flag - config.add_library( - "liblinear-skl", - sources=[ - join("src", "liblinear", "linear.cpp"), - join("src", "liblinear", "tron.cpp"), - ], - depends=[ - join("src", "liblinear", "linear.h"), - join("src", "liblinear", "tron.h"), - join("src", "newrand", "newrand.h"), - ], - # Force C++ linking in case gcc is picked up instead - # of g++ under windows with some versions of MinGW - extra_link_args=["-lstdc++"], - # Use C++11 to use the random number generator fix - extra_compiler_args=["-std=c++11"], - ) - - liblinear_sources = ["_liblinear.pyx"] - liblinear_depends = [ - join("src", "liblinear", "*.h"), - join("src", "newrand", "newrand.h"), - join("src", "liblinear", "liblinear_helper.c"), - ] - - 
config.add_extension( - "_liblinear", - sources=liblinear_sources, - libraries=["liblinear-skl"] + libraries, - include_dirs=[ - join(".", "src", "liblinear"), - join(".", "src", "newrand"), - join("..", "utils"), - numpy.get_include(), - ], - depends=liblinear_depends, - # extra_compile_args=['-O0 -fno-inline'], - ) - - # end liblinear module - - # this should go *after* libsvm-skl - libsvm_sparse_sources = ["_libsvm_sparse.pyx"] - config.add_extension( - "_libsvm_sparse", - libraries=["libsvm-skl"], - sources=libsvm_sparse_sources, - include_dirs=[ - numpy.get_include(), - join("src", "libsvm"), - join("src", "newrand"), - ], - depends=[ - join("src", "libsvm", "svm.h"), - join("src", "newrand", "newrand.h"), - join("src", "libsvm", "libsvm_sparse_helper.c"), - ], - ) - - return config - - -if __name__ == "__main__": - from numpy.distutils.core import setup - - setup(**configuration(top_path="").todict()) diff --git a/sklearn/tree/setup.py b/sklearn/tree/setup.py deleted file mode 100644 index 20d5f64199e0c..0000000000000 --- a/sklearn/tree/setup.py +++ /dev/null @@ -1,50 +0,0 @@ -import os - -import numpy -from numpy.distutils.misc_util import Configuration - - -def configuration(parent_package="", top_path=None): - config = Configuration("tree", parent_package, top_path) - libraries = [] - if os.name == "posix": - libraries.append("m") - config.add_extension( - "_tree", - sources=["_tree.pyx"], - include_dirs=[numpy.get_include()], - libraries=libraries, - language="c++", - extra_compile_args=["-O3"], - ) - config.add_extension( - "_splitter", - sources=["_splitter.pyx"], - include_dirs=[numpy.get_include()], - libraries=libraries, - extra_compile_args=["-O3"], - ) - config.add_extension( - "_criterion", - sources=["_criterion.pyx"], - include_dirs=[numpy.get_include()], - libraries=libraries, - extra_compile_args=["-O3"], - ) - config.add_extension( - "_utils", - sources=["_utils.pyx"], - include_dirs=[numpy.get_include()], - libraries=libraries, - extra_compile_args=["-O3"], - ) - - config.add_subpackage("tests") - - return config - - -if __name__ == "__main__": - from numpy.distutils.core import setup - - setup(**configuration().todict()) diff --git a/sklearn/utils/setup.py b/sklearn/utils/setup.py deleted file mode 100644 index 915a8efeb2e01..0000000000000 --- a/sklearn/utils/setup.py +++ /dev/null @@ -1,133 +0,0 @@ -import os -from os.path import join - -from sklearn._build_utils import gen_from_templates - - -def configuration(parent_package="", top_path=None): - import numpy - from numpy.distutils.misc_util import Configuration - - config = Configuration("utils", parent_package, top_path) - - libraries = [] - if os.name == "posix": - libraries.append("m") - - config.add_extension( - "sparsefuncs_fast", sources=["sparsefuncs_fast.pyx"], libraries=libraries - ) - - config.add_extension( - "_cython_blas", sources=["_cython_blas.pyx"], libraries=libraries - ) - - config.add_extension( - "arrayfuncs", - sources=["arrayfuncs.pyx"], - include_dirs=[numpy.get_include()], - libraries=libraries, - ) - - config.add_extension( - "murmurhash", - sources=["murmurhash.pyx", join("src", "MurmurHash3.cpp")], - include_dirs=["src"], - ) - - config.add_extension( - "_fast_dict", - sources=["_fast_dict.pyx"], - language="c++", - include_dirs=[numpy.get_include()], - libraries=libraries, - ) - - config.add_extension( - "_openmp_helpers", sources=["_openmp_helpers.pyx"], libraries=libraries - ) - - # generate files from a template - templates = [ - "sklearn/utils/_seq_dataset.pyx.tp", - 
"sklearn/utils/_seq_dataset.pxd.tp", - "sklearn/utils/_weight_vector.pyx.tp", - "sklearn/utils/_weight_vector.pxd.tp", - ] - - gen_from_templates(templates) - - config.add_extension( - "_seq_dataset", sources=["_seq_dataset.pyx"], include_dirs=[numpy.get_include()] - ) - - config.add_extension( - "_weight_vector", - sources=["_weight_vector.pyx"], - include_dirs=[numpy.get_include()], - libraries=libraries, - ) - - config.add_extension( - "_random", - sources=["_random.pyx"], - include_dirs=[numpy.get_include()], - libraries=libraries, - ) - - config.add_extension( - "_logistic_sigmoid", - sources=["_logistic_sigmoid.pyx"], - include_dirs=[numpy.get_include()], - libraries=libraries, - ) - - config.add_extension( - "_readonly_array_wrapper", - sources=["_readonly_array_wrapper.pyx"], - libraries=libraries, - ) - - config.add_extension( - "_typedefs", - sources=["_typedefs.pyx"], - include_dirs=[numpy.get_include()], - libraries=libraries, - ) - - config.add_extension( - "_heap", - sources=["_heap.pyx"], - libraries=libraries, - ) - - config.add_extension( - "_sorting", - sources=["_sorting.pyx"], - include_dirs=[numpy.get_include()], - language="c++", - libraries=libraries, - ) - - config.add_extension( - "_vector_sentinel", - sources=["_vector_sentinel.pyx"], - include_dirs=[numpy.get_include()], - libraries=libraries, - language="c++", - ) - config.add_extension( - "_isfinite", - sources=["_isfinite.pyx"], - libraries=libraries, - ) - - config.add_subpackage("tests") - - return config - - -if __name__ == "__main__": - from numpy.distutils.core import setup - - setup(**configuration(top_path="").todict()) From 9ba6a4787742307f46c205d87f55179740d772ce Mon Sep 17 00:00:00 2001 From: "Thomas J. Fan" Date: Sun, 2 Oct 2022 18:04:41 -0400 Subject: [PATCH 02/26] CI [cd build] From 68f84a1a7f4b1ae6f9aed434b225fdf33bda46ef Mon Sep 17 00:00:00 2001 From: "Thomas J. Fan" Date: Sun, 2 Oct 2022 18:07:02 -0400 Subject: [PATCH 03/26] CLN Reduce diff --- setup.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/setup.py b/setup.py index d315998fa0787..6e8354c13cc78 100755 --- a/setup.py +++ b/setup.py @@ -570,7 +570,7 @@ def configure_extension_modules(): return cythonize_extensions(cython_exts) -if __name__ == "__main__": +def setup_package(): python_requires = ">=3.8" required_python_version = (3, 8) @@ -636,3 +636,7 @@ def configure_extension_modules(): metadata["ext_modules"] = configure_extension_modules() metadata["libraries"] = libraries setup(**metadata) + + +if __name__ == "__main__": + setup_package() From c524327b20b5a05b9158d82895de78524aa77796 Mon Sep 17 00:00:00 2001 From: "Thomas J. Fan" Date: Sun, 2 Oct 2022 18:07:28 -0400 Subject: [PATCH 04/26] CI [cd build] From 0c510bf2159a991ce0b39cb4878ccff6eba7a8a8 Mon Sep 17 00:00:00 2001 From: "Thomas J. 
Fan" Date: Wed, 5 Oct 2022 15:10:13 -0400 Subject: [PATCH 05/26] FIX Removes remaining setup.py file [cd build] --- sklearn/metrics/setup.py | 47 ---------------------------------------- 1 file changed, 47 deletions(-) delete mode 100644 sklearn/metrics/setup.py diff --git a/sklearn/metrics/setup.py b/sklearn/metrics/setup.py deleted file mode 100644 index 9aab190c69992..0000000000000 --- a/sklearn/metrics/setup.py +++ /dev/null @@ -1,47 +0,0 @@ -import os -import numpy as np - -from numpy.distutils.misc_util import Configuration - -from sklearn._build_utils import gen_from_templates - - -def configuration(parent_package="", top_path=None): - config = Configuration("metrics", parent_package, top_path) - - libraries = [] - if os.name == "posix": - libraries.append("m") - - config.add_subpackage("_plot") - config.add_subpackage("_plot.tests") - config.add_subpackage("cluster") - config.add_subpackage("_pairwise_distances_reduction") - - config.add_extension( - "_pairwise_fast", sources=["_pairwise_fast.pyx"], libraries=libraries - ) - - templates = [ - "sklearn/metrics/_dist_metrics.pyx.tp", - "sklearn/metrics/_dist_metrics.pxd.tp", - ] - - gen_from_templates(templates) - - config.add_extension( - "_dist_metrics", - sources=["_dist_metrics.pyx"], - include_dirs=[np.get_include()], - libraries=libraries, - ) - - config.add_subpackage("tests") - - return config - - -if __name__ == "__main__": - from numpy.distutils.core import setup - - setup(**configuration().todict()) From d0f4a7af7742d691b007a7950e99bdd353e52cd4 Mon Sep 17 00:00:00 2001 From: "Thomas J. Fan" Date: Tue, 11 Oct 2022 20:22:15 -0400 Subject: [PATCH 06/26] CI [cd build gh] From 979d7646b5e641fa696bf1441fa58a81a3c5cd54 Mon Sep 17 00:00:00 2001 From: "Thomas J. Fan" Date: Fri, 14 Oct 2022 14:23:59 -0400 Subject: [PATCH 07/26] CI [cd build gh] From 7baf7d0a6e75d4cafeda47bc7a55dac1c2c3604b Mon Sep 17 00:00:00 2001 From: "Thomas J. 
Fan" Date: Fri, 14 Oct 2022 16:32:20 -0400 Subject: [PATCH 08/26] CI Remove BUILD_WITH_ICC everywhere [cd build gh] --- azure-pipelines.yml | 23 ----------------------- build_tools/azure/install.sh | 16 ---------------- build_tools/azure/test_docs.sh | 4 ---- build_tools/azure/test_script.sh | 4 ---- 4 files changed, 47 deletions(-) diff --git a/azure-pipelines.yml b/azure-pipelines.yml index 169e07e76d918..3f6b96dff9f60 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -106,28 +106,6 @@ jobs: LOCK_FILE: './build_tools/azure/python_nogil_lock.txt' COVERAGE: 'false' -# XXX: Intel C++ compiler support removed when migrating away from numpy.distuils -# Check compilation with intel C++ compiler (ICC) -# - template: build_tools/azure/posix.yml -# parameters: -# name: Linux_Nightly_ICC -# vmImage: ubuntu-20.04 -# dependsOn: [git_commit, linting] -# condition: | -# and( -# succeeded(), -# not(contains(dependencies['git_commit']['outputs']['commit.message'], '[ci skip]')), -# or(eq(variables['Build.Reason'], 'Schedule'), -# contains(dependencies['git_commit']['outputs']['commit.message'], '[icc-build]') -# ) -# ) -# matrix: -# pylatest_conda_forge_mkl: -# DISTRIB: 'conda' -# LOCK_FILE: 'build_tools/azure/pylatest_conda_forge_mkl_no_coverage_linux-64_conda.lock' -# COVERAGE: 'false' -# BUILD_WITH_ICC: 'true' - - template: build_tools/azure/posix-docker.yml parameters: name: Linux_Nightly_PyPy @@ -183,7 +161,6 @@ jobs: DISTRIB: 'conda' LOCK_FILE: './build_tools/azure/py38_conda_forge_openblas_ubuntu_2204_linux-64_conda.lock' COVERAGE: 'false' - BUILD_WITH_ICC: 'false' SKLEARN_TESTS_GLOBAL_RANDOM_SEED: '0' # non-default seed - template: build_tools/azure/posix.yml diff --git a/build_tools/azure/install.sh b/build_tools/azure/install.sh index 66351ea867ec8..08bc126066c9d 100755 --- a/build_tools/azure/install.sh +++ b/build_tools/azure/install.sh @@ -59,15 +59,6 @@ pre_python_environment_install() { export PYTHON_NOGIL_PATH="${PYTHON_NOGIL_CLONE_PATH}/python" cd $OLDPWD - elif [[ "$BUILD_WITH_ICC" == "true" ]]; then - wget https://apt.repos.intel.com/intel-gpg-keys/GPG-PUB-KEY-INTEL-SW-PRODUCTS.PUB - sudo apt-key add GPG-PUB-KEY-INTEL-SW-PRODUCTS.PUB - rm GPG-PUB-KEY-INTEL-SW-PRODUCTS.PUB - sudo add-apt-repository "deb https://apt.repos.intel.com/oneapi all main" - sudo apt-get update - sudo apt-get install intel-oneapi-compiler-dpcpp-cpp-and-cpp-classic - source /opt/intel/oneapi/setvars.sh - fi } @@ -122,13 +113,6 @@ scikit_learn_install() { export LDFLAGS="$LDFLAGS -Wl,--sysroot=/" fi - if [[ "$BUILD_WITH_ICC" == "true" ]]; then - # The "build_clib" command is implicitly used to build "libsvm-skl". - # To compile with a different compiler, we also need to specify the - # compiler for this command - python setup.py build_ext --compiler=intelem -i build_clib --compiler=intelem - fi - # TODO use a specific variable for this rather than using a particular build ... 
if [[ "$DISTRIB" == "conda-pip-latest" ]]; then # Check that pip can automatically build scikit-learn with the build diff --git a/build_tools/azure/test_docs.sh b/build_tools/azure/test_docs.sh index 1d28f64a036cd..61e855425786b 100755 --- a/build_tools/azure/test_docs.sh +++ b/build_tools/azure/test_docs.sh @@ -8,8 +8,4 @@ elif [[ "$DISTRIB" == "ubuntu" || "$DISTRIB" == "pip-nogil" ]]; then source $VIRTUALENV/bin/activate fi -if [[ "$BUILD_WITH_ICC" == "true" ]]; then - source /opt/intel/oneapi/setvars.sh -fi - make test-doc diff --git a/build_tools/azure/test_script.sh b/build_tools/azure/test_script.sh index d39e1ce69cde0..03829142ab4f4 100755 --- a/build_tools/azure/test_script.sh +++ b/build_tools/azure/test_script.sh @@ -11,10 +11,6 @@ elif [[ "$DISTRIB" == "ubuntu" || "$DISTRIB" == "debian-32" || "$DISTRIB" == "pi source $VIRTUALENV/bin/activate fi -if [[ "$BUILD_WITH_ICC" == "true" ]]; then - source /opt/intel/oneapi/setvars.sh -fi - if [[ "$BUILD_REASON" == "Schedule" ]]; then # Enable global random seed randomization to discover seed-sensitive tests # only on nightly builds. From 5dda5d4baffa19b8e63a449e1826a7aca84e0fbe Mon Sep 17 00:00:00 2001 From: "Thomas J. Fan" Date: Wed, 26 Oct 2022 10:20:03 -0400 Subject: [PATCH 09/26] CLN Remove more distutil references [cd build gh] --- build_tools/circle/list_versions.py | 8 +-- build_tools/github/build_wheels.sh | 5 -- maint_tools/check_pxd_in_installation.py | 3 +- setup.py | 7 +-- sklearn/_build_utils/__init__.py | 5 +- sklearn/_build_utils/openmp_helpers.py | 2 +- .../_pairwise_distances_reduction/setup.py | 55 ------------------- 7 files changed, 9 insertions(+), 76 deletions(-) delete mode 100644 sklearn/metrics/_pairwise_distances_reduction/setup.py diff --git a/build_tools/circle/list_versions.py b/build_tools/circle/list_versions.py index 68e198f8bdb38..dfcc600957469 100755 --- a/build_tools/circle/list_versions.py +++ b/build_tools/circle/list_versions.py @@ -5,7 +5,7 @@ import re import sys -from distutils.version import LooseVersion +from sklearn.utils.fixes import parse_version from urllib.request import urlopen @@ -37,8 +37,8 @@ def get_file_extension(version): # The 'dev' branch should be explicitly handled return "zip" - current_version = LooseVersion(version) - min_zip_version = LooseVersion("0.24") + current_version = parse_version(version) + min_zip_version = parse_version("0.24") return "zip" if current_version >= min_zip_version else "pdf" @@ -94,7 +94,7 @@ def get_file_size(version): # Output in order: dev, stable, decreasing other version seen = set() for name in NAMED_DIRS + sorted( - (k for k in dirs if k[:1].isdigit()), key=LooseVersion, reverse=True + (k for k in dirs if k[:1].isdigit()), key=parse_version, reverse=True ): version_num, file_size = dirs[name] if version_num in seen: diff --git a/build_tools/github/build_wheels.sh b/build_tools/github/build_wheels.sh index f14a8645c85aa..647b47492774b 100755 --- a/build_tools/github/build_wheels.sh +++ b/build_tools/github/build_wheels.sh @@ -31,11 +31,6 @@ if [[ "$RUNNER_OS" == "macOS" ]]; then export CFLAGS="$CFLAGS -I$PREFIX/include" export CXXFLAGS="$CXXFLAGS -I$PREFIX/include" export LDFLAGS="$LDFLAGS -Wl,-rpath,$PREFIX/lib -L$PREFIX/lib -lomp" - # Disable the use of setuptools's vendored copy distutils when invoking setuptools - # See: https://setuptools.pypa.io/en/latest/deprecated/distutils-legacy.html - # TODO: remove the definition of this environment variable when no - # reference to distutils exist in the code-base for building scikit-learn. 
- export SETUPTOOLS_USE_DISTUTILS=stdlib fi # The version of the built dependencies are specified diff --git a/maint_tools/check_pxd_in_installation.py b/maint_tools/check_pxd_in_installation.py index b792912048350..e6f64c86a3383 100644 --- a/maint_tools/check_pxd_in_installation.py +++ b/maint_tools/check_pxd_in_installation.py @@ -40,8 +40,7 @@ f.write( textwrap.dedent( """ - from distutils.core import setup - from distutils.extension import Extension + from setuptools import setup, Extension from Cython.Build import cythonize import numpy diff --git a/setup.py b/setup.py index 1292bf0dafddf..0d3974e1a7ef2 100755 --- a/setup.py +++ b/setup.py @@ -10,13 +10,8 @@ import platform import shutil -# We need to import setuptools before because it monkey-patches distutils -import setuptools # noqa - -# from setuptools._distutils.command.clean import clean as Clean -from setuptools import Command, Extension +from setuptools import Command, Extension, setup from setuptools.command.build_ext import build_ext -from setuptools import setup import traceback import importlib diff --git a/sklearn/_build_utils/__init__.py b/sklearn/_build_utils/__init__.py index 0dc7a36db2a36..4c69e7fb87277 100644 --- a/sklearn/_build_utils/__init__.py +++ b/sklearn/_build_utils/__init__.py @@ -9,11 +9,10 @@ import sklearn import contextlib -from distutils.version import LooseVersion - from .pre_build_helpers import basic_check_build from .openmp_helpers import check_openmp_support from .._min_dependencies import CYTHON_MIN_VERSION +from ..utils.fixes import parse_version DEFAULT_ROOT = "sklearn" @@ -30,7 +29,7 @@ def _check_cython_version(): # Re-raise with more informative error message instead: raise ModuleNotFoundError(message) from e - if LooseVersion(Cython.__version__) < CYTHON_MIN_VERSION: + if parse_version(Cython.__version__) < parse_version(CYTHON_MIN_VERSION): message += " The current version of Cython is {} installed in {}.".format( Cython.__version__, Cython.__path__ ) diff --git a/sklearn/_build_utils/openmp_helpers.py b/sklearn/_build_utils/openmp_helpers.py index 192e96cd30765..1c1c2248e75d1 100644 --- a/sklearn/_build_utils/openmp_helpers.py +++ b/sklearn/_build_utils/openmp_helpers.py @@ -10,7 +10,7 @@ import warnings import subprocess -from distutils.errors import CompileError, LinkError +from setuptools.errors import CompileError, LinkError from .pre_build_helpers import compile_test_program diff --git a/sklearn/metrics/_pairwise_distances_reduction/setup.py b/sklearn/metrics/_pairwise_distances_reduction/setup.py deleted file mode 100644 index e1fbbceea7eb8..0000000000000 --- a/sklearn/metrics/_pairwise_distances_reduction/setup.py +++ /dev/null @@ -1,55 +0,0 @@ -import os - -import numpy as np -from numpy.distutils.misc_util import Configuration - -from sklearn._build_utils import gen_from_templates - - -def configuration(parent_package="", top_path=None): - config = Configuration("_pairwise_distances_reduction", parent_package, top_path) - libraries = [] - if os.name == "posix": - libraries.append("m") - - templates = [ - "sklearn/metrics/_pairwise_distances_reduction/_datasets_pair.pyx.tp", - "sklearn/metrics/_pairwise_distances_reduction/_datasets_pair.pxd.tp", - "sklearn/metrics/_pairwise_distances_reduction/_gemm_term_computer.pyx.tp", - "sklearn/metrics/_pairwise_distances_reduction/_gemm_term_computer.pxd.tp", - "sklearn/metrics/_pairwise_distances_reduction/_base.pyx.tp", - "sklearn/metrics/_pairwise_distances_reduction/_base.pxd.tp", - 
"sklearn/metrics/_pairwise_distances_reduction/_argkmin.pyx.tp", - "sklearn/metrics/_pairwise_distances_reduction/_argkmin.pxd.tp", - "sklearn/metrics/_pairwise_distances_reduction/_radius_neighbors.pyx.tp", - "sklearn/metrics/_pairwise_distances_reduction/_radius_neighbors.pxd.tp", - ] - - gen_from_templates(templates) - - cython_sources = [ - "_datasets_pair.pyx", - "_gemm_term_computer.pyx", - "_base.pyx", - "_argkmin.pyx", - "_radius_neighbors.pyx", - ] - - for source_file in cython_sources: - private_extension_name = source_file.replace(".pyx", "") - config.add_extension( - name=private_extension_name, - sources=[source_file], - include_dirs=[np.get_include()], - language="c++", - libraries=libraries, - extra_compile_args=["-std=c++11"], - ) - - return config - - -if __name__ == "__main__": - from numpy.distutils.core import setup - - setup(**configuration().todict()) From 429eed6a63565ec2f173bc95430b63a3080e2d52 Mon Sep 17 00:00:00 2001 From: "Thomas J. Fan" Date: Wed, 26 Oct 2022 10:46:25 -0400 Subject: [PATCH 10/26] FIX Fixes build_utils [cd build gh] --- sklearn/_build_utils/__init__.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/sklearn/_build_utils/__init__.py b/sklearn/_build_utils/__init__.py index 4c69e7fb87277..48b9807fc1bf6 100644 --- a/sklearn/_build_utils/__init__.py +++ b/sklearn/_build_utils/__init__.py @@ -9,10 +9,11 @@ import sklearn import contextlib +from distutils.version import LooseVersion + from .pre_build_helpers import basic_check_build from .openmp_helpers import check_openmp_support from .._min_dependencies import CYTHON_MIN_VERSION -from ..utils.fixes import parse_version DEFAULT_ROOT = "sklearn" @@ -29,7 +30,7 @@ def _check_cython_version(): # Re-raise with more informative error message instead: raise ModuleNotFoundError(message) from e - if parse_version(Cython.__version__) < parse_version(CYTHON_MIN_VERSION): + if LooseVersion(Cython.__version__) < LooseVersion(CYTHON_MIN_VERSION): message += " The current version of Cython is {} installed in {}.".format( Cython.__version__, Cython.__path__ ) From 2f547bcf436c73ba6b19c18006f78a20bc2adde9 Mon Sep 17 00:00:00 2001 From: "Thomas J. Fan" Date: Wed, 26 Oct 2022 10:48:12 -0400 Subject: [PATCH 11/26] FIX Remove disutils.version [cd build gh] --- sklearn/_build_utils/__init__.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/sklearn/_build_utils/__init__.py b/sklearn/_build_utils/__init__.py index 48b9807fc1bf6..6828192aaf4a5 100644 --- a/sklearn/_build_utils/__init__.py +++ b/sklearn/_build_utils/__init__.py @@ -9,11 +9,10 @@ import sklearn import contextlib -from distutils.version import LooseVersion - from .pre_build_helpers import basic_check_build from .openmp_helpers import check_openmp_support from .._min_dependencies import CYTHON_MIN_VERSION +from ..externals._packaging.version import parse DEFAULT_ROOT = "sklearn" @@ -30,7 +29,7 @@ def _check_cython_version(): # Re-raise with more informative error message instead: raise ModuleNotFoundError(message) from e - if LooseVersion(Cython.__version__) < LooseVersion(CYTHON_MIN_VERSION): + if parse(Cython.__version__) < parse(CYTHON_MIN_VERSION): message += " The current version of Cython is {} installed in {}.".format( Cython.__version__, Cython.__path__ ) From bd229003fe51a893e8415912abd20f3b78497cbc Mon Sep 17 00:00:00 2001 From: "Thomas J. 
Fan" Date: Wed, 26 Oct 2022 10:56:31 -0400 Subject: [PATCH 12/26] CI Set minimum setuptools [cd build gh] --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 92ed0f0564eee..b9357a925f20e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,7 +1,7 @@ [build-system] # Minimum requirements for the build system to execute. requires = [ - "setuptools<60.0", + "setuptools>=61.0", "wheel", "Cython>=0.29.24", From af0b25c946e6aeab278a88e55d39bb6da2111d0e Mon Sep 17 00:00:00 2001 From: "Thomas J. Fan" Date: Wed, 26 Oct 2022 11:20:11 -0400 Subject: [PATCH 13/26] CI Allow for lower versions of setuptools [cd build gh] --- pyproject.toml | 2 +- sklearn/_build_utils/openmp_helpers.py | 11 ++++++----- 2 files changed, 7 insertions(+), 6 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index b9357a925f20e..92ed0f0564eee 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,7 +1,7 @@ [build-system] # Minimum requirements for the build system to execute. requires = [ - "setuptools>=61.0", + "setuptools<60.0", "wheel", "Cython>=0.29.24", diff --git a/sklearn/_build_utils/openmp_helpers.py b/sklearn/_build_utils/openmp_helpers.py index 1c1c2248e75d1..d35d89dfe580d 100644 --- a/sklearn/_build_utils/openmp_helpers.py +++ b/sklearn/_build_utils/openmp_helpers.py @@ -8,9 +8,6 @@ import sys import textwrap import warnings -import subprocess - -from setuptools.errors import CompileError, LinkError from .pre_build_helpers import compile_test_program @@ -75,6 +72,7 @@ def check_openmp_support(): extra_postargs = get_openmp_flag + exception_msg = "" try: output = compile_test_program( code, extra_preargs=extra_preargs, extra_postargs=extra_postargs @@ -91,12 +89,15 @@ def check_openmp_support(): else: openmp_supported = False - except (CompileError, LinkError, subprocess.CalledProcessError): + except Exception as exception: + exception_msg = str(exception) openmp_supported = False if not openmp_supported: if os.getenv("SKLEARN_FAIL_NO_OPENMP"): - raise CompileError("Failed to build with OpenMP") + raise Exception( + f"Failed to build with OpenMP, with message {exception_msg}" + ) else: message = textwrap.dedent( """ From bc55cd7241f67077ce85bf944ff6d7a83972da72 Mon Sep 17 00:00:00 2001 From: "Thomas J. Fan" Date: Wed, 26 Oct 2022 11:33:29 -0400 Subject: [PATCH 14/26] DOC Better formatting [cd build gh] --- sklearn/_build_utils/openmp_helpers.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sklearn/_build_utils/openmp_helpers.py b/sklearn/_build_utils/openmp_helpers.py index d35d89dfe580d..2387028ec0288 100644 --- a/sklearn/_build_utils/openmp_helpers.py +++ b/sklearn/_build_utils/openmp_helpers.py @@ -96,7 +96,7 @@ def check_openmp_support(): if not openmp_supported: if os.getenv("SKLEARN_FAIL_NO_OPENMP"): raise Exception( - f"Failed to build with OpenMP, with message {exception_msg}" + f"Failed to build with OpenMP, with error message: {exception_msg}" ) else: message = textwrap.dedent( From aebf345d7426a702bf398f937d4bfa2d373aa5fe Mon Sep 17 00:00:00 2001 From: Olivier Grisel Date: Thu, 3 Nov 2022 15:38:12 +0100 Subject: [PATCH 15/26] Update comment for builtins.__SKLEARN_SETUP__ --- setup.py | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/setup.py b/setup.py index 0d3974e1a7ef2..25f6dd6d4f347 100755 --- a/setup.py +++ b/setup.py @@ -22,12 +22,11 @@ # Python 2 compat: just to be able to declare that Python >=3.8 is needed. import __builtin__ as builtins -# This is a bit (!) 
hackish: we are setting a global variable so that the -# main sklearn __init__ can detect if it is being loaded by the setup -# routine, to avoid attempting to load components that aren't built yet: -# the numpy distutils extensions that are used by scikit-learn to -# recursively build the compiled extensions in sub-packages is based on the -# Python import machinery. +# This is a bit (!) hackish: we are setting a global variable so that the main +# sklearn __init__ can detect if it is being loaded by the setup routine, to +# avoid attempting to load components that aren't built yet. +# TODO: can this be simplified or removed since the switch to setuptools +# away from numpy.distutils? builtins.__SKLEARN_SETUP__ = True From 2d162e8d8e31501b0ba29834b1be390f20b3dd3d Mon Sep 17 00:00:00 2001 From: "Thomas J. Fan" Date: Thu, 3 Nov 2022 11:22:14 -0400 Subject: [PATCH 16/26] Apply suggestions from code review Co-authored-by: Julien Jerphanion --- setup.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/setup.py b/setup.py index 25f6dd6d4f347..d2ece89712794 100755 --- a/setup.py +++ b/setup.py @@ -192,6 +192,8 @@ def build_extensions(self): build_ext.build_extensions(self) def run(self): + # Specifying `build_clib` allows running `python setup.py develop` + # fully from a fresh clone. self.run_command("build_clib") build_ext.run(self) From a4db54ed0ad2c49a7b96df1f28b033eb9cb9d799 Mon Sep 17 00:00:00 2001 From: Olivier Grisel Date: Thu, 3 Nov 2022 16:29:33 +0100 Subject: [PATCH 17/26] Do not use distutils for BLAS/LAPACK inspection --- doc/computing/computational_performance.rst | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/doc/computing/computational_performance.rst b/doc/computing/computational_performance.rst index a7fc692fbaaa7..bb8a130d5f71e 100644 --- a/doc/computing/computational_performance.rst +++ b/doc/computing/computational_performance.rst @@ -278,11 +278,9 @@ BLAS implementation and lead to orders of magnitude speedup over a non-optimized BLAS. You can display the BLAS / LAPACK implementation used by your NumPy / SciPy / -scikit-learn install with the following commands:: +scikit-learn install with the following command:: - from numpy.distutils.system_info import get_info - print(get_info('blas_opt')) - print(get_info('lapack_opt')) + python -c "import sklearn; sklearn.show_versions()" Optimized BLAS / LAPACK implementations include: - Atlas (need hardware specific tuning by rebuilding on the target machine) From 38b61b5f703eb0ddd4c662bef1a89cf132f6d528 Mon Sep 17 00:00:00 2001 From: "Thomas J. Fan" Date: Thu, 3 Nov 2022 11:38:17 -0400 Subject: [PATCH 18/26] Apply suggestions from code review Co-authored-by: Olivier Grisel --- sklearn/_build_utils/openmp_helpers.py | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/sklearn/_build_utils/openmp_helpers.py b/sklearn/_build_utils/openmp_helpers.py index 2387028ec0288..1d46ce98b27ff 100644 --- a/sklearn/_build_utils/openmp_helpers.py +++ b/sklearn/_build_utils/openmp_helpers.py @@ -89,15 +89,20 @@ def check_openmp_support(): else: openmp_supported = False - except Exception as exception: - exception_msg = str(exception) + except Exception as openmp_exception: + # We could be more specific and only catch: CompileError, LinkError, + # and subprocess.CalledProcessError. + # setuptools introduced CompileError and LinkError, but that requires + # version 61.1. Even the latest version of Ubuntu (22.04LTS) only + # ships with 59.6. 
So for now we catch all exceptions and reraise a + # generic exception with the original error message instead: openmp_supported = False if not openmp_supported: if os.getenv("SKLEARN_FAIL_NO_OPENMP"): raise Exception( - f"Failed to build with OpenMP, with error message: {exception_msg}" - ) + f"Failed to build scikit-learn with OpenMP support" + ) from openmp_exception else: message = textwrap.dedent( """ From dd600270b8cd1b2341e8c431dc4969fad5632ba1 Mon Sep 17 00:00:00 2001 From: "Thomas J. Fan" Date: Thu, 3 Nov 2022 11:23:55 -0400 Subject: [PATCH 19/26] CLN Remove remaining references to intel compilers --- doc/developers/contributing.rst | 1 - sklearn/_build_utils/openmp_helpers.py | 4 ++-- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/doc/developers/contributing.rst b/doc/developers/contributing.rst index c1d5f159462bb..560e271ee833a 100644 --- a/doc/developers/contributing.rst +++ b/doc/developers/contributing.rst @@ -549,7 +549,6 @@ message, the following actions are taken. [lint skip] Azure pipeline skips linting [scipy-dev] Build & test with our dependencies (numpy, scipy, etc ...) development builds [nogil] Build & test with the nogil experimental branches of CPython, Cython, NumPy, SciPy... - [icc-build] Build & test with the Intel C compiler (ICC) [pypy] Build & test with PyPy [float32] Run float32 tests by setting `SKLEARN_RUN_FLOAT32_TESTS=1`. See :ref:`environment_variable` for more details [doc skip] Docs are not built diff --git a/sklearn/_build_utils/openmp_helpers.py b/sklearn/_build_utils/openmp_helpers.py index 1d46ce98b27ff..739c6f94302e8 100644 --- a/sklearn/_build_utils/openmp_helpers.py +++ b/sklearn/_build_utils/openmp_helpers.py @@ -18,11 +18,11 @@ def get_openmp_flag(compiler): else: compiler = compiler.__class__.__name__ - if sys.platform == "win32" and ("icc" in compiler or "icl" in compiler): + if sys.platform == "win32": return ["/Qopenmp"] elif sys.platform == "win32": return ["/openmp"] - elif sys.platform in ("darwin", "linux") and "icc" in compiler: + elif sys.platform in ("darwin", "linux"): return ["-qopenmp"] elif sys.platform == "darwin" and "openmp" in os.getenv("CPPFLAGS", ""): # -fopenmp can't be passed as compile flag when using Apple-clang. From 4c18f92efb88c17f905995e2e2eaa22d46942710 Mon Sep 17 00:00:00 2001 From: "Thomas J. Fan" Date: Thu, 3 Nov 2022 11:27:43 -0400 Subject: [PATCH 20/26] CLN Remove remaining references to intel compilers [cd build gh] --- sklearn/_build_utils/openmp_helpers.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/sklearn/_build_utils/openmp_helpers.py b/sklearn/_build_utils/openmp_helpers.py index 739c6f94302e8..69b54a5514a02 100644 --- a/sklearn/_build_utils/openmp_helpers.py +++ b/sklearn/_build_utils/openmp_helpers.py @@ -19,11 +19,7 @@ def get_openmp_flag(compiler): compiler = compiler.__class__.__name__ if sys.platform == "win32": - return ["/Qopenmp"] - elif sys.platform == "win32": return ["/openmp"] - elif sys.platform in ("darwin", "linux"): - return ["-qopenmp"] elif sys.platform == "darwin" and "openmp" in os.getenv("CPPFLAGS", ""): # -fopenmp can't be passed as compile flag when using Apple-clang. # OpenMP support has to be enabled during preprocessing. From 24ac0fbe79bf0ed5e6406b40ce09e3e3c21551e6 Mon Sep 17 00:00:00 2001 From: "Thomas J. 
Fan" Date: Thu, 3 Nov 2022 11:28:29 -0400 Subject: [PATCH 21/26] CLN Remove c++ distutil entry in partition_nodes --- sklearn/neighbors/_partition_nodes.pyx | 2 -- 1 file changed, 2 deletions(-) diff --git a/sklearn/neighbors/_partition_nodes.pyx b/sklearn/neighbors/_partition_nodes.pyx index 508e9560ae8c2..f2f655a7de275 100644 --- a/sklearn/neighbors/_partition_nodes.pyx +++ b/sklearn/neighbors/_partition_nodes.pyx @@ -1,5 +1,3 @@ -# distutils : language = c++ - # BinaryTrees rely on partial sorts to partition their nodes during their # initialisation. # From a9feb59b5c50b6b17de73d55e09a7a8274223f1e Mon Sep 17 00:00:00 2001 From: "Thomas J. Fan" Date: Thu, 3 Nov 2022 11:31:05 -0400 Subject: [PATCH 22/26] CLN Improve grammar --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index d2ece89712794..bccf9be78e7ab 100755 --- a/setup.py +++ b/setup.py @@ -516,7 +516,7 @@ def configure_extension_modules(): sources.append(source) continue - # `source` is a source Tempita File + # `source` is a Tempita file tempita_sources.append(source) # Do not include pxd files that were generated by tempita From 86618099568379f4cd290ab83c5f830ec2f4b7fa Mon Sep 17 00:00:00 2001 From: "Thomas J. Fan" Date: Thu, 3 Nov 2022 11:32:49 -0400 Subject: [PATCH 23/26] TST Remove ignore warning in testing [cd build gh] --- build_tools/azure/test_script.sh | 3 --- 1 file changed, 3 deletions(-) diff --git a/build_tools/azure/test_script.sh b/build_tools/azure/test_script.sh index 03829142ab4f4..f2f4690f6633d 100755 --- a/build_tools/azure/test_script.sh +++ b/build_tools/azure/test_script.sh @@ -58,9 +58,6 @@ if [[ -n "$CHECK_WARNINGS" ]]; then # removes its usage TEST_CMD="$TEST_CMD -Wignore:tostring:DeprecationWarning" - # Python 3.10 deprecates distutils, which is imported by numpy internally - TEST_CMD="$TEST_CMD -Wignore:The\ distutils:DeprecationWarning" - # Ignore distutils deprecation warning, used by joblib internally TEST_CMD="$TEST_CMD -Wignore:distutils\ Version\ classes\ are\ deprecated:DeprecationWarning" From aad2bffe9670ea1f4e95a174cfd1451c00fe8f3f Mon Sep 17 00:00:00 2001 From: "Thomas J. Fan" Date: Thu, 3 Nov 2022 11:46:38 -0400 Subject: [PATCH 24/26] CLN Fixes warnings in openmp_helpers --- sklearn/_build_utils/openmp_helpers.py | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/sklearn/_build_utils/openmp_helpers.py b/sklearn/_build_utils/openmp_helpers.py index 69b54a5514a02..d26b20d24d1cc 100644 --- a/sklearn/_build_utils/openmp_helpers.py +++ b/sklearn/_build_utils/openmp_helpers.py @@ -18,8 +18,12 @@ def get_openmp_flag(compiler): else: compiler = compiler.__class__.__name__ - if sys.platform == "win32": + if sys.platform == "win32" and ("icc" in compiler or "icl" in compiler): + return ["/Qopenmp"] + elif sys.platform == "win32": return ["/openmp"] + elif sys.platform in ("darwin", "linux") and "icc" in compiler: + return ["-qopenmp"] elif sys.platform == "darwin" and "openmp" in os.getenv("CPPFLAGS", ""): # -fopenmp can't be passed as compile flag when using Apple-clang. # OpenMP support has to be enabled during preprocessing. 
@@ -68,7 +72,7 @@ def check_openmp_support(): extra_postargs = get_openmp_flag - exception_msg = "" + openmp_exception = None try: output = compile_test_program( code, extra_preargs=extra_preargs, extra_postargs=extra_postargs @@ -85,7 +89,7 @@ def check_openmp_support(): else: openmp_supported = False - except Exception as openmp_exception: + except Exception as exception: # We could be more specific and only catch: CompileError, LinkError, # and subprocess.CalledProcessError. # setuptools introduced CompileError and LinkError, but that requires @@ -93,11 +97,12 @@ def check_openmp_support(): # ships with 59.6. So for now we catch all exceptions and reraise a # generic exception with the original error message instead: openmp_supported = False + openmp_exception = exception if not openmp_supported: if os.getenv("SKLEARN_FAIL_NO_OPENMP"): raise Exception( - f"Failed to build scikit-learn with OpenMP support" + "Failed to build scikit-learn with OpenMP support" ) from openmp_exception else: message = textwrap.dedent( From ee3c884438e2ef795e062e4bf8bfe56c9f0c304f Mon Sep 17 00:00:00 2001 From: "Thomas J. Fan" Date: Thu, 3 Nov 2022 11:46:50 -0400 Subject: [PATCH 25/26] CI [cd build gh] From 98fe1e2f62ec0fb6d0f320c048f4c822d87bc059 Mon Sep 17 00:00:00 2001 From: "Thomas J. Fan" Date: Thu, 3 Nov 2022 11:52:07 -0400 Subject: [PATCH 26/26] CI [cd build gh] --- sklearn/_build_utils/openmp_helpers.py | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/sklearn/_build_utils/openmp_helpers.py b/sklearn/_build_utils/openmp_helpers.py index d26b20d24d1cc..b89d8e97f95c6 100644 --- a/sklearn/_build_utils/openmp_helpers.py +++ b/sklearn/_build_utils/openmp_helpers.py @@ -18,12 +18,8 @@ def get_openmp_flag(compiler): else: compiler = compiler.__class__.__name__ - if sys.platform == "win32" and ("icc" in compiler or "icl" in compiler): - return ["/Qopenmp"] - elif sys.platform == "win32": + if sys.platform == "win32": return ["/openmp"] - elif sys.platform in ("darwin", "linux") and "icc" in compiler: - return ["-qopenmp"] elif sys.platform == "darwin" and "openmp" in os.getenv("CPPFLAGS", ""): # -fopenmp can't be passed as compile flag when using Apple-clang. # OpenMP support has to be enabled during preprocessing.
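
A note on the version-comparison churn in PATCH 09/26 through PATCH 11/26: the
series ends up on the vendored sklearn.externals._packaging.version.parse
because distutils.version.LooseVersion is deprecated (and disappears with
distutils in Python 3.12), and because LooseVersion compares token lists
lexically, ranking pre-releases after their final release. A minimal sketch of
the difference, assuming the vendored parser behaves like the upstream
packaging library it is copied from (runnable only where distutils still
ships):

    from distutils.version import LooseVersion  # deprecated, gone in 3.12
    from packaging.version import parse  # upstream twin of the vendored parse

    # PEP 440 ordering: dev and pre-releases sort *before* the final release,
    # so a guard like parse(Cython.__version__) < parse(CYTHON_MIN_VERSION)
    # stays correct even when a release candidate of Cython is installed.
    assert parse("1.0.dev0") < parse("1.0rc1") < parse("1.0")

    # LooseVersion tokenizes "1.0rc1" as [1, 0, "rc", 1] and compares the
    # lists lexically, so the release candidate wrongly sorts *after* 1.0:
    assert LooseVersion("1.0rc1") > LooseVersion("1.0")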
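
The exception handling that PATCH 18/26 introduces and PATCH 24/26 repairs in
check_openmp_support hinges on a Python scoping rule: the name bound by
"except ... as exc" is unbound when the handler exits (PEP 3110), so using it
in a later "raise ... from exc" outside the handler raises NameError. That is
why the final version hoists the caught exception into openmp_exception = None
before the try block. A standalone sketch of the pattern, with illustrative
names that do not come from the build code:

    def probe():
        # Stands in for compile_test_program() failing on a broken toolchain.
        raise RuntimeError("test program failed to compile")

    probe_exception = None  # hoisted: the `as exc` binding will not survive
    try:
        probe()
        supported = True
    except Exception as exc:
        probe_exception = exc  # keep a reference that outlives the handler
        supported = False

    if not supported:
        # `raise X from Y` chains the original failure into the traceback,
        # like the SKLEARN_FAIL_NO_OPENMP branch of the patched helper.
        raise Exception("Failed to build with OpenMP support") from probe_exception

Run as a script, the sketch reports both exceptions, with the RuntimeError
shown as the direct cause of the final one.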