diff --git a/.spin/LICENSE b/.spin/LICENSE new file mode 100644 index 000000000000..22ab7d811ffc --- /dev/null +++ b/.spin/LICENSE @@ -0,0 +1,29 @@ +BSD 3-Clause License + +Copyright (c) 2021--2022, Scientific Python project +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/.spin/cmds.py b/.spin/cmds.py index 05e619615e58..ea00068218dd 100644 --- a/.spin/cmds.py +++ b/.spin/cmds.py @@ -5,11 +5,433 @@ import tempfile import pathlib import shutil +import json +import pathlib import click -from spin.cmds import meson from spin import util +_run = util.run + +# START of spin/cmds/meson.py +install_dir = "build-install" + +# The numpy-vendored version of Meson +meson_cli = str(pathlib.Path(__file__).parent.parent.resolve() / + 'vendored-meson' / 'meson' / 'meson.py') + + +def _set_pythonpath(quiet=False): + site_packages = _get_site_packages() + env = os.environ + + if "PYTHONPATH" in env: + env["PYTHONPATH"] = f"{site_packages}{os.pathsep}{env['PYTHONPATH']}" + else: + env["PYTHONPATH"] = site_packages + + if not quiet: + click.secho( + f'$ export PYTHONPATH="{site_packages}"', bold=True, fg="bright_blue" + ) + + return env["PYTHONPATH"] + + +def _get_site_packages(): + candidate_paths = [] + for root, dirs, _files in os.walk(install_dir): + for subdir in dirs: + if subdir == "site-packages" or subdir == "dist-packages": + candidate_paths.append(os.path.abspath(os.path.join(root, subdir))) + + X, Y = sys.version_info.major, sys.version_info.minor + + site_packages = None + if any(f"python{X}." 
in p for p in candidate_paths):
+        # We have a system that uses `python3.X/site-packages` or `python3.X/dist-packages`
+        site_packages = [p for p in candidate_paths if f"python{X}.{Y}" in p]
+        if len(site_packages) == 0:
+            raise FileNotFoundError(
+                f"No site-packages found in {install_dir} for Python {X}.{Y}"
+            )
+        else:
+            site_packages = site_packages[0]
+    else:
+        # A naming scheme that does not encode the Python major/minor version is used, so return
+        # whatever site-packages path was found
+        if len(candidate_paths) > 1:
+            raise FileNotFoundError(
+                f"Multiple `site-packages` found in `{install_dir}`, but cannot use Python version to disambiguate"
+            )
+        elif len(candidate_paths) == 1:
+            site_packages = candidate_paths[0]
+
+    if site_packages is None:
+        raise FileNotFoundError(
+            f"No `site-packages` or `dist-packages` found under `{install_dir}`"
+        )
+
+    return site_packages
+
+
+def _meson_version():
+    try:
+        p = _run([meson_cli, "--version"], output=False, echo=False)
+        return p.stdout.decode("ascii").strip()
+    except Exception:  # meson unavailable; callers treat None as "unknown"
+        pass
+
+
+def _meson_version_configured():
+    try:
+        meson_info_fn = os.path.join("build", "meson-info", "meson-info.json")
+        with open(meson_info_fn) as f:
+            meson_info = json.load(f)
+        return meson_info["meson_version"]["full"]
+    except Exception:  # build dir absent or not yet configured
+        pass
+
+
+@click.command()
+@click.option("-j", "--jobs", help="Number of parallel tasks to launch", type=int)
+@click.option("--clean", is_flag=True, help="Clean build directory before build")
+@click.option(
+    "-v", "--verbose", is_flag=True, help="Print all build output, even installation"
+)
+@click.argument("meson_args", nargs=-1)
+def build(meson_args, jobs=None, clean=False, verbose=False):
+    """🔧 Build package with Meson/ninja and install
+
+    MESON_ARGS are passed through, e.g.:
+
+    spin build -- -Dpkg_config_path=/lib64/pkgconfig
+
+    The package is installed to build-install
+
+    By default this builds for release. To be able to use a debugger, set
+    CFLAGS appropriately; for example, on Linux use
+
+    CFLAGS="-O0 -g" spin build
+    """
+    build_dir = "build"
+    setup_cmd = [meson_cli, "setup", build_dir, "--prefix=/usr"] + list(meson_args)
+
+    if clean:
+        print(f"Removing `{build_dir}`")
+        if os.path.isdir(build_dir):
+            shutil.rmtree(build_dir)
+        print(f"Removing `{install_dir}`")
+        if os.path.isdir(install_dir):
+            shutil.rmtree(install_dir)
+
+    if not (os.path.exists(build_dir) and _meson_version_configured()):
+        p = _run(setup_cmd, sys_exit=False)
+        if p.returncode != 0:
+            raise RuntimeError(
+                "Meson configuration failed; please try `spin build` again with the `--clean` flag."
+            )
+    else:
+        # Build dir has been configured; check if it was configured by
+        # current version of Meson
+
+        if _meson_version() != _meson_version_configured():
+            _run(setup_cmd + ["--reconfigure"])
+
+        # Any other conditions that warrant a reconfigure?
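+        # Note (assumption): a change in MESON_ARGS alone is not detected
+        # here; when in doubt, `spin build --clean` forces a fresh setup.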
+ + p = _run([meson_cli, "compile", "-C", build_dir], sys_exit=False) + p = _run( + [ + meson_cli, + "install", + "--only-changed", + "-C", + build_dir, + "--destdir", + f"../{install_dir}", + ], + output=verbose, + ) + + +def _get_configured_command(command_name): + from spin.cmds.util import get_commands + command_groups = get_commands() + commands = [cmd for section in command_groups for cmd in command_groups[section]] + return next((cmd for cmd in commands if cmd.name == command_name), None) + + +@click.command() +@click.argument("pytest_args", nargs=-1) +@click.pass_context +def meson_test(ctx, pytest_args): + """🔧 Run tests + + PYTEST_ARGS are passed through directly to pytest, e.g.: + + spin test -- -v + + To run tests on a directory or file: + + \b + spin test numpy/linalg + spin test numpy/linalg/tests/test_linalg.py + + To run specific tests, by module, function, class, or method: + + \b + spin test -- --pyargs numpy.random + spin test -- --pyargs numpy.random.tests.test_generator_mt19937 + spin test -- --pyargs numpy.random.tests.test_generator_mt19937::TestMultivariateHypergeometric + spin test -- --pyargs numpy.random.tests.test_generator_mt19937::TestMultivariateHypergeometric::test_edge_cases + + To report the durations of the N slowest tests: + + spin test -- --durations=N + + To run tests that match a given pattern: + + \b + spin test -- -k "geometric" + spin test -- -k "geometric and not rgeometric" + + To skip tests with a given marker: + + spin test -- -m "not slow" + + To parallelize test runs (requires `pytest-xdist`): + + spin test -- -n NUM_JOBS + + For more, see `pytest --help`. + + """ + from spin.cmds.util import get_config + cfg = get_config() + + build_cmd = _get_configured_command("build") + if build_cmd: + click.secho( + "Invoking `build` prior to running tests:", bold=True, fg="bright_green" + ) + ctx.invoke(build_cmd) + + package = cfg.get("tool.spin.package", None) + if not pytest_args: + pytest_args = (package,) + if pytest_args == (None,): + print( + "Please specify `package = packagename` under `tool.spin` section of `pyproject.toml`" + ) + sys.exit(1) + + site_path = _set_pythonpath() + + # Sanity check that library built properly + if sys.version_info[:2] >= (3, 11): + p = _run([sys.executable, "-P", "-c", f"import {package}"], sys_exit=False) + if p.returncode != 0: + print(f"As a sanity check, we tried to import {package}.") + print("Stopping. Please investigate the build error.") + sys.exit(1) + + print(f'$ export PYTHONPATH="{site_path}"') + _run( + [sys.executable, "-m", "pytest", f"--rootdir={site_path}"] + list(pytest_args), + cwd=site_path, + replace=True, + ) + + +@click.command() +@click.argument("ipython_args", nargs=-1) +def ipython(ipython_args): + """💻 Launch IPython shell with PYTHONPATH set + + IPYTHON_ARGS are passed through directly to IPython, e.g.: + + spin ipython -- -i myscript.py + """ + p = _set_pythonpath() + print(f'💻 Launching IPython with PYTHONPATH="{p}"') + _run(["ipython", "--ignore-cwd"] + list(ipython_args), replace=True) + + +@click.command() +@click.argument("shell_args", nargs=-1) +def meson_shell(shell_args=[]): + """💻 Launch shell with PYTHONPATH set + + SHELL_ARGS are passed through directly to the shell, e.g.: + + spin shell -- -c 'echo $PYTHONPATH' + + Ensure that your shell init file (e.g., ~/.zshrc) does not override + the PYTHONPATH. 
+ """ + p = _set_pythonpath() + shell = os.environ.get("SHELL", "sh") + cmd = [shell] + list(shell_args) + print(f'💻 Launching shell with PYTHONPATH="{p}"') + print("⚠ Change directory to avoid importing source instead of built package") + print("⚠ Ensure that your ~/.shellrc does not unset PYTHONPATH") + _run(cmd, replace=True) + + +@click.command() +@click.argument("python_args", nargs=-1) +def meson_python(python_args): + """🐍 Launch Python shell with PYTHONPATH set + + PYTHON_ARGS are passed through directly to Python, e.g.: + + spin python -- -c 'import sys; print(sys.path)' + """ + p = _set_pythonpath() + v = sys.version_info + if (v.major < 3) or (v.major == 3 and v.minor < 11): + print("We're sorry, but this feature only works on Python 3.11 and greater 😢") + print() + print( + "Why? Because we need the '-P' flag so the interpreter doesn't muck with PYTHONPATH" + ) + print() + print("However! You can still launch your own interpreter:") + print() + print(f" PYTHONPATH='{p}' python") + print() + print("And then call:") + print() + print("import sys; del(sys.path[0])") + sys.exit(-1) + + print(f'🐍 Launching Python with PYTHONPATH="{p}"') + + _run(["/usr/bin/env", "python", "-P"] + list(python_args), replace=True) + + +@click.command(context_settings={"ignore_unknown_options": True}) +@click.argument("args", nargs=-1) +def meson_run(args): + """🏁 Run a shell command with PYTHONPATH set + + \b + spin run make + spin run 'echo $PYTHONPATH' + spin run python -c 'import sys; del sys.path[0]; import mypkg' + + If you'd like to expand shell variables, like `$PYTHONPATH` in the example + above, you need to provide a single, quoted command to `run`: + + spin run 'echo $SHELL && echo $PWD' + + On Windows, all shell commands are run via Bash. + Install Git for Windows if you don't have Bash already. + """ + if not len(args) > 0: + raise RuntimeError("No command given") + + is_posix = sys.platform in ("linux", "darwin") + shell = len(args) == 1 + if shell: + args = args[0] + + if shell and not is_posix: + # On Windows, we're going to try to use bash + args = ["bash", "-c", args] + + _set_pythonpath(quiet=True) + _run(args, echo=False, shell=shell) + + +@click.command() +@click.argument("sphinx_target", default="html") +@click.option( + "--clean", + is_flag=True, + default=False, + help="Clean previously built docs before building", +) +@click.option( + "--build/--no-build", + "first_build", + default=True, + help="Build numpy before generating docs", +) +@click.option("--jobs", "-j", default="auto", help="Number of parallel build jobs") +@click.pass_context +def meson_docs(ctx, sphinx_target, clean, first_build, jobs): + """📖 Build Sphinx documentation + + By default, SPHINXOPTS="-W", raising errors on warnings. 
+ To build without raising on warnings: + + SPHINXOPTS="" spin docs + + To list all Sphinx targets: + + spin docs targets + + To build another Sphinx target: + + spin docs TARGET + + """ + # Detect docs dir + doc_dir_candidates = ("doc", "docs") + doc_dir = next((d for d in doc_dir_candidates if os.path.exists(d)), None) + if doc_dir is None: + print( + f"No documentation folder found; one of {', '.join(doc_dir_candidates)} must exist" + ) + sys.exit(1) + + if sphinx_target in ("targets", "help"): + clean = False + first_build = False + sphinx_target = "help" + + if clean: + doc_dirs = [ + "./doc/build/", + "./doc/source/api/", + "./doc/source/auto_examples/", + "./doc/source/jupyterlite_contents/", + ] + for doc_dir in doc_dirs: + if os.path.isdir(doc_dir): + print(f"Removing {doc_dir!r}") + shutil.rmtree(doc_dir) + + build_cmd = _get_configured_command("build") + + if build_cmd and first_build: + click.secho( + "Invoking `build` prior to building docs:", bold=True, fg="bright_green" + ) + ctx.invoke(build_cmd) + + try: + site_path = _get_site_packages() + except FileNotFoundError: + print("No built numpy found; run `spin build` first.") + sys.exit(1) + + opts = os.environ.get("SPHINXOPTS", "-W") + os.environ["SPHINXOPTS"] = f"{opts} -j {jobs}" + click.secho( + f"$ export SPHINXOPTS={os.environ['SPHINXOPTS']}", bold=True, fg="bright_blue" + ) + + os.environ["PYTHONPATH"] = f'{site_path}{os.sep}:{os.environ.get("PYTHONPATH", "")}' + click.secho( + f"$ export PYTHONPATH={os.environ['PYTHONPATH']}", bold=True, fg="bright_blue" + ) + _run(["make", "-C", "doc", sphinx_target], replace=True) + + +# END of spin/cmds/meson.py + @click.command() @click.argument("sphinx_target", default="html") @@ -32,7 +454,7 @@ ) @click.option( "--install-deps/--no-install-deps", - default=True, + default=False, help="Install dependencies before building" ) @click.pass_context @@ -57,9 +479,9 @@ def docs(ctx, sphinx_target, clean, first_build, jobs, install_deps): if install_deps: util.run(['pip', 'install', '-q', '-r', 'doc_requirements.txt']) - meson.docs.ignore_unknown_options = True + meson_docs.ignore_unknown_options = True del ctx.params['install_deps'] - ctx.forward(meson.docs) + ctx.forward(meson_docs) @click.command() @@ -145,7 +567,7 @@ def test(ctx, pytest_args, markexpr, n_jobs, tests, verbose): for extra_param in ('markexpr', 'n_jobs', 'tests', 'verbose'): del ctx.params[extra_param] - ctx.forward(meson.test) + ctx.forward(meson_test) @click.command() @@ -172,7 +594,7 @@ def gdb(code, gdb_args): spin gdb my_tests.py spin gdb -- my_tests.py --mytest-flag """ - meson._set_pythonpath() + _set_pythonpath() gdb_args = list(gdb_args) if gdb_args and gdb_args[0].endswith('.py'): @@ -344,9 +766,9 @@ def bench(ctx, tests, compare, verbose, commits): "Invoking `build` prior to running benchmarks:", bold=True, fg="bright_green" ) - ctx.invoke(meson.build) + ctx.invoke(build) - meson._set_pythonpath() + _set_pythonpath() p = util.run( ['python', '-c', 'import numpy as np; print(np.__version__)'], @@ -402,8 +824,8 @@ def python(ctx, python_args): """ env = os.environ env['PYTHONWARNINGS'] = env.get('PYTHONWARNINGS', 'all') - ctx.invoke(meson.build) - ctx.forward(meson.python) + ctx.invoke(build) + ctx.forward(meson_python) @click.command(context_settings={ @@ -421,9 +843,9 @@ def ipython(ctx, ipython_args): env = os.environ env['PYTHONWARNINGS'] = env.get('PYTHONWARNINGS', 'all') - ctx.invoke(meson.build) + ctx.invoke(build) - ppath = meson._set_pythonpath() + ppath = _set_pythonpath() print(f'💻 Launching IPython 
with PYTHONPATH="{ppath}"')
+
+    preimport = (r"import numpy as np; "
@@ -452,5 +874,5 @@ def run(ctx, args):
     On Windows, all shell commands are run via Bash.
     Install Git for Windows if you don't have Bash already.
     """
-    ctx.invoke(meson.build)
-    ctx.forward(meson.run)
+    ctx.invoke(build)
+    ctx.forward(meson_run)
diff --git a/LICENSES_bundled.txt b/LICENSES_bundled.txt
index 26c7a7829361..b278f473c0c7 100644
--- a/LICENSES_bundled.txt
+++ b/LICENSES_bundled.txt
@@ -20,3 +20,22 @@ Name: libdivide
 Files: numpy/core/include/numpy/libdivide/*
 License: Zlib
   For license text, see numpy/core/include/numpy/libdivide/LICENSE.txt
+
+
+Note that the following files are vendored in the repository and sdist but not
+installed in built numpy packages:
+
+Name: Meson
+Files: vendored-meson/meson/*
+License: Apache 2.0
+  For license text, see vendored-meson/meson/COPYING
+
+Name: meson-python
+Files: vendored-meson/meson-python/*
+License: MIT
+  For license text, see vendored-meson/meson-python/LICENSE
+
+Name: spin
+Files: .spin/cmds.py
+License: BSD-3
+  For license text, see .spin/LICENSE
diff --git a/pyproject.toml b/pyproject.toml
index c19f2045ba6a..1047766834db 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,8 +1,18 @@
 [build-system]
 build-backend = "mesonpy"
+backend-path = ['./vendored-meson/meson-python']
 requires = [
     "Cython>=3.0",
-    "meson-python>=0.13.1",
+    # All dependencies of the vendored meson-python (except for meson, because
+    # we've got that vendored too - that's the point of this exercise).
+    'pyproject-metadata >= 0.7.1',
+    'tomli >= 1.0.0; python_version < "3.11"',
+    'setuptools >= 60.0; python_version >= "3.12"',
+    'colorama; os_name == "nt"',
+    # Note that `ninja` and (on Linux) `patchelf` are added dynamically by
+    # meson-python if those tools are not already present on the system. No
+    # need to worry about those unless one does a non-isolated build - in that
+    # case they must already be installed on the system.
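+    # (For such a non-isolated build: e.g. `pip install ninja` first, then
+    # `pip install . --no-build-isolation`.)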
] [project] @@ -184,7 +194,7 @@ repair-wheel-command = "bash ./tools/wheels/repair_windows.sh {wheel} {dest_dir} package = 'numpy' [tool.spin.commands] -"Build" = ["spin.cmds.meson.build", ".spin/cmds.py:test"] +"Build" = [".spin/cmds.py:build", ".spin/cmds.py:test"] "Environments" = [ ".spin/cmds.py:run", ".spin/cmds.py:ipython", ".spin/cmds.py:python", ".spin/cmds.py:gdb" diff --git a/vendored-meson/meson-python/.gitignore b/vendored-meson/meson-python/.gitignore new file mode 100644 index 000000000000..90b7672f4e4b --- /dev/null +++ b/vendored-meson/meson-python/.gitignore @@ -0,0 +1,6 @@ +# SPDX-FileCopyrightText: 2022 The meson-python developers +# +# SPDX-License-Identifier: MIT + +.mesonpy-native-file.ini +*.pyc diff --git a/vendored-meson/meson-python/LICENSE b/vendored-meson/meson-python/LICENSE new file mode 100644 index 000000000000..e43479067a13 --- /dev/null +++ b/vendored-meson/meson-python/LICENSE @@ -0,0 +1,21 @@ +Copyright © 2022 the meson-python contributors +Copyright © 2021 Quansight Labs and Filipe Laíns + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice (including the next +paragraph) shall be included in all copies or substantial portions of the +Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL +THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. diff --git a/vendored-meson/meson-python/LICENSES/MIT.txt b/vendored-meson/meson-python/LICENSES/MIT.txt new file mode 100644 index 000000000000..e43479067a13 --- /dev/null +++ b/vendored-meson/meson-python/LICENSES/MIT.txt @@ -0,0 +1,21 @@ +Copyright © 2022 the meson-python contributors +Copyright © 2021 Quansight Labs and Filipe Laíns + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice (including the next +paragraph) shall be included in all copies or substantial portions of the +Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL +THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. 
diff --git a/vendored-meson/meson-python/mesonpy/__init__.py b/vendored-meson/meson-python/mesonpy/__init__.py new file mode 100644 index 000000000000..95a1828fdae1 --- /dev/null +++ b/vendored-meson/meson-python/mesonpy/__init__.py @@ -0,0 +1,1150 @@ +# SPDX-FileCopyrightText: 2021 Filipe Laíns +# SPDX-FileCopyrightText: 2021 Quansight, LLC +# SPDX-FileCopyrightText: 2022 The meson-python developers +# +# SPDX-License-Identifier: MIT + +"""Meson Python build backend + +Implements PEP 517 hooks. +""" + +from __future__ import annotations + +import argparse +import collections +import contextlib +import difflib +import functools +import importlib.machinery +import io +import itertools +import json +import os +import pathlib +import platform +import re +import shutil +import subprocess +import sys +import sysconfig +import tarfile +import tempfile +import textwrap +import typing +import warnings + +from typing import Dict + + +if sys.version_info < (3, 11): + import tomli as tomllib +else: + import tomllib + +import packaging.version +import pyproject_metadata + +import mesonpy._compat +import mesonpy._dylib +import mesonpy._elf +import mesonpy._tags +import mesonpy._util +import mesonpy._wheelfile + +from mesonpy._compat import Collection, Mapping, cached_property, read_binary + + +if typing.TYPE_CHECKING: # pragma: no cover + from typing import ( + Any, Callable, ClassVar, DefaultDict, List, Literal, Optional, Sequence, TextIO, Tuple, Type, TypeVar, Union + ) + + from mesonpy._compat import Iterator, ParamSpec, Path + + P = ParamSpec('P') + T = TypeVar('T') + + +__version__ = '0.14.0.dev0' + +# The numpy-vendored version of Meson +meson_cli = str(pathlib.Path(__file__).resolve().parent.parent.parent / 'meson' / 'meson.py') + + +# XXX: Once Python 3.8 is our minimum supported version, get rid of +# meson_args_keys and use typing.get_args(MesonArgsKeys) instead. + +# Keep both definitions in sync! +_MESON_ARGS_KEYS = ['dist', 'setup', 'compile', 'install'] +if typing.TYPE_CHECKING: + MesonArgsKeys = Literal['dist', 'setup', 'compile', 'install'] + MesonArgs = Mapping[MesonArgsKeys, List[str]] +else: + MesonArgs = dict + + +_COLORS = { + 'red': '\33[31m', + 'cyan': '\33[36m', + 'yellow': '\33[93m', + 'light_blue': '\33[94m', + 'bold': '\33[1m', + 'dim': '\33[2m', + 'underline': '\33[4m', + 'reset': '\33[0m', +} +_NO_COLORS = {color: '' for color in _COLORS} + +_NINJA_REQUIRED_VERSION = '1.8.2' +_MESON_REQUIRED_VERSION = '0.63.3' # keep in sync with the version requirement in pyproject.toml + + +class _depstr: + """Namespace that holds the requirement strings for dependencies we *might* + need at runtime. Having them in one place makes it easier to update. + """ + patchelf = 'patchelf >= 0.11.0' + ninja = f'ninja >= {_NINJA_REQUIRED_VERSION}' + + +def _init_colors() -> Dict[str, str]: + """Detect if we should be using colors in the output. We will enable colors + if running in a TTY, and no environment variable overrides it. Setting the + NO_COLOR (https://no-color.org/) environment variable force-disables colors, + and FORCE_COLOR forces color to be used, which is useful for thing like + Github actions. 
+ """ + if 'NO_COLOR' in os.environ: + if 'FORCE_COLOR' in os.environ: + warnings.warn( + 'Both NO_COLOR and FORCE_COLOR environment variables are set, disabling color', + stacklevel=1, + ) + return _NO_COLORS + elif 'FORCE_COLOR' in os.environ or sys.stdout.isatty(): + return _COLORS + return _NO_COLORS + + +_STYLES = _init_colors() # holds the color values, should be _COLORS or _NO_COLORS + + +_SUFFIXES = importlib.machinery.all_suffixes() +_EXTENSION_SUFFIXES = importlib.machinery.EXTENSION_SUFFIXES +_EXTENSION_SUFFIX_REGEX = re.compile(r'^\.(?:(?P[^.]+)\.)?(?:so|pyd|dll)$') +assert all(re.match(_EXTENSION_SUFFIX_REGEX, x) for x in _EXTENSION_SUFFIXES) + + +# Map Meson installation path placeholders to wheel installation paths. +# See https://docs.python.org/3/library/sysconfig.html#installation-paths +_INSTALLATION_PATH_MAP = { + '{bindir}': 'scripts', + '{py_purelib}': 'purelib', + '{py_platlib}': 'platlib', + '{moduledir_shared}': 'platlib', + '{includedir}': 'headers', + '{datadir}': 'data', + # custom location + '{libdir}': 'mesonpy-libs', + '{libdir_shared}': 'mesonpy-libs', +} + + +def _map_to_wheel(sources: Dict[str, Dict[str, Any]]) -> DefaultDict[str, List[Tuple[pathlib.Path, str]]]: + """Map files to the wheel, organized by wheel installation directrory.""" + wheel_files: DefaultDict[str, List[Tuple[pathlib.Path, str]]] = collections.defaultdict(list) + packages: Dict[str, str] = {} + + for group in sources.values(): + for src, target in group.items(): + destination = pathlib.Path(target['destination']) + anchor = destination.parts[0] + + path = _INSTALLATION_PATH_MAP.get(anchor) + if path is None: + raise BuildError(f'Could not map installation path to an equivalent wheel directory: {str(destination)!r}') + + if path == 'purelib' or path == 'platlib': + package = destination.parts[1] + other = packages.setdefault(package, path) + if other != path: + this = os.fspath(pathlib.Path(path, *destination.parts[1:])) + that = os.fspath(other / next(d for d, s in wheel_files[other] if d.parts[0] == destination.parts[1])) + raise BuildError( + f'The {package} package is split between {path} and {other}: ' + f'{this!r} and {that!r}, a "pure: false" argument may be missing in meson.build. ' + f'It is recommended to set it in "import(\'python\').find_installation()"') + + wheel_files[path].append((pathlib.Path(*destination.parts[1:]), src)) + return wheel_files + + +def _showwarning( + message: Union[Warning, str], + category: Type[Warning], + filename: str, + lineno: int, + file: Optional[TextIO] = None, + line: Optional[str] = None, +) -> None: # pragma: no cover + """Callable to override the default warning handler, to have colored output.""" + print('{yellow}meson-python: warning:{reset} {}'.format(message, **_STYLES)) + + +def _setup_cli() -> None: + """Setup CLI stuff (eg. handlers, hooks, etc.). Should only be called when + actually we are in control of the CLI, not on a normal import. 
+ """ + warnings.showwarning = _showwarning + + try: # pragma: no cover + import colorama + except ModuleNotFoundError: # pragma: no cover + pass + else: # pragma: no cover + colorama.init() # fix colors on windows + + +class Error(RuntimeError): + def __str__(self) -> str: + return str(self.args[0]) + + +class ConfigError(Error): + """Error in the backend configuration.""" + + +class BuildError(Error): + """Error when building the wheel.""" + + +class MesonBuilderError(Error): + """Error when building the Meson package.""" + + +class _WheelBuilder(): + """Helper class to build wheels from projects.""" + + def __init__( + self, + project: Project, + metadata: Optional[pyproject_metadata.StandardMetadata], + source_dir: pathlib.Path, + build_dir: pathlib.Path, + sources: Dict[str, Dict[str, Any]], + ) -> None: + self._project = project + self._metadata = metadata + self._source_dir = source_dir + self._build_dir = build_dir + self._sources = sources + + self._libs_build_dir = self._build_dir / 'mesonpy-wheel-libs' + + @cached_property + def _wheel_files(self) -> DefaultDict[str, List[Tuple[pathlib.Path, str]]]: + return _map_to_wheel(self._sources) + + @property + def _has_internal_libs(self) -> bool: + return bool(self._wheel_files['mesonpy-libs']) + + @property + def _has_extension_modules(self) -> bool: + # Assume that all code installed in {platlib} is Python ABI dependent. + return bool(self._wheel_files['platlib']) + + @property + def normalized_name(self) -> str: + return self._project.name.replace('-', '_') + + @property + def basename(self) -> str: + """Normalized wheel name and version (eg. meson_python-1.0.0).""" + return '{distribution}-{version}'.format( + distribution=self.normalized_name, + version=self._project.version, + ) + + @property + def tag(self) -> mesonpy._tags.Tag: + """Wheel tags.""" + if self.is_pure: + return mesonpy._tags.Tag('py3', 'none', 'any') + if not self._has_extension_modules: + # The wheel has platform dependent code (is not pure) but + # does not contain any extension module (does not + # distribute any file in {platlib}) thus use generic + # implementation and ABI tags. + return mesonpy._tags.Tag('py3', 'none', None) + return mesonpy._tags.Tag(None, self._stable_abi, None) + + @property + def name(self) -> str: + """Wheel name, this includes the basename and tag.""" + return '{basename}-{tag}'.format( + basename=self.basename, + tag=self.tag, + ) + + @property + def distinfo_dir(self) -> str: + return f'{self.basename}.dist-info' + + @property + def data_dir(self) -> str: + return f'{self.basename}.data' + + @cached_property + def is_pure(self) -> bool: + """Is the wheel "pure" (architecture independent)?""" + # XXX: I imagine some users might want to force the package to be + # non-pure, but I think it's better that we evaluate use-cases as they + # arise and make sure allowing the user to override this is indeed the + # best option for the use-case. 
+        if self._wheel_files['platlib']:
+            return False
+        for _, file in self._wheel_files['scripts']:
+            if self._is_native(file):
+                return False
+        return True
+
+    @property
+    def wheel(self) -> bytes:
+        """Return WHEEL file for dist-info."""
+        return textwrap.dedent('''
+            Wheel-Version: 1.0
+            Generator: meson
+            Root-Is-Purelib: {is_purelib}
+            Tag: {tag}
+        ''').strip().format(
+            is_purelib='true' if self.is_pure else 'false',
+            tag=self.tag,
+        ).encode()
+
+    @property
+    def entrypoints_txt(self) -> bytes:
+        """dist-info entry_points.txt."""
+        if not self._metadata:
+            return b''
+
+        data = self._metadata.entrypoints.copy()
+        data.update({
+            'console_scripts': self._metadata.scripts,
+            'gui_scripts': self._metadata.gui_scripts,
+        })
+
+        text = ''
+        for entrypoint in data:
+            if data[entrypoint]:
+                text += f'[{entrypoint}]\n'
+                for name, target in data[entrypoint].items():
+                    text += f'{name} = {target}\n'
+                text += '\n'
+
+        return text.encode()
+
+    @cached_property
+    def _stable_abi(self) -> Optional[str]:
+        """Determine stable ABI compatibility.
+
+        Examine all files installed in {platlib} that look like
+        extension modules (extension .pyd on Windows, .dll on Cygwin,
+        and .so on other platforms) and, if they all share the same
+        PEP 3149 filename stable ABI tag, return it.
+
+        Other files are ignored.
+
+        """
+        soext = sorted(_EXTENSION_SUFFIXES, key=len)[0]
+        abis = []
+
+        for path, _ in self._wheel_files['platlib']:
+            # NOTE: When searching for shared object files, we assume the host
+            # and build machines have the same soext, even though we might
+            # be cross compiling.
+            if path.suffix == soext:
+                match = re.match(r'^[^.]+(.*)$', path.name)
+                assert match is not None
+                suffix = match.group(1)
+                match = _EXTENSION_SUFFIX_REGEX.match(suffix)
+                if match:
+                    abis.append(match.group('abi'))
+
+        stable = [x for x in abis if x and re.match(r'abi\d+', x)]
+        if len(stable) > 0 and len(stable) == len(abis) and all(x == stable[0] for x in stable[1:]):
+            return stable[0]
+        return None
+
+    @property
+    def top_level_modules(self) -> Collection[str]:
+        modules = set()
+        for type_ in self._wheel_files:
+            for path, _ in self._wheel_files[type_]:
+                name, dot, ext = path.parts[0].partition('.')
+                if dot:
+                    # module
+                    suffix = dot + ext
+                    if suffix in _SUFFIXES:
+                        modules.add(name)
+                else:
+                    # package
+                    modules.add(name)
+        return modules
+
+    def _is_native(self, file: Union[str, pathlib.Path]) -> bool:
+        """Check if file is a native file."""
+        self._project.build()  # the project needs to be built for this :/
+
+        with open(file, 'rb') as f:
+            if platform.system() == 'Linux':
+                return f.read(4) == b'\x7fELF'  # ELF
+            elif platform.system() == 'Darwin':
+                return f.read(4) in (
+                    b'\xfe\xed\xfa\xce',  # 32-bit
+                    b'\xfe\xed\xfa\xcf',  # 64-bit
+                    b'\xcf\xfa\xed\xfe',  # arm64
+                    b'\xca\xfe\xba\xbe',  # universal / fat (same as java class so beware!)
+                )
+            elif platform.system() == 'Windows':
+                return f.read(2) == b'MZ'
+
+        # For unknown platforms, check for file extensions.
+        _, ext = os.path.splitext(file)
+        if ext in ('.so', '.a', '.out', '.exe', '.dll', '.dylib', '.pyd'):
+            return True
+        return False
+
+    def _install_path(  # noqa: C901
+        self,
+        wheel_file: mesonpy._wheelfile.WheelFile,
+        counter: mesonpy._util.CLICounter,
+        origin: Path,
+        destination: pathlib.Path,
+    ) -> None:
+        """"Install" file or directory into the wheel
+        and do the necessary processing before doing so.
+
+        Some files might need to be fixed up to set the RPATH to the internal
+        library directory on Linux wheels, for example.
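+        (On macOS an equivalent @loader_path-relative entry is added via
+        install_name_tool; see mesonpy._dylib.)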
+ """ + location = destination.as_posix() + counter.update(location) + + # fix file + if os.path.isdir(origin): + for root, dirnames, filenames in os.walk(str(origin)): + # Sort the directory names so that `os.walk` will walk them in a + # defined order on the next iteration. + dirnames.sort() + for name in sorted(filenames): + path = os.path.normpath(os.path.join(root, name)) + if os.path.isfile(path): + arcname = os.path.join(destination, os.path.relpath(path, origin).replace(os.path.sep, '/')) + wheel_file.write(path, arcname) + else: + if self._has_internal_libs: + if platform.system() == 'Linux' or platform.system() == 'Darwin': + # add .mesonpy.libs to the RPATH of ELF files + if self._is_native(os.fspath(origin)): + # copy ELF to our working directory to avoid Meson having to regenerate the file + new_origin = self._libs_build_dir / pathlib.Path(origin).relative_to(self._build_dir) + os.makedirs(new_origin.parent, exist_ok=True) + shutil.copy2(origin, new_origin) + origin = new_origin + # add our in-wheel libs folder to the RPATH + if platform.system() == 'Linux': + elf = mesonpy._elf.ELF(origin) + libdir_path = \ + f'$ORIGIN/{os.path.relpath(f".{self._project.name}.mesonpy.libs", destination.parent)}' + if libdir_path not in elf.rpath: + elf.rpath = [*elf.rpath, libdir_path] + elif platform.system() == 'Darwin': + dylib = mesonpy._dylib.Dylib(origin) + libdir_path = \ + f'@loader_path/{os.path.relpath(f".{self._project.name}.mesonpy.libs", destination.parent)}' + if libdir_path not in dylib.rpath: + dylib.rpath = [*dylib.rpath, libdir_path] + else: + # Internal libraries are currently unsupported on this platform + raise NotImplementedError("Bundling libraries in wheel is not supported on platform '{}'" + .format(platform.system())) + + try: + wheel_file.write(origin, location) + except FileNotFoundError: + # work around for Meson bug, see https://github.com/mesonbuild/meson/pull/11655 + if not os.fspath(origin).endswith('.pdb'): + raise + + def _wheel_write_metadata(self, whl: mesonpy._wheelfile.WheelFile) -> None: + # add metadata + whl.writestr(f'{self.distinfo_dir}/METADATA', self._project.metadata) + whl.writestr(f'{self.distinfo_dir}/WHEEL', self.wheel) + if self.entrypoints_txt: + whl.writestr(f'{self.distinfo_dir}/entry_points.txt', self.entrypoints_txt) + + # add license (see https://github.com/mesonbuild/meson-python/issues/88) + if self._project.license_file: + whl.write( + self._source_dir / self._project.license_file, + f'{self.distinfo_dir}/{os.path.basename(self._project.license_file)}', + ) + + def build(self, directory: Path) -> pathlib.Path: + # ensure project is built + self._project.build() + # install the project + self._project.install() + + wheel_file = pathlib.Path(directory, f'{self.name}.whl') + + with mesonpy._wheelfile.WheelFile(wheel_file, 'w') as whl: + self._wheel_write_metadata(whl) + + with mesonpy._util.cli_counter( + len(list(itertools.chain.from_iterable(self._wheel_files.values()))), + ) as counter: + # install root scheme files + root_scheme = 'purelib' if self.is_pure else 'platlib' + for destination, origin in self._wheel_files[root_scheme]: + self._install_path(whl, counter, origin, destination) + + # install bundled libraries + for destination, origin in self._wheel_files['mesonpy-libs']: + destination = pathlib.Path(f'.{self._project.name}.mesonpy.libs', destination) + self._install_path(whl, counter, origin, destination) + + # install the other schemes + for scheme in self._wheel_files.keys(): + if scheme in (root_scheme, 'mesonpy-libs'): 
+ continue + for destination, origin in self._wheel_files[scheme]: + destination = pathlib.Path(self.data_dir, scheme, destination) + self._install_path(whl, counter, origin, destination) + + return wheel_file + + def build_editable(self, directory: Path, verbose: bool = False) -> pathlib.Path: + # ensure project is built + self._project.build() + + wheel_file = pathlib.Path(directory, f'{self.name}.whl') + + with mesonpy._wheelfile.WheelFile(wheel_file, 'w') as whl: + self._wheel_write_metadata(whl) + whl.writestr( + f'{self.distinfo_dir}/direct_url.json', + self._source_dir.as_uri().encode('utf-8'), + ) + + # install loader module + loader_module_name = f'_{self.normalized_name.replace(".", "_")}_editable_loader' + whl.writestr( + f'{loader_module_name}.py', + read_binary('mesonpy', '_editable.py') + textwrap.dedent(f''' + install( + {self.top_level_modules!r}, + {os.fspath(self._build_dir)!r}, + {self._project._build_command!r}, + {verbose!r}, + )''').encode('utf-8')) + + # install .pth file + whl.writestr( + f'{self.normalized_name}-editable.pth', + f'import {loader_module_name}'.encode('utf-8')) + + return wheel_file + + +def _validate_pyproject_config(pyproject: Dict[str, Any]) -> Dict[str, Any]: + + def _table(scheme: Dict[str, Callable[[Any, str], Any]]) -> Callable[[Any, str], Dict[str, Any]]: + def func(value: Any, name: str) -> Dict[str, Any]: + if not isinstance(value, dict): + raise ConfigError(f'Configuration entry "{name}" must be a table') + table = {} + for key, val in value.items(): + check = scheme.get(key) + if check is None: + raise ConfigError(f'Unknown configuration entry "{name}.{key}"') + table[key] = check(val, f'{name}.{key}') + return table + return func + + def _strings(value: Any, name: str) -> List[str]: + if not isinstance(value, list) or not all(isinstance(x, str) for x in value): + raise ConfigError(f'Configuration entry "{name}" must be a list of strings') + return value + + scheme = _table({ + 'args': _table({ + name: _strings for name in _MESON_ARGS_KEYS + }) + }) + + table = pyproject.get('tool', {}).get('meson-python', {}) + return scheme(table, 'tool.meson-python') + + +def _validate_config_settings(config_settings: Dict[str, Any]) -> Dict[str, Any]: + """Validate options received from build frontend.""" + + def _string(value: Any, name: str) -> str: + if not isinstance(value, str): + raise ConfigError(f'Only one value for "{name}" can be specified') + return value + + def _bool(value: Any, name: str) -> bool: + return True + + def _string_or_strings(value: Any, name: str) -> List[str]: + return list([value,] if isinstance(value, str) else value) + + options = { + 'builddir': _string, + 'editable-verbose': _bool, + 'dist-args': _string_or_strings, + 'setup-args': _string_or_strings, + 'compile-args': _string_or_strings, + 'install-args': _string_or_strings, + } + assert all(f'{name}-args' in options for name in _MESON_ARGS_KEYS) + + config = {} + for key, value in config_settings.items(): + parser = options.get(key) + if parser is None: + matches = difflib.get_close_matches(key, options.keys(), n=2) + if matches: + alternatives = ' or '.join(f'"{match}"' for match in matches) + raise ConfigError(f'Unknown option "{key}". 
Did you mean {alternatives}?') + else: + raise ConfigError(f'Unknown option "{key}"') + config[key] = parser(value, key) + return config + + +class Project(): + """Meson project wrapper to generate Python artifacts.""" + + _ALLOWED_DYNAMIC_FIELDS: ClassVar[List[str]] = [ + 'version', + ] + _metadata: pyproject_metadata.StandardMetadata + + def __init__( + self, + source_dir: Path, + working_dir: Path, + build_dir: Optional[Path] = None, + meson_args: Optional[MesonArgs] = None, + editable_verbose: bool = False, + ) -> None: + self._source_dir = pathlib.Path(source_dir).absolute() + self._working_dir = pathlib.Path(working_dir).absolute() + self._build_dir = pathlib.Path(build_dir).absolute() if build_dir else (self._working_dir / 'build') + self._editable_verbose = editable_verbose + self._install_dir = self._working_dir / 'install' + self._meson_native_file = self._build_dir / 'meson-python-native-file.ini' + self._meson_cross_file = self._build_dir / 'meson-python-cross-file.ini' + self._meson_args: MesonArgs = collections.defaultdict(list) + + _check_meson_version() + + self._ninja = _env_ninja_command() + if self._ninja is None: + raise ConfigError(f'Could not find ninja version {_NINJA_REQUIRED_VERSION} or newer.') + os.environ.setdefault('NINJA', self._ninja) + + # make sure the build dir exists + self._build_dir.mkdir(exist_ok=True, parents=True) + self._install_dir.mkdir(exist_ok=True, parents=True) + + # setuptools-like ARCHFLAGS environment variable support + if sysconfig.get_platform().startswith('macosx-'): + archflags = os.environ.get('ARCHFLAGS', '').strip() + if archflags: + arch, *other = filter(None, (x.strip() for x in archflags.split('-arch'))) + if other: + raise ConfigError(f'Multi-architecture builds are not supported but $ARCHFLAGS={archflags!r}') + macver, _, nativearch = platform.mac_ver() + if arch != nativearch: + x = os.environ.setdefault('_PYTHON_HOST_PLATFORM', f'macosx-{macver}-{arch}') + if not x.endswith(arch): + raise ConfigError(f'$ARCHFLAGS={archflags!r} and $_PYTHON_HOST_PLATFORM={x!r} do not agree') + family = 'aarch64' if arch == 'arm64' else arch + cross_file_data = textwrap.dedent(f''' + [binaries] + c = ['cc', '-arch', {arch!r}] + cpp = ['c++', '-arch', {arch!r}] + [host_machine] + system = 'darwin' + cpu = {arch!r} + cpu_family = {family!r} + endian = 'little' + ''') + self._meson_cross_file.write_text(cross_file_data) + self._meson_args['setup'].extend(('--cross-file', os.fspath(self._meson_cross_file))) + + # load pyproject.toml + pyproject = tomllib.loads(self._source_dir.joinpath('pyproject.toml').read_text()) + + # load meson args from pyproject.toml + pyproject_config = _validate_pyproject_config(pyproject) + for key, value in pyproject_config.get('args', {}).items(): + self._meson_args[key].extend(value) + + # meson arguments from the command line take precedence over + # arguments from the configuration file thus are added later + if meson_args: + for key, value in meson_args.items(): + self._meson_args[key].extend(value) + + # write the native file + native_file_data = textwrap.dedent(f''' + [binaries] + python = '{sys.executable}' + ''') + self._meson_native_file.write_text(native_file_data) + + # reconfigure if we have a valid Meson build directory. Meson + # uses the presence of the 'meson-private/coredata.dat' file + # in the build directory as indication that the build + # directory has already been configured and arranges this file + # to be created as late as possible or deleted if something + # goes wrong during setup. 
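+        # A directory left behind by an interrupted setup therefore gets a
+        # plain `meson setup` run rather than a `--reconfigure`.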
+ reconfigure = self._build_dir.joinpath('meson-private/coredata.dat').is_file() + + # run meson setup + self._configure(reconfigure=reconfigure) + + # package metadata + if 'project' in pyproject: + self._metadata = pyproject_metadata.StandardMetadata.from_pyproject(pyproject, self._source_dir) + else: + self._metadata = pyproject_metadata.StandardMetadata( + name=self._meson_name, version=packaging.version.Version(self._meson_version)) + self._validate_metadata() + + # set version from meson.build if dynamic + if 'version' in self._metadata.dynamic: + self._metadata.version = packaging.version.Version(self._meson_version) + + def _run(self, cmd: Sequence[str]) -> None: + """Invoke a subprocess.""" + # Flush the line to ensure that the log line with the executed + # command line appears before the command output. Without it, + # the lines appear in the wrong order in pip output. + print('{cyan}{bold}+ {}{reset}'.format(' '.join(cmd), **_STYLES), flush=True) + r = subprocess.run(cmd, cwd=self._build_dir) + if r.returncode != 0: + raise SystemExit(r.returncode) + + def _configure(self, reconfigure: bool = False) -> None: + """Configure Meson project.""" + setup_args = [ + os.fspath(self._source_dir), + os.fspath(self._build_dir), + # default build options + '-Dbuildtype=release', + '-Db_ndebug=if-release', + '-Db_vscrt=md', + # user build options + *self._meson_args['setup'], + # pass native file last to have it override the python + # interpreter path that may have been specified in user + # provided native files + f'--native-file={os.fspath(self._meson_native_file)}', + ] + if reconfigure: + setup_args.insert(0, '--reconfigure') + + self._run([meson_cli, 'setup', *setup_args]) + + def _validate_metadata(self) -> None: + """Check the pyproject.toml metadata and see if there are any issues.""" + + # check for unsupported dynamic fields + unsupported_dynamic = { + key for key in self._metadata.dynamic + if key not in self._ALLOWED_DYNAMIC_FIELDS + } + if unsupported_dynamic: + s = ', '.join(f'"{x}"' for x in unsupported_dynamic) + raise MesonBuilderError(f'Unsupported dynamic fields: {s}') + + # check if we are running on an unsupported interpreter + if self._metadata.requires_python: + self._metadata.requires_python.prereleases = True + if platform.python_version().rstrip('+') not in self._metadata.requires_python: + raise MesonBuilderError( + f'Unsupported Python version {platform.python_version()}, ' + f'expected {self._metadata.requires_python}' + ) + + @cached_property + def _wheel_builder(self) -> _WheelBuilder: + return _WheelBuilder( + self, + self._metadata, + self._source_dir, + self._build_dir, + self._install_plan, + ) + + @property + def _build_command(self) -> List[str]: + assert self._ninja is not None # help mypy out + if platform.system() == 'Windows': + # On Windows use 'meson compile' to setup the MSVC compiler + # environment. Using the --ninja-args option allows to + # provide the exact same semantics for the compile arguments + # provided by the users. 
+ cmd = [meson_cli, 'compile'] + args = list(self._meson_args['compile']) + if args: + cmd.append(f'--ninja-args={args!r}') + return cmd + return [self._ninja, *self._meson_args['compile']] + + @functools.lru_cache(maxsize=None) + def build(self) -> None: + """Build the Meson project.""" + self._run(self._build_command) + + def install(self) -> None: + """Install the Meson project.""" + destdir = os.fspath(self._install_dir) + self._run([meson_cli, 'install', '--quiet', '--no-rebuild', '--destdir', destdir, *self._meson_args['install']]) + + @classmethod + @contextlib.contextmanager + def with_temp_working_dir( + cls, + source_dir: Path = os.path.curdir, + build_dir: Optional[Path] = None, + meson_args: Optional[MesonArgs] = None, + editable_verbose: bool = False, + ) -> Iterator[Project]: + """Creates a project instance pointing to a temporary working directory.""" + with tempfile.TemporaryDirectory(prefix='.mesonpy-', dir=os.fspath(source_dir)) as tmpdir: + yield cls(source_dir, tmpdir, build_dir, meson_args, editable_verbose) + + @functools.lru_cache() + def _info(self, name: str) -> Dict[str, Any]: + """Read info from meson-info directory.""" + file = self._build_dir.joinpath('meson-info', f'{name}.json') + return typing.cast( + Dict[str, str], + json.loads(file.read_text()) + ) + + @property + def _install_plan(self) -> Dict[str, Dict[str, Dict[str, str]]]: + """Meson install_plan metadata.""" + install_plan = self._info('intro-install_plan') + + # parse install args to extract --tags and --skip-subprojects + parser = argparse.ArgumentParser() + parser.add_argument('--tags') + parser.add_argument('--skip-subprojects', nargs='?', const='*', default='') + args, _ = parser.parse_known_args(self._meson_args['install']) + install_tags = {t.strip() for t in args.tags.split(',')} if args.tags else None + skip_subprojects = {p for p in (p.strip() for p in args.skip_subprojects.split(',')) if p} + + manifest: DefaultDict[str, Dict[str, Dict[str, str]]] = collections.defaultdict(dict) + + # filter install_plan accordingly + for key, targets in install_plan.items(): + for target, details in targets.items(): + if install_tags is not None and details['tag'] not in install_tags: + continue + subproject = details.get('subproject') + if subproject is not None and (subproject in skip_subprojects or '*' in skip_subprojects): + continue + manifest[key][target] = details + + return manifest + + @property + def _meson_name(self) -> str: + """Name in meson.build.""" + name = self._info('intro-projectinfo')['descriptive_name'] + assert isinstance(name, str) + return name + + @property + def _meson_version(self) -> str: + """Version in meson.build.""" + name = self._info('intro-projectinfo')['version'] + assert isinstance(name, str) + return name + + @property + def name(self) -> str: + """Project name.""" + return str(self._metadata.name).replace('-', '_') + + @property + def version(self) -> str: + """Project version.""" + return str(self._metadata.version) + + @cached_property + def metadata(self) -> bytes: + """Project metadata as an RFC822 message.""" + return bytes(self._metadata.as_rfc822()) + + @property + def license_file(self) -> Optional[pathlib.Path]: + if self._metadata: + license_ = self._metadata.license + if license_ and license_.file: + return pathlib.Path(license_.file) + return None + + @property + def is_pure(self) -> bool: + """Is the wheel "pure" (architecture independent)?""" + return bool(self._wheel_builder.is_pure) + + def sdist(self, directory: Path) -> pathlib.Path: + """Generates 
an sdist (source distribution) in the specified directory."""
+        # generate meson dist file
+        self._run([meson_cli, 'dist', '--allow-dirty', '--no-tests', '--formats', 'gztar', *self._meson_args['dist']])
+
+        # move meson dist file to output path
+        dist_name = f'{self.name}-{self.version}'
+        meson_dist_name = f'{self._meson_name}-{self._meson_version}'
+        meson_dist_path = pathlib.Path(self._build_dir, 'meson-dist', f'{meson_dist_name}.tar.gz')
+        sdist = pathlib.Path(directory, f'{dist_name}.tar.gz')
+
+        with tarfile.open(meson_dist_path, 'r:gz') as meson_dist, mesonpy._util.create_targz(sdist) as (tar, mtime):
+            for member in meson_dist.getmembers():
+                # calculate the file path in the source directory
+                assert member.name, member.name
+                member_parts = member.name.split('/')
+                if len(member_parts) <= 1:
+                    continue
+                path = self._source_dir.joinpath(*member_parts[1:])
+
+                if not path.exists() and member.isfile():
+                    # The file does not exist in the source directory but does
+                    # exist in the Meson dist, so it is a generated file, which
+                    # we need to include.
+                    # See https://mesonbuild.com/Reference-manual_builtin_meson.html#mesonadd_dist_script
+
+                    # MESON_DIST_ROOT could have a different base name
+                    # than the actual sdist basename, so we need to rename here
+                    file = meson_dist.extractfile(member.name)
+                    member.name = str(pathlib.Path(dist_name, *member_parts[1:]).as_posix())
+                    tar.addfile(member, file)
+                    continue
+
+                if not path.is_file():
+                    continue
+
+                info = tarfile.TarInfo(member.name)
+                file_stat = os.stat(path)
+                info.size = file_stat.st_size
+                info.mode = int(oct(file_stat.st_mode)[-3:], 8)
+
+                # rewrite the path if necessary, to match the sdist distribution name
+                if dist_name != meson_dist_name:
+                    info.name = pathlib.Path(
+                        dist_name,
+                        path.relative_to(self._source_dir)
+                    ).as_posix()
+
+                with path.open('rb') as f:
+                    tar.addfile(info, fileobj=f)
+
+            # add PKG-INFO to dist file to make it a sdist
+            pkginfo_info = tarfile.TarInfo(f'{dist_name}/PKG-INFO')
+            if mtime:
+                pkginfo_info.mtime = mtime
+            pkginfo_info.size = len(self.metadata)
+            tar.addfile(pkginfo_info, fileobj=io.BytesIO(self.metadata))
+
+        return sdist
+
+    def wheel(self, directory: Path) -> pathlib.Path:
+        """Generates a wheel (binary distribution) in the specified directory."""
+        file = self._wheel_builder.build(directory)
+        assert isinstance(file, pathlib.Path)
+        return file
+
+    def editable(self, directory: Path) -> pathlib.Path:
+        file = self._wheel_builder.build_editable(directory, self._editable_verbose)
+        assert isinstance(file, pathlib.Path)
+        return file
+
+
+@contextlib.contextmanager
+def _project(config_settings: Optional[Dict[Any, Any]]) -> Iterator[Project]:
+    """Create the project from the given config settings."""
+
+    settings = _validate_config_settings(config_settings or {})
+    meson_args = {name: settings.get(f'{name}-args', []) for name in _MESON_ARGS_KEYS}
+
+    with Project.with_temp_working_dir(
+        build_dir=settings.get('builddir'),
+        meson_args=typing.cast(MesonArgs, meson_args),
+        editable_verbose=bool(settings.get('editable-verbose'))
+    ) as project:
+        yield project
+
+
+def _parse_version_string(string: str) -> Tuple[int, ...]:
+    """Parse version string."""
+    try:
+        return tuple(map(int, string.split('.')[:3]))
+    except ValueError:
+        return (0, )
+
+
+def _env_ninja_command(*, version: str = _NINJA_REQUIRED_VERSION) -> Optional[str]:
+    """Return the path to ninja, or None if no suitable ninja is found."""
+    required_version = _parse_version_string(version)
+    env_ninja = os.environ.get('NINJA')
+    ninja_candidates =
[env_ninja] if env_ninja else ['ninja', 'ninja-build', 'samu']
+    for ninja in ninja_candidates:
+        ninja_path = shutil.which(ninja)
+        if ninja_path is not None:
+            version = subprocess.run([ninja_path, '--version'], check=False, text=True, capture_output=True).stdout
+            if _parse_version_string(version) >= required_version:
+                return ninja_path
+    return None
+
+
+def _check_meson_version(*, version: str = _MESON_REQUIRED_VERSION) -> None:
+    """Check that the meson executable in the path has an appropriate version.
+
+    The meson Python package is a dependency of the meson-python
+    Python package; however, it may occur that the meson Python
+    package is installed but the corresponding meson command is not
+    available in $PATH. Implement a runtime check to verify that the
+    build environment is set up correctly.
+
+    """
+    required_version = _parse_version_string(version)
+    meson_version = subprocess.run([meson_cli, '--version'], check=False, text=True, capture_output=True).stdout
+    if _parse_version_string(meson_version) < required_version:
+        raise ConfigError(f'Could not find meson version {version} or newer, found {meson_version}.')
+
+
+def _add_ignore_files(directory: pathlib.Path) -> None:
+    directory.joinpath('.gitignore').write_text(textwrap.dedent('''
+        # This file is generated by meson-python. It will not be recreated if deleted or modified.
+        *
+    '''), encoding='utf-8')
+    directory.joinpath('.hgignore').write_text(textwrap.dedent('''
+        # This file is generated by meson-python. It will not be recreated if deleted or modified.
+        syntax: glob
+        **/*
+    '''), encoding='utf-8')
+
+
+def _pyproject_hook(func: Callable[P, T]) -> Callable[P, T]:
+    @functools.wraps(func)
+    def wrapper(*args: P.args, **kwargs: P.kwargs) -> T:
+        try:
+            return func(*args, **kwargs)
+        except (Error, pyproject_metadata.ConfigurationError) as exc:
+            prefix = '{red}meson-python: error:{reset} '.format(**_STYLES)
+            print('\n' + textwrap.indent(str(exc), prefix))
+            raise SystemExit(1) from exc
+    return wrapper
+
+
+@_pyproject_hook
+def get_requires_for_build_sdist(
+    config_settings: Optional[Dict[str, str]] = None,
+) -> List[str]:
+    if os.environ.get('NINJA') is None and _env_ninja_command() is None:
+        return [_depstr.ninja]
+    return []
+
+
+@_pyproject_hook
+def build_sdist(
+    sdist_directory: str,
+    config_settings: Optional[Dict[Any, Any]] = None,
+) -> str:
+    _setup_cli()
+
+    out = pathlib.Path(sdist_directory)
+    with _project(config_settings) as project:
+        return project.sdist(out).name
+
+
+@_pyproject_hook
+def get_requires_for_build_wheel(
+    config_settings: Optional[Dict[str, str]] = None,
+) -> List[str]:
+    dependencies = []
+
+    if os.environ.get('NINJA') is None and _env_ninja_command() is None:
+        dependencies.append(_depstr.ninja)
+
+    if sys.platform.startswith('linux'):
+        # we may need patchelf
+        if not shutil.which('patchelf'):
+            # patchelf not already accessible on the system
+            if _env_ninja_command() is not None:
+                # we have ninja available, so we can run Meson and check if the project needs patchelf
+                with _project(config_settings) as project:
+                    if not project.is_pure:
+                        dependencies.append(_depstr.patchelf)
+            else:
+                # we can't check if the project needs patchelf, so always add it
+                # XXX: wait for https://github.com/mesonbuild/meson/pull/10779
+                dependencies.append(_depstr.patchelf)
+
+    return dependencies
+
+
+@_pyproject_hook
+def build_wheel(
+    wheel_directory: str,
+    config_settings: Optional[Dict[Any, Any]] = None,
+    metadata_directory: Optional[str] = None,
+) -> str:
+    _setup_cli()
+
+    out =
pathlib.Path(wheel_directory) + with _project(config_settings) as project: + return project.wheel(out).name + + +@_pyproject_hook +def build_editable( + wheel_directory: str, + config_settings: Optional[Dict[Any, Any]] = None, + metadata_directory: Optional[str] = None, +) -> str: + _setup_cli() + + # force set a permanent builddir + if not config_settings: + config_settings = {} + if 'builddir' not in config_settings: + builddir = pathlib.Path('build') + builddir.mkdir(exist_ok=True) + if not next(builddir.iterdir(), None): + _add_ignore_files(builddir) + config_settings['builddir'] = os.fspath(builddir / str(mesonpy._tags.get_abi_tag())) + + out = pathlib.Path(wheel_directory) + with _project(config_settings) as project: + return project.editable(out).name + + +@_pyproject_hook +def get_requires_for_build_editable( + config_settings: Optional[Dict[str, str]] = None, +) -> List[str]: + return get_requires_for_build_wheel() diff --git a/vendored-meson/meson-python/mesonpy/_compat.py b/vendored-meson/meson-python/mesonpy/_compat.py new file mode 100644 index 000000000000..12a337f59f71 --- /dev/null +++ b/vendored-meson/meson-python/mesonpy/_compat.py @@ -0,0 +1,67 @@ +# SPDX-FileCopyrightText: 2021 Filipe Laíns +# SPDX-FileCopyrightText: 2021 Quansight, LLC +# SPDX-FileCopyrightText: 2022 The meson-python developers +# +# SPDX-License-Identifier: MIT + +from __future__ import annotations + +import functools +import importlib.resources +import os +import pathlib +import sys +import typing + + +if sys.version_info >= (3, 9): + from collections.abc import Collection, Iterable, Iterator, Mapping, Sequence +else: + from typing import Collection, Iterable, Iterator, Mapping, Sequence + + +if sys.version_info >= (3, 8): + from functools import cached_property +else: + cached_property = lambda x: property(functools.lru_cache(maxsize=None)(x)) # noqa: E731 + + +if sys.version_info >= (3, 9): + def read_binary(package: str, resource: str) -> bytes: + return importlib.resources.files(package).joinpath(resource).read_bytes() +else: + read_binary = importlib.resources.read_binary + + +if typing.TYPE_CHECKING: + from typing import Union + + if sys.version_info >= (3, 10): + from typing import ParamSpec + else: + from typing_extensions import ParamSpec + + Path = Union[str, os.PathLike] + + +# backport og pathlib.Path.is_relative_to +def is_relative_to(path: pathlib.Path, other: Union[pathlib.Path, str]) -> bool: + try: + path.relative_to(other) + except ValueError: + return False + return True + + +__all__ = [ + 'cached_property', + 'is_relative_to', + 'read_binary', + 'Collection', + 'Iterable', + 'Iterator', + 'Mapping', + 'Path', + 'ParamSpec', + 'Sequence', +] diff --git a/vendored-meson/meson-python/mesonpy/_dylib.py b/vendored-meson/meson-python/mesonpy/_dylib.py new file mode 100644 index 000000000000..222d16adec11 --- /dev/null +++ b/vendored-meson/meson-python/mesonpy/_dylib.py @@ -0,0 +1,55 @@ +# SPDX-License-Identifier: MIT +# SPDX-FileCopyrightText: 2023 Lars Pastewka + +from __future__ import annotations + +import os +import subprocess +import typing + + +if typing.TYPE_CHECKING: + from typing import Optional + + from mesonpy._compat import Collection, Path + + +# This class is modeled after the ELF class in _elf.py +class Dylib: + def __init__(self, path: Path) -> None: + self._path = os.fspath(path) + self._rpath: Optional[Collection[str]] = None + self._needed: Optional[Collection[str]] = None + + def _otool(self, *args: str) -> str: + return subprocess.check_output(['otool', *args, 
self._path], stderr=subprocess.STDOUT).decode() + + def _install_name_tool(self, *args: str) -> str: + return subprocess.check_output(['install_name_tool', *args, self._path], stderr=subprocess.STDOUT).decode() + + @property + def rpath(self) -> Collection[str]: + if self._rpath is None: + self._rpath = [] + # Run otool -l to get the load commands + otool_output = self._otool('-l').strip() + # Manually parse the output for LC_RPATH + rpath_tag = False + for line in [x.split() for x in otool_output.split('\n')]: + if line == ['cmd', 'LC_RPATH']: + rpath_tag = True + elif len(line) >= 2 and line[0] == 'path' and rpath_tag: + self._rpath += [line[1]] + rpath_tag = False + return frozenset(self._rpath) + + @rpath.setter + def rpath(self, value: Collection[str]) -> None: + # We clear all LC_RPATH load commands + if self._rpath: + for rpath in self._rpath: + self._install_name_tool('-delete_rpath', rpath) + # We then rewrite the new load commands + for rpath in value: + self._install_name_tool('-add_rpath', rpath) + self._rpath = value diff --git a/vendored-meson/meson-python/mesonpy/_editable.py b/vendored-meson/meson-python/mesonpy/_editable.py new file mode 100644 index 000000000000..986ec77bcb70 --- /dev/null +++ b/vendored-meson/meson-python/mesonpy/_editable.py @@ -0,0 +1,318 @@ +# SPDX-FileCopyrightText: 2022 The meson-python developers +# +# SPDX-License-Identifier: MIT + +# This file should be standalone! It is copied during the editable hook installation. + +from __future__ import annotations + +import functools +import importlib.abc +import importlib.machinery +import importlib.util +import json +import os +import pathlib +import subprocess +import sys +import typing + + +if typing.TYPE_CHECKING: + from collections.abc import Sequence, Set + from types import ModuleType + from typing import Any, Dict, Iterator, List, Optional, Tuple, Union + NodeBase = Dict[str, Union[Node, str]] + PathStr = Union[str, os.PathLike[str]] +else: + NodeBase = dict + + +if sys.version_info >= (3, 12): + from importlib.resources.abc import Traversable, TraversableResources +elif sys.version_info >= (3, 9): + from importlib.abc import Traversable, TraversableResources +else: + class Traversable: + pass + class TraversableResources: + pass + + +MARKER = 'MESONPY_EDITABLE_SKIP' +VERBOSE = 'MESONPY_EDITABLE_VERBOSE' + + +class MesonpyOrphan(Traversable): + def __init__(self, name: str): + self._name = name + + @property + def name(self) -> str: + return self._name + + def is_dir(self) -> bool: + return False + + def is_file(self) -> bool: + return False + + def iterdir(self) -> Iterator[Traversable]: + raise FileNotFoundError() + + def open(self, *args, **kwargs): # type: ignore + raise FileNotFoundError() + + def joinpath(self, *descendants: PathStr) -> Traversable: + if not descendants: + return self + name = os.fspath(descendants[-1]).split('/')[-1] + return MesonpyOrphan(name) + + def __truediv__(self, child: PathStr) -> Traversable: + return self.joinpath(child) + + def read_bytes(self) -> bytes: + raise FileNotFoundError() + + def read_text(self, encoding: Optional[str] = None) -> str: + raise FileNotFoundError() + + +class MesonpyTraversable(Traversable): + def __init__(self, name: str, tree: Node): + self._name = name + self._tree = tree + + @property + def name(self) -> str: + return self._name + + def is_dir(self) -> bool: + return True + + def is_file(self) -> bool: + return False + + def iterdir(self) -> Iterator[Traversable]: + for name, node in self._tree.items(): + yield MesonpyTraversable(name, 
node) if isinstance(node, dict) else pathlib.Path(node) # type: ignore + + def open(self, *args, **kwargs): # type: ignore + raise IsADirectoryError() + + @staticmethod + def _flatten(names: Tuple[PathStr, ...]) -> Iterator[str]: + for name in names: + yield from os.fspath(name).split('/') + + def joinpath(self, *descendants: PathStr) -> Traversable: + if not descendants: + return self + names = self._flatten(descendants) + name = next(names) + node = self._tree.get(name) + if isinstance(node, dict): + return MesonpyTraversable(name, node).joinpath(*names) + if isinstance(node, str): + return pathlib.Path(node).joinpath(*names) + return MesonpyOrphan(name).joinpath(*names) + + def __truediv__(self, child: PathStr) -> Traversable: + return self.joinpath(child) + + def read_bytes(self) -> bytes: + raise IsADirectoryError() + + def read_text(self, encoding: Optional[str] = None) -> str: + raise IsADirectoryError() + + +class MesonpyReader(TraversableResources): + def __init__(self, name: str, tree: Node): + self._name = name + self._tree = tree + + def files(self) -> Traversable: + return MesonpyTraversable(self._name, self._tree) + + +class ExtensionFileLoader(importlib.machinery.ExtensionFileLoader): + def __init__(self, name: str, path: str, tree: Node): + super().__init__(name, path) + self._tree = tree + + def get_resource_reader(self, name: str) -> TraversableResources: + return MesonpyReader(name, self._tree) + + +class SourceFileLoader(importlib.machinery.SourceFileLoader): + def __init__(self, name: str, path: str, tree: Node): + super().__init__(name, path) + self._tree = tree + + def set_data(self, path: Union[bytes, str], data: bytes, *, _mode: int = ...) -> None: + # disable saving bytecode + pass + + def get_resource_reader(self, name: str) -> TraversableResources: + return MesonpyReader(name, self._tree) + + +class SourcelessFileLoader(importlib.machinery.SourcelessFileLoader): + def __init__(self, name: str, path: str, tree: Node): + super().__init__(name, path) + self._tree = tree + + def get_resource_reader(self, name: str) -> TraversableResources: + return MesonpyReader(name, self._tree) + + +LOADERS = [ + (ExtensionFileLoader, tuple(importlib.machinery.EXTENSION_SUFFIXES)), + (SourceFileLoader, tuple(importlib.machinery.SOURCE_SUFFIXES)), + (SourcelessFileLoader, tuple(importlib.machinery.BYTECODE_SUFFIXES)), +] + + +def build_module_spec(cls: type, name: str, path: str, tree: Optional[Node]) -> importlib.machinery.ModuleSpec: + loader = cls(name, path, tree) + spec = importlib.machinery.ModuleSpec(name, loader, origin=path) + spec.has_location = True + if loader.is_package(name): + spec.submodule_search_locations = [] + return spec + + +class Node(NodeBase): + """Tree structure to store a virtual filesystem view.""" + + def __missing__(self, key: str) -> Node: + value = self[key] = Node() + return value + + def __setitem__(self, key: Union[str, Tuple[str, ...]], value: Union[Node, str]) -> None: + node = self + if isinstance(key, tuple): + for k in key[:-1]: + node = typing.cast(Node, node[k]) + key = key[-1] + dict.__setitem__(node, key, value) + + def __getitem__(self, key: Union[str, Tuple[str, ...]]) -> Union[Node, str]: + node = self + if isinstance(key, tuple): + for k in key[:-1]: + node = typing.cast(Node, node[k]) + key = key[-1] + return dict.__getitem__(node, key) + + def get(self, key: Union[str, Tuple[str, ...]]) -> Optional[Union[Node, str]]: # type: ignore[override] + node = self + if isinstance(key, tuple): + for k in key[:-1]: + v = dict.get(node, k) + if 
v is None: + return None + node = typing.cast(Node, v) + key = key[-1] + return dict.get(node, key) + + +def walk(root: str, path: str = '') -> Iterator[pathlib.Path]: + with os.scandir(os.path.join(root, path)) as entries: + for entry in entries: + if entry.is_dir(): + yield from walk(root, os.path.join(path, entry.name)) + else: + yield pathlib.Path(path, entry.name) + + +def collect(install_plan: Dict[str, Dict[str, Any]]) -> Node: + tree = Node() + for key, data in install_plan.items(): + for src, target in data.items(): + path = pathlib.Path(target['destination']) + if path.parts[0] in {'{py_platlib}', '{py_purelib}'}: + if key == 'install_subdirs' and os.path.isdir(src): + for entry in walk(src): + tree[(*path.parts[1:], *entry.parts)] = os.path.join(src, *entry.parts) + else: + tree[path.parts[1:]] = src + return tree + + +class MesonpyMetaFinder(importlib.abc.MetaPathFinder): + def __init__(self, names: Set[str], path: str, cmd: List[str], verbose: bool = False): + self._top_level_modules = names + self._build_path = path + self._build_cmd = cmd + self._verbose = verbose + self._loaders: List[Tuple[type, str]] = [] + for loader, suffixes in LOADERS: + self._loaders.extend((loader, suffix) for suffix in suffixes) + + def __repr__(self) -> str: + return f'{self.__class__.__name__}({self._build_path!r})' + + def find_spec( + self, + fullname: str, + path: Optional[Sequence[Union[bytes, str]]] = None, + target: Optional[ModuleType] = None + ) -> Optional[importlib.machinery.ModuleSpec]: + if fullname.split('.', maxsplit=1)[0] in self._top_level_modules: + if self._build_path in os.environ.get(MARKER, '').split(os.pathsep): + return None + namespace = False + tree = self.rebuild() + parts = fullname.split('.') + + # look for a package + package = tree.get(tuple(parts)) + if isinstance(package, Node): + for loader, suffix in self._loaders: + src = package.get('__init__' + suffix) + if isinstance(src, str): + return build_module_spec(loader, fullname, src, package) + else: + namespace = True + + # look for a module + for loader, suffix in self._loaders: + src = tree.get((*parts[:-1], parts[-1] + suffix)) + if isinstance(src, str): + return build_module_spec(loader, fullname, src, None) + + # namespace + if namespace: + spec = importlib.machinery.ModuleSpec(fullname, None) + spec.submodule_search_locations = [] + return spec + + return None + + @functools.lru_cache(maxsize=1) + def rebuild(self) -> Node: + # skip editable wheel lookup during rebuild: during the build + # the module we are rebuilding might be imported causing a + # rebuild loop. 
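+        # The skip list lives in the MESONPY_EDITABLE_SKIP environment
+        # variable: find_spec() above declines any import whose finder
+        # build path is listed there, so the build subprocess spawned
+        # below cannot re-enter this rebuild.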
+ env = os.environ.copy() + env[MARKER] = os.pathsep.join((env.get(MARKER, ''), self._build_path)) + + if self._verbose or bool(env.get(VERBOSE, '')): + print('+ ' + ' '.join(self._build_cmd)) + stdout = None + else: + stdout = subprocess.DEVNULL + + subprocess.run(self._build_cmd, cwd=self._build_path, env=env, stdout=stdout, check=True) + + install_plan_path = os.path.join(self._build_path, 'meson-info', 'intro-install_plan.json') + with open(install_plan_path, 'r', encoding='utf8') as f: + install_plan = json.load(f) + return collect(install_plan) + + +def install(names: Set[str], path: str, cmd: List[str], verbose: bool) -> None: + sys.meta_path.insert(0, MesonpyMetaFinder(names, path, cmd, verbose)) diff --git a/vendored-meson/meson-python/mesonpy/_elf.py b/vendored-meson/meson-python/mesonpy/_elf.py new file mode 100644 index 000000000000..b9d8512d7f58 --- /dev/null +++ b/vendored-meson/meson-python/mesonpy/_elf.py @@ -0,0 +1,56 @@ +# SPDX-FileCopyrightText: 2021 Filipe Laíns +# SPDX-FileCopyrightText: 2021 Quansight, LLC +# SPDX-FileCopyrightText: 2022 The meson-python developers +# +# SPDX-License-Identifier: MIT + +from __future__ import annotations + +import os +import subprocess +import typing + + +if typing.TYPE_CHECKING: # pragma: no cover + from typing import Optional + + from mesonpy._compat import Collection, Path + + +class ELF: + def __init__(self, path: Path) -> None: + self._path = os.fspath(path) + self._rpath: Optional[Collection[str]] = None + self._needed: Optional[Collection[str]] = None + + def _patchelf(self, *args: str) -> str: + return subprocess.check_output(['patchelf', *args, self._path], stderr=subprocess.STDOUT).decode() + + @property + def rpath(self) -> Collection[str]: + if self._rpath is None: + rpath = self._patchelf('--print-rpath').strip() + self._rpath = rpath.split(':') if rpath else [] + return frozenset(self._rpath) + + @rpath.setter + def rpath(self, value: Collection[str]) -> None: + self._patchelf('--set-rpath', ':'.join(value)) + self._rpath = value + + @property + def needed(self) -> Collection[str]: + if self._needed is None: + self._needed = frozenset(self._patchelf('--print-needed').splitlines()) + return self._needed + + @needed.setter + def needed(self, value: Collection[str]) -> None: + value = frozenset(value) + for entry in self.needed: + if entry not in value: + self._patchelf('--remove-needed', entry) + for entry in value: + if entry not in self.needed: + self._patchelf('--add-needed', entry) + self._needed = value diff --git a/vendored-meson/meson-python/mesonpy/_tags.py b/vendored-meson/meson-python/mesonpy/_tags.py new file mode 100644 index 000000000000..7e44eb454813 --- /dev/null +++ b/vendored-meson/meson-python/mesonpy/_tags.py @@ -0,0 +1,172 @@ +# SPDX-FileCopyrightText: 2022 The meson-python developers +# +# SPDX-License-Identifier: MIT + +from __future__ import annotations + +import os +import platform +import sys +import sysconfig +import typing + + +if typing.TYPE_CHECKING: # pragma: no cover + from typing import Optional, Union + + +# https://peps.python.org/pep-0425/#python-tag +INTERPRETERS = { + 'python': 'py', + 'cpython': 'cp', + 'pypy': 'pp', + 'ironpython': 'ip', + 'jython': 'jy', +} + + +_32_BIT_INTERPRETER = sys.maxsize <= 2**32 + + +def get_interpreter_tag() -> str: + name = sys.implementation.name + name = INTERPRETERS.get(name, name) + version = sys.version_info + return f'{name}{version[0]}{version[1]}' + + +def _get_config_var(name: str, default: Union[str, int, None] = None) -> Union[str, int, None]: + 
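+    # Thin wrapper around sysconfig.get_config_var() that falls back to
+    # the given default when the variable is unset.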
value: Union[str, int, None] = sysconfig.get_config_var(name)
+    if value is None:
+        return default
+    return value
+
+
+def _get_cpython_abi() -> str:
+    version = sys.version_info
+    debug = pymalloc = ''
+    if _get_config_var('Py_DEBUG', hasattr(sys, 'gettotalrefcount')):
+        debug = 'd'
+    if version < (3, 8) and _get_config_var('WITH_PYMALLOC', True):
+        pymalloc = 'm'
+    return f'cp{version[0]}{version[1]}{debug}{pymalloc}'
+
+
+def get_abi_tag() -> str:
+    # The best solution to obtain the Python ABI is to parse the
+    # $SOABI or $EXT_SUFFIX sysconfig variables as defined in PEP 3149.
+
+    # PyPy reports a $SOABI that does not agree with $EXT_SUFFIX.
+    # Using $EXT_SUFFIX will not break when PyPy fixes this.
+    # See https://foss.heptapod.net/pypy/pypy/-/issues/3816 and
+    # https://github.com/pypa/packaging/pull/607.
+    try:
+        empty, abi, ext = str(sysconfig.get_config_var('EXT_SUFFIX')).split('.')
+    except ValueError as exc:
+        # CPython <= 3.8.7 on Windows does not implement PEP 3149 and
+        # uses '.pyd' as $EXT_SUFFIX, which does not allow extracting
+        # the interpreter ABI. Check that the fallback is not hit for
+        # any other Python implementation.
+        if sys.implementation.name != 'cpython':
+            raise NotImplementedError from exc
+        return _get_cpython_abi()
+
+    # The packaging module initially based its understanding of the
+    # $SOABI variable on the inconsistent value reported by PyPy, and
+    # did not strip architecture information from it. Therefore the
+    # ABI tag for later Python implementations (all the ones not
+    # explicitly handled below) contains architecture information too.
+    # Unfortunately, fixing this now would break compatibility.
+
+    if abi.startswith('cpython'):
+        abi = 'cp' + abi.split('-')[1]
+    elif abi.startswith('cp'):
+        abi = abi.split('-')[0]
+    elif abi.startswith('pypy'):
+        abi = '_'.join(abi.split('-')[:2])
+    elif abi.startswith('graalpy'):
+        abi = '_'.join(abi.split('-')[:3])
+
+    return abi.replace('.', '_').replace('-', '_')
+
+
+def _get_macosx_platform_tag() -> str:
+    ver, _, arch = platform.mac_ver()
+
+    # Override the architecture with the one provided in the
+    # _PYTHON_HOST_PLATFORM environment variable. This environment
+    # variable affects the sysconfig.get_platform() return value and
+    # is used to cross-compile python extensions on macOS for a
+    # different architecture. We base the platform tag computation on
+    # platform.mac_ver() but respect the content of the environment
+    # variable.
+    try:
+        arch = os.environ.get('_PYTHON_HOST_PLATFORM', '').split('-')[2]
+    except IndexError:
+        pass
+
+    # Override the macOS version if one is provided via the
+    # MACOSX_DEPLOYMENT_TARGET environment variable.
+    try:
+        version = tuple(map(int, os.environ.get('MACOSX_DEPLOYMENT_TARGET', '').split('.')))[:2]
+    except ValueError:
+        version = tuple(map(int, ver.split('.')))[:2]
+
+    # Python built with an older macOS SDK on macOS 11 reports a
+    # non-existing macOS 10.16 version instead of the real version.
+    #
+    # The packaging module introduced a workaround
+    # https://github.com/pypa/packaging/commit/67c4a2820c549070bbfc4bfbf5e2a250075048da
+    #
+    # This results in packaging versions up to 21.3 generating
+    # platform tags like "macosx_10_16_x86_64" and later versions
+    # generating "macosx_11_0_x86_64". Using the latter would be more
+    # correct but prevents the resulting wheel from being installed on
+    # systems using packaging 21.3 or earlier (pip 22.3 or earlier).
+    #
+    # Fortunately packaging versions carrying the workaround still
+    # accept "macosx_10_16_x86_64" as a compatible platform tag. We
+    # can therefore ignore the issue and generate the slightly
+    # incorrect tag.
+
+    major, minor = version
+
+    if major >= 11:
+        # For macOS releases up to 10.15, the major version number is
+        # actually part of the OS name and the minor version is the
+        # actual OS release. Starting with macOS 11, the major
+        # version number is the OS release and the minor version is
+        # the patch level. Reset the patch level to zero.
+        minor = 0
+
+    if _32_BIT_INTERPRETER:
+        # 32-bit Python running on a 64-bit kernel.
+        if arch == 'ppc64':
+            arch = 'ppc'
+        if arch == 'x86_64':
+            arch = 'i386'
+
+    return f'macosx_{major}_{minor}_{arch}'
+
+
+def get_platform_tag() -> str:
+    platform = sysconfig.get_platform()
+    if platform.startswith('macosx'):
+        return _get_macosx_platform_tag()
+    if _32_BIT_INTERPRETER:
+        # 32-bit Python running on a 64-bit kernel.
+        if platform == 'linux-x86_64':
+            return 'linux_i686'
+        if platform == 'linux-aarch64':
+            return 'linux_armv7l'
+    return platform.replace('-', '_').replace('.', '_')
+
+
+class Tag:
+    def __init__(self, interpreter: Optional[str] = None, abi: Optional[str] = None, platform: Optional[str] = None):
+        self.interpreter = interpreter or get_interpreter_tag()
+        self.abi = abi or get_abi_tag()
+        self.platform = platform or get_platform_tag()
+
+    def __str__(self) -> str:
+        return f'{self.interpreter}-{self.abi}-{self.platform}'
diff --git a/vendored-meson/meson-python/mesonpy/_util.py b/vendored-meson/meson-python/mesonpy/_util.py
new file mode 100644
index 000000000000..b21edd2cdd39
--- /dev/null
+++ b/vendored-meson/meson-python/mesonpy/_util.py
@@ -0,0 +1,95 @@
+# SPDX-FileCopyrightText: 2021 Filipe Laíns
+# SPDX-FileCopyrightText: 2021 Quansight, LLC
+# SPDX-FileCopyrightText: 2022 The meson-python developers
+#
+# SPDX-License-Identifier: MIT
+
+from __future__ import annotations
+
+import contextlib
+import gzip
+import itertools
+import os
+import sys
+import tarfile
+import typing
+
+from typing import IO
+
+
+if typing.TYPE_CHECKING:  # pragma: no cover
+    from typing import Optional, Tuple
+
+    from mesonpy._compat import Iterable, Iterator, Path
+
+
+@contextlib.contextmanager
+def chdir(path: Path) -> Iterator[Path]:
+    """Context manager helper to change the current working directory -- cd."""
+    old_cwd = os.getcwd()
+    os.chdir(os.fspath(path))
+    try:
+        yield path
+    finally:
+        os.chdir(old_cwd)
+
+
+@contextlib.contextmanager
+def add_ld_path(paths: Iterable[str]) -> Iterator[None]:
+    """Context manager helper to add a path to LD_LIBRARY_PATH."""
+    old_value = os.environ.get('LD_LIBRARY_PATH')
+    old_paths = old_value.split(os.pathsep) if old_value else []
+    os.environ['LD_LIBRARY_PATH'] = os.pathsep.join([*paths, *old_paths])
+    try:
+        yield
+    finally:
+        if old_value is not None:  # pragma: no cover
+            os.environ['LD_LIBRARY_PATH'] = old_value
+
+
+@contextlib.contextmanager
+def create_targz(path: Path) -> Iterator[Tuple[tarfile.TarFile, Optional[int]]]:
+    """Opens a .tar.gz file in the file system for editing."""
+
+    # reproducibility
+    source_date_epoch = os.environ.get('SOURCE_DATE_EPOCH')
+    mtime = int(source_date_epoch) if source_date_epoch else None
+
+    os.makedirs(os.path.dirname(path), exist_ok=True)
+    file = typing.cast(IO[bytes], gzip.GzipFile(
+        path,
+        mode='wb',
+        mtime=mtime,
+    ))
+    tar = tarfile.TarFile(
+        mode='w',
+        fileobj=file,
+        format=tarfile.PAX_FORMAT,  # changed in 3.8 to GNU
+    )
+
+    with 
contextlib.closing(file), tar:
+        yield tar, mtime
+
+
+class CLICounter:
+    def __init__(self, total: int) -> None:
+        self._total = total - 1
+        self._count = itertools.count()
+
+    def update(self, description: str) -> None:
+        line = f'[{next(self._count)}/{self._total}] {description}'
+        if sys.stdout.isatty():
+            print('\r', line, sep='', end='\33[0K', flush=True)
+        else:
+            print(line)
+
+    def finish(self) -> None:
+        if sys.stdout.isatty():
+            print()
+
+
+@contextlib.contextmanager
+def cli_counter(total: int) -> Iterator[CLICounter]:
+    counter = CLICounter(total)
+    yield counter
+    counter.finish()
diff --git a/vendored-meson/meson-python/mesonpy/_wheelfile.py b/vendored-meson/meson-python/mesonpy/_wheelfile.py
new file mode 100644
index 000000000000..5958572ac32f
--- /dev/null
+++ b/vendored-meson/meson-python/mesonpy/_wheelfile.py
@@ -0,0 +1,110 @@
+# SPDX-FileCopyrightText: 2022 The meson-python developers
+#
+# SPDX-License-Identifier: MIT
+
+from __future__ import annotations
+
+import base64
+import csv
+import hashlib
+import io
+import os
+import re
+import stat
+import time
+import typing
+import zipfile
+
+
+if typing.TYPE_CHECKING:  # pragma: no cover
+    from types import TracebackType
+    from typing import List, Optional, Tuple, Type, Union
+
+    from mesonpy._compat import Path
+
+
+MIN_TIMESTAMP = 315532800  # 1980-01-01 00:00:00 UTC
+WHEEL_FILENAME_REGEX = re.compile(r'^(?P<name>[^-]+)-(?P<version>[^-]+)(:?-(?P<build>[^-]+))?-(?P<tag>[^-]+-[^-]+-[^-]+)\.whl$')
+
+
+def _b64encode(data: bytes) -> bytes:
+    return base64.urlsafe_b64encode(data).rstrip(b'=')
+
+
+class WheelFile:
+    """Implement the wheel package binary distribution format.
+
+    https://packaging.python.org/en/latest/specifications/binary-distribution-format/
+    """
+    def __new__(cls, filename: Path, mode: str = 'r', compression: int = zipfile.ZIP_DEFLATED) -> 'WheelFile':
+        if mode == 'w':
+            return super().__new__(WheelFileWriter)
+        raise NotImplementedError
+
+    @staticmethod
+    def timestamp(mtime: Optional[float] = None) -> Tuple[int, int, int, int, int, int]:
+        timestamp = int(os.environ.get('SOURCE_DATE_EPOCH', mtime or time.time()))
+        # The ZIP file format does not support timestamps before 1980.
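+        # Clamp older values (e.g. SOURCE_DATE_EPOCH=0, commonly used
+        # for reproducible builds) to 1980-01-01 00:00:00 UTC.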
+ timestamp = max(timestamp, MIN_TIMESTAMP) + return time.gmtime(timestamp)[0:6] + + @staticmethod + def hash(data: bytes) -> str: + return 'sha256=' + _b64encode(hashlib.sha256(data).digest()).decode('ascii') + + def writestr(self, zinfo_or_arcname: Union[str, zipfile.ZipInfo], data: bytes) -> None: + raise NotImplementedError + + def write(self, filename: Path, arcname: Optional[str] = None) -> None: + raise NotImplementedError + + def close(self) -> None: + raise NotImplementedError + + def __enter__(self) -> WheelFile: + return self + + def __exit__(self, exc_type: Type[BaseException], exc_val: BaseException, exc_tb: TracebackType) -> None: + self.close() + + +class WheelFileWriter(WheelFile): + def __init__(self, filepath: Path, mode: str, compression: int = zipfile.ZIP_DEFLATED): + filename = os.path.basename(filepath) + match = WHEEL_FILENAME_REGEX.match(filename) + if not match: + raise ValueError(f'invalid wheel filename: {filename!r}') + self.name = match.group('name') + self.version = match.group('version') + self.entries: List[Tuple[str, str, int]] = [] + self.archive = zipfile.ZipFile(filepath, mode='w', compression=compression, allowZip64=True) + + def writestr(self, zinfo_or_arcname: Union[str, zipfile.ZipInfo], data: bytes) -> None: + if isinstance(data, str): + data = data.encode('utf-8') + if isinstance(zinfo_or_arcname, zipfile.ZipInfo): + zinfo = zinfo_or_arcname + else: + zinfo = zipfile.ZipInfo(zinfo_or_arcname, date_time=self.timestamp()) + zinfo.external_attr = 0o664 << 16 + self.archive.writestr(zinfo, data) + self.entries.append((zinfo.filename, self.hash(data), len(data))) + + def write(self, filename: Path, arcname: Optional[str] = None) -> None: + with open(filename, 'rb') as f: + st = os.fstat(f.fileno()) + data = f.read() + zinfo = zipfile.ZipInfo(arcname or str(filename), date_time=self.timestamp(st.st_mtime)) + zinfo.external_attr = (stat.S_IMODE(st.st_mode) | stat.S_IFMT(st.st_mode)) << 16 + self.writestr(zinfo, data) + + def close(self) -> None: + record = f'{self.name}-{self.version}.dist-info/RECORD' + data = io.StringIO() + writer = csv.writer(data, delimiter=',', quotechar='"', lineterminator='\n') + writer.writerows(self.entries) + writer.writerow((record, '', '')) + zi = zipfile.ZipInfo(record, date_time=self.timestamp()) + zi.external_attr = 0o664 << 16 + self.archive.writestr(zi, data.getvalue()) + self.archive.close() diff --git a/vendored-meson/meson/COPYING b/vendored-meson/meson/COPYING new file mode 100644 index 000000000000..d64569567334 --- /dev/null +++ b/vendored-meson/meson/COPYING @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. 
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/vendored-meson/meson/meson.py b/vendored-meson/meson/meson.py new file mode 100755 index 000000000000..6f3ba2a9ee46 --- /dev/null +++ b/vendored-meson/meson/meson.py @@ -0,0 +1,40 @@ +#!/usr/bin/env python3 + +# Copyright 2016 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This file is an entry point for all commands, including scripts. Include the
+# strict minimum python modules for performance reasons.
+import sys
+
+# Check python version before importing anything else: we might have an older
+# Python that would error on f-string syntax, for example.
+if sys.version_info < (3, 7):
+    print('Meson works correctly only with python 3.7+.')
+    print('You have python {}.'.format(sys.version))
+    print('Please update your environment')
+    sys.exit(1)
+
+from pathlib import Path
+
+# If we're run uninstalled, add the script directory to sys.path to ensure that
+# we always import the correct mesonbuild modules even if PYTHONPATH is mangled
+meson_exe = Path(sys.argv[0]).resolve()
+if (meson_exe.parent / 'mesonbuild').is_dir():
+    sys.path.insert(0, str(meson_exe.parent))
+
+from mesonbuild import mesonmain
+
+if __name__ == '__main__':
+    sys.exit(mesonmain.main())
diff --git a/vendored-meson/meson/mesonbuild/__init__.py b/vendored-meson/meson/mesonbuild/__init__.py
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/vendored-meson/meson/mesonbuild/_pathlib.py b/vendored-meson/meson/mesonbuild/_pathlib.py
new file mode 100644
index 000000000000..640b5ed21dd2
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/_pathlib.py
@@ -0,0 +1,73 @@
+# Copyright 2021 The Meson development team

+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at

+# http://www.apache.org/licenses/LICENSE-2.0

+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+'''
+    This module solely exists to work around a pathlib.resolve bug on
+    certain Windows systems:
+
+    https://github.com/mesonbuild/meson/issues/7295
+    https://bugs.python.org/issue31842
+
+    It should **never** be used directly. Instead, it is automatically
+    used when `import pathlib` is used. This is achieved by messing with
+    `sys.modules['pathlib']` in mesonmain.
+
+    Additionally, the sole purpose of this module is to work around a
+    python bug. Thus, only bugfixes to pathlib functions and classes are
+    allowed here. Finally, this file should be removed once all upstream
+    python bugs are fixed and it is OK to tell our users to "just upgrade
+    python".
+'''
+
+import pathlib
+import os
+import platform
+
+__all__ = [
+    'PurePath',
+    'PurePosixPath',
+    'PureWindowsPath',
+    'Path',
+]
+
+PurePath = pathlib.PurePath
+PurePosixPath = pathlib.PurePosixPath
+PureWindowsPath = pathlib.PureWindowsPath
+
+# Only patch on platforms where the bug occurs
+if platform.system().lower() in {'windows'}:
+    # Cannot directly inherit from pathlib.Path because the __new__
+    # operator of pathlib.Path() returns a {Posix,Windows}Path object.
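+    # type(pathlib.Path()) therefore evaluates to the concrete platform
+    # class (WindowsPath on the systems patched here), so the subclass
+    # below keeps full Path behaviour while overriding only resolve().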
+ class Path(type(pathlib.Path())): + def resolve(self, strict: bool = False) -> 'Path': + ''' + Work around a resolve bug on certain Windows systems: + + https://github.com/mesonbuild/meson/issues/7295 + https://bugs.python.org/issue31842 + ''' + + try: + return super().resolve(strict=strict) + except OSError: + return Path(os.path.normpath(self)) +else: + Path = pathlib.Path + PosixPath = pathlib.PosixPath + WindowsPath = pathlib.WindowsPath + + __all__ += [ + 'PosixPath', + 'WindowsPath', + ] diff --git a/vendored-meson/meson/mesonbuild/_typing.py b/vendored-meson/meson/mesonbuild/_typing.py new file mode 100644 index 000000000000..d3cfa39d52a4 --- /dev/null +++ b/vendored-meson/meson/mesonbuild/_typing.py @@ -0,0 +1,81 @@ +# SPDX-License-Identifer: Apache-2.0 +# Copyright 2020 The Meson development team +# Copyright © 2020-2021 Intel Corporation + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Meson specific typing helpers. + +Holds typing helper classes, such as the ImmutableProtocol classes +""" + +__all__ = [ + 'Protocol', + 'ImmutableListProtocol' +] + +import typing + +# We can change this to typing when we require python 3.8 +from typing_extensions import Protocol + + +T = typing.TypeVar('T') + + +class StringProtocol(Protocol): + def __str__(self) -> str: ... + +class SizedStringProtocol(Protocol, StringProtocol, typing.Sized): + pass + +class ImmutableListProtocol(Protocol[T]): + + """A protocol used in cases where a list is returned, but should not be + mutated. + + This provides all of the methods of a Sequence, as well as copy(). copy() + returns a list, which allows mutation as it's a copy and that's (hopefully) + safe. + + One particular case this is important is for cached values, since python is + a pass-by-reference language. + """ + + def __iter__(self) -> typing.Iterator[T]: ... + + @typing.overload + def __getitem__(self, index: int) -> T: ... + @typing.overload + def __getitem__(self, index: slice) -> typing.List[T]: ... + + def __contains__(self, item: T) -> bool: ... + + def __reversed__(self) -> typing.Iterator[T]: ... + + def __len__(self) -> int: ... + + def __add__(self, other: typing.List[T]) -> typing.List[T]: ... + + def __eq__(self, other: typing.Any) -> bool: ... + def __ne__(self, other: typing.Any) -> bool: ... + def __le__(self, other: typing.Any) -> bool: ... + def __lt__(self, other: typing.Any) -> bool: ... + def __gt__(self, other: typing.Any) -> bool: ... + def __ge__(self, other: typing.Any) -> bool: ... + + def count(self, item: T) -> int: ... + + def index(self, item: T) -> int: ... + + def copy(self) -> typing.List[T]: ... 
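+
+# Usage sketch (illustrative only, not part of the vendored interface):
+# annotating a cached return value with ImmutableListProtocol warns
+# callers against mutating the shared list; callers that need to modify
+# it call .copy() explicitly:
+#
+#     def get_link_args(self) -> ImmutableListProtocol[str]:
+#         return self._cached_link_args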
diff --git a/vendored-meson/meson/mesonbuild/arglist.py b/vendored-meson/meson/mesonbuild/arglist.py new file mode 100644 index 000000000000..c44728ab762b --- /dev/null +++ b/vendored-meson/meson/mesonbuild/arglist.py @@ -0,0 +1,331 @@ +# Copyright 2012-2020 The Meson development team +# Copyright © 2020 Intel Corporation + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from __future__ import annotations + +from functools import lru_cache +import collections +import enum +import os +import re +import typing as T + +if T.TYPE_CHECKING: + from .linkers.linkers import StaticLinker + from .compilers import Compiler + +# execinfo is a compiler lib on BSD +UNIXY_COMPILER_INTERNAL_LIBS = ['m', 'c', 'pthread', 'dl', 'rt', 'execinfo'] # type: T.List[str] + + +class Dedup(enum.Enum): + + """What kind of deduplication can be done to compiler args. + + OVERRIDDEN - Whether an argument can be 'overridden' by a later argument. + For example, -DFOO defines FOO and -UFOO undefines FOO. In this case, + we can safely remove the previous occurrence and add a new one. The + same is true for include paths and library paths with -I and -L. + UNIQUE - Arguments that once specified cannot be undone, such as `-c` or + `-pipe`. New instances of these can be completely skipped. + NO_DEDUP - Whether it matters where or how many times on the command-line + a particular argument is present. This can matter for symbol + resolution in static or shared libraries, so we cannot de-dup or + reorder them. + """ + + NO_DEDUP = 0 + UNIQUE = 1 + OVERRIDDEN = 2 + + +class CompilerArgs(T.MutableSequence[str]): + ''' + List-like class that manages a list of compiler arguments. Should be used + while constructing compiler arguments from various sources. Can be + operated with ordinary lists, so this does not need to be used + everywhere. + + All arguments must be inserted and stored in GCC-style (-lfoo, -Idir, etc) + and can converted to the native type of each compiler by using the + .to_native() method to which you must pass an instance of the compiler or + the compiler class. + + New arguments added to this class (either with .append(), .extend(), or +=) + are added in a way that ensures that they override previous arguments. + For example: + + >>> a = ['-Lfoo', '-lbar'] + >>> a += ['-Lpho', '-lbaz'] + >>> print(a) + ['-Lpho', '-Lfoo', '-lbar', '-lbaz'] + + Arguments will also be de-duped if they can be de-duped safely. + + Note that because of all this, this class is not commutative and does not + preserve the order of arguments if it is safe to not. For example: + >>> ['-Ifoo', '-Ibar'] + ['-Ifez', '-Ibaz', '-Werror'] + ['-Ifez', '-Ibaz', '-Ifoo', '-Ibar', '-Werror'] + >>> ['-Ifez', '-Ibaz', '-Werror'] + ['-Ifoo', '-Ibar'] + ['-Ifoo', '-Ibar', '-Ifez', '-Ibaz', '-Werror'] + + ''' + # Arg prefixes that override by prepending instead of appending + prepend_prefixes = () # type: T.Tuple[str, ...] + + # Arg prefixes and args that must be de-duped by returning 2 + dedup2_prefixes = () # type: T.Tuple[str, ...] 
+ dedup2_suffixes = () # type: T.Tuple[str, ...] + dedup2_args = () # type: T.Tuple[str, ...] + + # Arg prefixes and args that must be de-duped by returning 1 + # + # NOTE: not thorough. A list of potential corner cases can be found in + # https://github.com/mesonbuild/meson/pull/4593#pullrequestreview-182016038 + dedup1_prefixes = () # type: T.Tuple[str, ...] + dedup1_suffixes = ('.lib', '.dll', '.so', '.dylib', '.a') # type: T.Tuple[str, ...] + # Match a .so of the form path/to/libfoo.so.0.1.0 + # Only UNIX shared libraries require this. Others have a fixed extension. + dedup1_regex = re.compile(r'([\/\\]|\A)lib.*\.so(\.[0-9]+)?(\.[0-9]+)?(\.[0-9]+)?$') + dedup1_args = () # type: T.Tuple[str, ...] + # In generate_link() we add external libs without de-dup, but we must + # *always* de-dup these because they're special arguments to the linker + # TODO: these should probably move too + always_dedup_args = tuple('-l' + lib for lib in UNIXY_COMPILER_INTERNAL_LIBS) # type : T.Tuple[str, ...] + + def __init__(self, compiler: T.Union['Compiler', 'StaticLinker'], + iterable: T.Optional[T.Iterable[str]] = None): + self.compiler = compiler + self._container = list(iterable) if iterable is not None else [] # type: T.List[str] + self.pre = collections.deque() # type: T.Deque[str] + self.post = collections.deque() # type: T.Deque[str] + + # Flush the saved pre and post list into the _container list + # + # This correctly deduplicates the entries after _can_dedup definition + # Note: This function is designed to work without delete operations, as deletions are worsening the performance a lot. + def flush_pre_post(self) -> None: + new = [] # type: T.List[str] + pre_flush_set = set() # type: T.Set[str] + post_flush = collections.deque() # type: T.Deque[str] + post_flush_set = set() # type: T.Set[str] + + #The two lists are here walked from the front to the back, in order to not need removals for deduplication + for a in self.pre: + dedup = self._can_dedup(a) + if a not in pre_flush_set: + new.append(a) + if dedup is Dedup.OVERRIDDEN: + pre_flush_set.add(a) + for a in reversed(self.post): + dedup = self._can_dedup(a) + if a not in post_flush_set: + post_flush.appendleft(a) + if dedup is Dedup.OVERRIDDEN: + post_flush_set.add(a) + + #pre and post will overwrite every element that is in the container + #only copy over args that are in _container but not in the post flush or pre flush set + if pre_flush_set or post_flush_set: + for a in self._container: + if a not in post_flush_set and a not in pre_flush_set: + new.append(a) + else: + new.extend(self._container) + new.extend(post_flush) + + self._container = new + self.pre.clear() + self.post.clear() + + def __iter__(self) -> T.Iterator[str]: + self.flush_pre_post() + return iter(self._container) + + @T.overload # noqa: F811 + def __getitem__(self, index: int) -> str: # noqa: F811 + pass + + @T.overload # noqa: F811 + def __getitem__(self, index: slice) -> T.MutableSequence[str]: # noqa: F811 + pass + + def __getitem__(self, index: T.Union[int, slice]) -> T.Union[str, T.MutableSequence[str]]: # noqa: F811 + self.flush_pre_post() + return self._container[index] + + @T.overload # noqa: F811 + def __setitem__(self, index: int, value: str) -> None: # noqa: F811 + pass + + @T.overload # noqa: F811 + def __setitem__(self, index: slice, value: T.Iterable[str]) -> None: # noqa: F811 + pass + + def __setitem__(self, index: T.Union[int, slice], value: T.Union[str, T.Iterable[str]]) -> None: # noqa: F811 + self.flush_pre_post() + self._container[index] = value # type: 
ignore # TODO: fix 'Invalid index type' and 'Incompatible types in assignment' errors + + def __delitem__(self, index: T.Union[int, slice]) -> None: + self.flush_pre_post() + del self._container[index] + + def __len__(self) -> int: + return len(self._container) + len(self.pre) + len(self.post) + + def insert(self, index: int, value: str) -> None: + self.flush_pre_post() + self._container.insert(index, value) + + def copy(self) -> 'CompilerArgs': + self.flush_pre_post() + return type(self)(self.compiler, self._container.copy()) + + @classmethod + @lru_cache(maxsize=None) + def _can_dedup(cls, arg: str) -> Dedup: + """Returns whether the argument can be safely de-duped. + + In addition to these, we handle library arguments specially. + With GNU ld, we surround library arguments with -Wl,--start/end-group + to recursively search for symbols in the libraries. This is not needed + with other linkers. + """ + + # A standalone argument must never be deduplicated because it is + # defined by what comes _after_ it. Thus deduping this: + # -D FOO -D BAR + # would yield either + # -D FOO BAR + # or + # FOO -D BAR + # both of which are invalid. + if arg in cls.dedup2_prefixes: + return Dedup.NO_DEDUP + if arg in cls.dedup2_args or \ + arg.startswith(cls.dedup2_prefixes) or \ + arg.endswith(cls.dedup2_suffixes): + return Dedup.OVERRIDDEN + if arg in cls.dedup1_args or \ + arg.startswith(cls.dedup1_prefixes) or \ + arg.endswith(cls.dedup1_suffixes) or \ + re.search(cls.dedup1_regex, arg): + return Dedup.UNIQUE + return Dedup.NO_DEDUP + + @classmethod + @lru_cache(maxsize=None) + def _should_prepend(cls, arg: str) -> bool: + return arg.startswith(cls.prepend_prefixes) + + def to_native(self, copy: bool = False) -> T.List[str]: + # Check if we need to add --start/end-group for circular dependencies + # between static libraries, and for recursively searching for symbols + # needed by static libraries that are provided by object files or + # shared libraries. + self.flush_pre_post() + if copy: + new = self.copy() + else: + new = self + return self.compiler.unix_args_to_native(new._container) + + def append_direct(self, arg: str) -> None: + ''' + Append the specified argument without any reordering or de-dup except + for absolute paths to libraries, etc, which can always be de-duped + safely. 
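+
+        For example, an absolute path such as '/usr/lib/libfoo.a' is
+        routed through append() and therefore de-duplicated, while a
+        bare '-lfoo' is appended to the underlying list verbatim.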
+ ''' + self.flush_pre_post() + if os.path.isabs(arg): + self.append(arg) + else: + self._container.append(arg) + + def extend_direct(self, iterable: T.Iterable[str]) -> None: + ''' + Extend using the elements in the specified iterable without any + reordering or de-dup except for absolute paths where the order of + include search directories is not relevant + ''' + self.flush_pre_post() + for elem in iterable: + self.append_direct(elem) + + def extend_preserving_lflags(self, iterable: T.Iterable[str]) -> None: + normal_flags = [] + lflags = [] + for i in iterable: + if i not in self.always_dedup_args and (i.startswith('-l') or i.startswith('-L')): + lflags.append(i) + else: + normal_flags.append(i) + self.extend(normal_flags) + self.extend_direct(lflags) + + def __add__(self, args: T.Iterable[str]) -> 'CompilerArgs': + self.flush_pre_post() + new = self.copy() + new += args + return new + + def __iadd__(self, args: T.Iterable[str]) -> 'CompilerArgs': + ''' + Add two CompilerArgs while taking into account overriding of arguments + and while preserving the order of arguments as much as possible + ''' + tmp_pre = collections.deque() # type: T.Deque[str] + if not isinstance(args, collections.abc.Iterable): + raise TypeError(f'can only concatenate Iterable[str] (not "{args}") to CompilerArgs') + for arg in args: + # If the argument can be de-duped, do it either by removing the + # previous occurrence of it and adding a new one, or not adding the + # new occurrence. + dedup = self._can_dedup(arg) + if dedup is Dedup.UNIQUE: + # Argument already exists and adding a new instance is useless + if arg in self._container or arg in self.pre or arg in self.post: + continue + if self._should_prepend(arg): + tmp_pre.appendleft(arg) + else: + self.post.append(arg) + self.pre.extendleft(tmp_pre) + #pre and post is going to be merged later before a iter call + return self + + def __radd__(self, args: T.Iterable[str]) -> 'CompilerArgs': + self.flush_pre_post() + new = type(self)(self.compiler, args) + new += self + return new + + def __eq__(self, other: object) -> T.Union[bool]: + self.flush_pre_post() + # Only allow equality checks against other CompilerArgs and lists instances + if isinstance(other, CompilerArgs): + return self.compiler == other.compiler and self._container == other._container + elif isinstance(other, list): + return self._container == other + return NotImplemented + + def append(self, arg: str) -> None: + self += [arg] + + def extend(self, args: T.Iterable[str]) -> None: + self += args + + def __repr__(self) -> str: + self.flush_pre_post() + return f'CompilerArgs({self.compiler!r}, {self._container!r})' diff --git a/vendored-meson/meson/mesonbuild/ast/__init__.py b/vendored-meson/meson/mesonbuild/ast/__init__.py new file mode 100644 index 000000000000..d14620f71228 --- /dev/null +++ b/vendored-meson/meson/mesonbuild/ast/__init__.py @@ -0,0 +1,34 @@ +# Copyright 2019 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +# This class contains the basic functionality needed to run any interpreter +# or an interpreter-based tool. + +__all__ = [ + 'AstConditionLevel', + 'AstInterpreter', + 'AstIDGenerator', + 'AstIndentationGenerator', + 'AstJSONPrinter', + 'AstVisitor', + 'AstPrinter', + 'IntrospectionInterpreter', + 'BUILD_TARGET_FUNCTIONS', +] + +from .interpreter import AstInterpreter +from .introspection import IntrospectionInterpreter, BUILD_TARGET_FUNCTIONS +from .visitor import AstVisitor +from .postprocess import AstConditionLevel, AstIDGenerator, AstIndentationGenerator +from .printer import AstPrinter, AstJSONPrinter diff --git a/vendored-meson/meson/mesonbuild/ast/interpreter.py b/vendored-meson/meson/mesonbuild/ast/interpreter.py new file mode 100644 index 000000000000..68e2b6e5254f --- /dev/null +++ b/vendored-meson/meson/mesonbuild/ast/interpreter.py @@ -0,0 +1,447 @@ +# Copyright 2016 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# This class contains the basic functionality needed to run any interpreter +# or an interpreter-based tool. +from __future__ import annotations + +import os +import sys +import typing as T + +from .. import mparser, mesonlib +from .. import environment + +from ..interpreterbase import ( + MesonInterpreterObject, + InterpreterBase, + InvalidArguments, + BreakRequest, + ContinueRequest, + default_resolve_key, +) + +from ..interpreter import ( + StringHolder, + BooleanHolder, + IntegerHolder, + ArrayHolder, + DictHolder, +) + +from ..mparser import ( + ArgumentNode, + ArithmeticNode, + ArrayNode, + AssignmentNode, + BaseNode, + ElementaryNode, + EmptyNode, + IdNode, + MethodNode, + NotNode, + PlusAssignmentNode, + TernaryNode, +) + +if T.TYPE_CHECKING: + from .visitor import AstVisitor + from ..interpreter import Interpreter + from ..interpreterbase import TYPE_nkwargs, TYPE_nvar + from ..mparser import ( + AndNode, + ComparisonNode, + ForeachClauseNode, + IfClauseNode, + IndexNode, + OrNode, + UMinusNode, + ) + +class DontCareObject(MesonInterpreterObject): + pass + +class MockExecutable(MesonInterpreterObject): + pass + +class MockStaticLibrary(MesonInterpreterObject): + pass + +class MockSharedLibrary(MesonInterpreterObject): + pass + +class MockCustomTarget(MesonInterpreterObject): + pass + +class MockRunTarget(MesonInterpreterObject): + pass + +ADD_SOURCE = 0 +REMOVE_SOURCE = 1 + +_T = T.TypeVar('_T') +_V = T.TypeVar('_V') + +class AstInterpreter(InterpreterBase): + def __init__(self, source_root: str, subdir: str, subproject: str, visitors: T.Optional[T.List[AstVisitor]] = None): + super().__init__(source_root, subdir, subproject) + self.visitors = visitors if visitors is not None else [] + self.processed_buildfiles = set() # type: T.Set[str] + self.assignments = {} # type: T.Dict[str, BaseNode] + self.assign_vals = {} # type: T.Dict[str, T.Any] + self.reverse_assignment = {} # type: T.Dict[str, BaseNode] + self.funcs.update({'project': self.func_do_nothing, + 'test': self.func_do_nothing, + 'benchmark': self.func_do_nothing, + 
'install_headers': self.func_do_nothing, + 'install_man': self.func_do_nothing, + 'install_data': self.func_do_nothing, + 'install_subdir': self.func_do_nothing, + 'install_symlink': self.func_do_nothing, + 'install_emptydir': self.func_do_nothing, + 'configuration_data': self.func_do_nothing, + 'configure_file': self.func_do_nothing, + 'find_program': self.func_do_nothing, + 'include_directories': self.func_do_nothing, + 'add_global_arguments': self.func_do_nothing, + 'add_global_link_arguments': self.func_do_nothing, + 'add_project_arguments': self.func_do_nothing, + 'add_project_dependencies': self.func_do_nothing, + 'add_project_link_arguments': self.func_do_nothing, + 'message': self.func_do_nothing, + 'generator': self.func_do_nothing, + 'error': self.func_do_nothing, + 'run_command': self.func_do_nothing, + 'assert': self.func_do_nothing, + 'subproject': self.func_do_nothing, + 'dependency': self.func_do_nothing, + 'get_option': self.func_do_nothing, + 'join_paths': self.func_do_nothing, + 'environment': self.func_do_nothing, + 'import': self.func_do_nothing, + 'vcs_tag': self.func_do_nothing, + 'add_languages': self.func_do_nothing, + 'declare_dependency': self.func_do_nothing, + 'files': self.func_do_nothing, + 'executable': self.func_do_nothing, + 'static_library': self.func_do_nothing, + 'shared_library': self.func_do_nothing, + 'library': self.func_do_nothing, + 'build_target': self.func_do_nothing, + 'custom_target': self.func_do_nothing, + 'run_target': self.func_do_nothing, + 'subdir': self.func_subdir, + 'set_variable': self.func_do_nothing, + 'get_variable': self.func_do_nothing, + 'unset_variable': self.func_do_nothing, + 'is_disabler': self.func_do_nothing, + 'is_variable': self.func_do_nothing, + 'disabler': self.func_do_nothing, + 'gettext': self.func_do_nothing, + 'jar': self.func_do_nothing, + 'warning': self.func_do_nothing, + 'shared_module': self.func_do_nothing, + 'option': self.func_do_nothing, + 'both_libraries': self.func_do_nothing, + 'add_test_setup': self.func_do_nothing, + 'find_library': self.func_do_nothing, + 'subdir_done': self.func_do_nothing, + 'alias_target': self.func_do_nothing, + 'summary': self.func_do_nothing, + 'range': self.func_do_nothing, + 'structured_sources': self.func_do_nothing, + 'debug': self.func_do_nothing, + }) + + def _unholder_args(self, args: _T, kwargs: _V) -> T.Tuple[_T, _V]: + return args, kwargs + + def _holderify(self, res: _T) -> _T: + return res + + def func_do_nothing(self, node: BaseNode, args: T.List[TYPE_nvar], kwargs: T.Dict[str, TYPE_nvar]) -> bool: + return True + + def load_root_meson_file(self) -> None: + super().load_root_meson_file() + for i in self.visitors: + self.ast.accept(i) + + def func_subdir(self, node: BaseNode, args: T.List[TYPE_nvar], kwargs: T.Dict[str, TYPE_nvar]) -> None: + args = self.flatten_args(args) + if len(args) != 1 or not isinstance(args[0], str): + sys.stderr.write(f'Unable to evaluate subdir({args}) in AstInterpreter --> Skipping\n') + return + + prev_subdir = self.subdir + subdir = os.path.join(prev_subdir, args[0]) + absdir = os.path.join(self.source_root, subdir) + buildfilename = os.path.join(subdir, environment.build_filename) + absname = os.path.join(self.source_root, buildfilename) + symlinkless_dir = os.path.realpath(absdir) + build_file = os.path.join(symlinkless_dir, 'meson.build') + if build_file in self.processed_buildfiles: + sys.stderr.write('Trying to enter {} which has already been visited --> Skipping\n'.format(args[0])) + return + 
self.processed_buildfiles.add(build_file) + + if not os.path.isfile(absname): + sys.stderr.write(f'Unable to find build file {buildfilename} --> Skipping\n') + return + with open(absname, encoding='utf-8') as f: + code = f.read() + assert isinstance(code, str) + try: + codeblock = mparser.Parser(code, absname).parse() + except mesonlib.MesonException as me: + me.file = absname + raise me + + self.subdir = subdir + for i in self.visitors: + codeblock.accept(i) + self.evaluate_codeblock(codeblock) + self.subdir = prev_subdir + + def method_call(self, node: BaseNode) -> bool: + return True + + def evaluate_fstring(self, node: mparser.FormatStringNode) -> str: + assert isinstance(node, mparser.FormatStringNode) + return node.value + + def evaluate_arraystatement(self, cur: mparser.ArrayNode) -> TYPE_nvar: + return self.reduce_arguments(cur.args)[0] + + def evaluate_arithmeticstatement(self, cur: ArithmeticNode) -> int: + self.evaluate_statement(cur.left) + self.evaluate_statement(cur.right) + return 0 + + def evaluate_uminusstatement(self, cur: UMinusNode) -> int: + self.evaluate_statement(cur.value) + return 0 + + def evaluate_ternary(self, node: TernaryNode) -> None: + assert isinstance(node, TernaryNode) + self.evaluate_statement(node.condition) + self.evaluate_statement(node.trueblock) + self.evaluate_statement(node.falseblock) + + def evaluate_dictstatement(self, node: mparser.DictNode) -> TYPE_nkwargs: + def resolve_key(node: mparser.BaseNode) -> str: + if isinstance(node, mparser.StringNode): + return node.value + return '__AST_UNKNOWN__' + arguments, kwargs = self.reduce_arguments(node.args, key_resolver=resolve_key) + assert not arguments + self.argument_depth += 1 + for key, value in kwargs.items(): + if isinstance(key, BaseNode): + self.evaluate_statement(key) + self.argument_depth -= 1 + return {} + + def evaluate_plusassign(self, node: PlusAssignmentNode) -> None: + assert isinstance(node, PlusAssignmentNode) + # Cheat by doing a reassignment + self.assignments[node.var_name] = node.value # Save a reference to the value node + if node.value.ast_id: + self.reverse_assignment[node.value.ast_id] = node + self.assign_vals[node.var_name] = self.evaluate_statement(node.value) + + def evaluate_indexing(self, node: IndexNode) -> int: + return 0 + + def unknown_function_called(self, func_name: str) -> None: + pass + + def reduce_arguments( + self, + args: mparser.ArgumentNode, + key_resolver: T.Callable[[mparser.BaseNode], str] = default_resolve_key, + duplicate_key_error: T.Optional[str] = None, + ) -> T.Tuple[T.List[TYPE_nvar], TYPE_nkwargs]: + if isinstance(args, ArgumentNode): + kwargs = {} # type: T.Dict[str, TYPE_nvar] + for key, val in args.kwargs.items(): + kwargs[key_resolver(key)] = val + if args.incorrect_order(): + raise InvalidArguments('All keyword arguments must be after positional arguments.') + return self.flatten_args(args.arguments), kwargs + else: + return self.flatten_args(args), {} + + def evaluate_comparison(self, node: ComparisonNode) -> bool: + self.evaluate_statement(node.left) + self.evaluate_statement(node.right) + return False + + def evaluate_andstatement(self, cur: AndNode) -> bool: + self.evaluate_statement(cur.left) + self.evaluate_statement(cur.right) + return False + + def evaluate_orstatement(self, cur: OrNode) -> bool: + self.evaluate_statement(cur.left) + self.evaluate_statement(cur.right) + return False + + def evaluate_notstatement(self, cur: NotNode) -> bool: + self.evaluate_statement(cur.value) + return False + + def evaluate_foreach(self, node: 
ForeachClauseNode) -> None: + try: + self.evaluate_codeblock(node.block) + except ContinueRequest: + pass + except BreakRequest: + pass + + def evaluate_if(self, node: IfClauseNode) -> None: + for i in node.ifs: + self.evaluate_codeblock(i.block) + if not isinstance(node.elseblock, EmptyNode): + self.evaluate_codeblock(node.elseblock) + + def get_variable(self, varname: str) -> int: + return 0 + + def assignment(self, node: AssignmentNode) -> None: + assert isinstance(node, AssignmentNode) + self.assignments[node.var_name] = node.value # Save a reference to the value node + if node.value.ast_id: + self.reverse_assignment[node.value.ast_id] = node + self.assign_vals[node.var_name] = self.evaluate_statement(node.value) # Evaluate the value just in case + + def resolve_node(self, node: BaseNode, include_unknown_args: bool = False, id_loop_detect: T.Optional[T.List[str]] = None) -> T.Optional[T.Any]: + def quick_resolve(n: BaseNode, loop_detect: T.Optional[T.List[str]] = None) -> T.Any: + if loop_detect is None: + loop_detect = [] + if isinstance(n, IdNode): + assert isinstance(n.value, str) + if n.value in loop_detect or n.value not in self.assignments: + return [] + return quick_resolve(self.assignments[n.value], loop_detect = loop_detect + [n.value]) + elif isinstance(n, ElementaryNode): + return n.value + else: + return n + + if id_loop_detect is None: + id_loop_detect = [] + result = None + + if not isinstance(node, BaseNode): + return None + + assert node.ast_id + if node.ast_id in id_loop_detect: + return None # Loop detected + id_loop_detect += [node.ast_id] + + # Try to evaluate the value of the node + if isinstance(node, IdNode): + result = quick_resolve(node) + + elif isinstance(node, ElementaryNode): + result = node.value + + elif isinstance(node, NotNode): + result = self.resolve_node(node.value, include_unknown_args, id_loop_detect) + if isinstance(result, bool): + result = not result + + elif isinstance(node, ArrayNode): + result = node.args.arguments.copy() + + elif isinstance(node, ArgumentNode): + result = node.arguments.copy() + + elif isinstance(node, ArithmeticNode): + if node.operation != 'add': + return None # Only handle string and array concats + l = quick_resolve(node.left) + r = quick_resolve(node.right) + if isinstance(l, str) and isinstance(r, str): + result = l + r # String concatenation detected + else: + result = self.flatten_args(l, include_unknown_args, id_loop_detect) + self.flatten_args(r, include_unknown_args, id_loop_detect) + + elif isinstance(node, MethodNode): + src = quick_resolve(node.source_object) + margs = self.flatten_args(node.args.arguments, include_unknown_args, id_loop_detect) + mkwargs = {} # type: T.Dict[str, TYPE_nvar] + try: + if isinstance(src, str): + result = StringHolder(src, T.cast('Interpreter', self)).method_call(node.name, margs, mkwargs) + elif isinstance(src, bool): + result = BooleanHolder(src, T.cast('Interpreter', self)).method_call(node.name, margs, mkwargs) + elif isinstance(src, int): + result = IntegerHolder(src, T.cast('Interpreter', self)).method_call(node.name, margs, mkwargs) + elif isinstance(src, list): + result = ArrayHolder(src, T.cast('Interpreter', self)).method_call(node.name, margs, mkwargs) + elif isinstance(src, dict): + result = DictHolder(src, T.cast('Interpreter', self)).method_call(node.name, margs, mkwargs) + except mesonlib.MesonException: + return None + + # Ensure that the result is fully resolved (no more nodes) + if isinstance(result, BaseNode): + result = self.resolve_node(result, 
include_unknown_args, id_loop_detect) + elif isinstance(result, list): + new_res = [] # type: T.List[TYPE_nvar] + for i in result: + if isinstance(i, BaseNode): + resolved = self.resolve_node(i, include_unknown_args, id_loop_detect) + if resolved is not None: + new_res += self.flatten_args(resolved, include_unknown_args, id_loop_detect) + else: + new_res += [i] + result = new_res + + return result + + def flatten_args(self, args_raw: T.Union[TYPE_nvar, T.Sequence[TYPE_nvar]], include_unknown_args: bool = False, id_loop_detect: T.Optional[T.List[str]] = None) -> T.List[TYPE_nvar]: + # Make sure we are always dealing with lists + if isinstance(args_raw, list): + args = args_raw + else: + args = [args_raw] + + flattened_args = [] # type: T.List[TYPE_nvar] + + # Resolve the contents of args + for i in args: + if isinstance(i, BaseNode): + resolved = self.resolve_node(i, include_unknown_args, id_loop_detect) + if resolved is not None: + if not isinstance(resolved, list): + resolved = [resolved] + flattened_args += resolved + elif isinstance(i, (str, bool, int, float)) or include_unknown_args: + flattened_args += [i] + return flattened_args + + def flatten_kwargs(self, kwargs: T.Dict[str, TYPE_nvar], include_unknown_args: bool = False) -> T.Dict[str, TYPE_nvar]: + flattened_kwargs = {} + for key, val in kwargs.items(): + if isinstance(val, BaseNode): + resolved = self.resolve_node(val, include_unknown_args) + if resolved is not None: + flattened_kwargs[key] = resolved + elif isinstance(val, (str, bool, int, float)) or include_unknown_args: + flattened_kwargs[key] = val + return flattened_kwargs diff --git a/vendored-meson/meson/mesonbuild/ast/introspection.py b/vendored-meson/meson/mesonbuild/ast/introspection.py new file mode 100644 index 000000000000..d66e73f3e320 --- /dev/null +++ b/vendored-meson/meson/mesonbuild/ast/introspection.py @@ -0,0 +1,364 @@ +# Copyright 2018 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# This class contains the basic functionality needed to run any interpreter +# or an interpreter-based tool + +from __future__ import annotations +import argparse +import copy +import os +import typing as T + +from .. import compilers, environment, mesonlib, optinterpreter +from .. 
import coredata as cdata +from ..build import Executable, Jar, SharedLibrary, SharedModule, StaticLibrary +from ..compilers import detect_compiler_for +from ..interpreterbase import InvalidArguments +from ..mesonlib import MachineChoice, OptionKey +from ..mparser import BaseNode, ArithmeticNode, ArrayNode, ElementaryNode, IdNode, FunctionNode, StringNode +from .interpreter import AstInterpreter + +if T.TYPE_CHECKING: + from ..build import BuildTarget + from ..interpreterbase import TYPE_nvar + from .visitor import AstVisitor + + +# TODO: it would be nice to not have to duplicate this +BUILD_TARGET_FUNCTIONS = [ + 'executable', 'jar', 'library', 'shared_library', 'shared_module', + 'static_library', 'both_libraries' +] + +class IntrospectionHelper(argparse.Namespace): + # mimic an argparse namespace + def __init__(self, cross_file: str): + super().__init__() + self.cross_file = cross_file # type: str + self.native_file = None # type: str + self.cmd_line_options = {} # type: T.Dict[str, str] + + def __eq__(self, other: object) -> bool: + return NotImplemented + +class IntrospectionInterpreter(AstInterpreter): + # Interpreter to detect the options without a build directory + # Most of the code is stolen from interpreter.Interpreter + def __init__(self, + source_root: str, + subdir: str, + backend: str, + visitors: T.Optional[T.List[AstVisitor]] = None, + cross_file: T.Optional[str] = None, + subproject: str = '', + subproject_dir: str = 'subprojects', + env: T.Optional[environment.Environment] = None): + visitors = visitors if visitors is not None else [] + super().__init__(source_root, subdir, subproject, visitors=visitors) + + options = IntrospectionHelper(cross_file) + self.cross_file = cross_file + if env is None: + self.environment = environment.Environment(source_root, None, options) + else: + self.environment = env + self.subproject_dir = subproject_dir + self.coredata = self.environment.get_coredata() + self.backend = backend + self.default_options = {OptionKey('backend'): self.backend} + self.project_data = {} # type: T.Dict[str, T.Any] + self.targets = [] # type: T.List[T.Dict[str, T.Any]] + self.dependencies = [] # type: T.List[T.Dict[str, T.Any]] + self.project_node = None # type: BaseNode + + self.funcs.update({ + 'add_languages': self.func_add_languages, + 'dependency': self.func_dependency, + 'executable': self.func_executable, + 'jar': self.func_jar, + 'library': self.func_library, + 'project': self.func_project, + 'shared_library': self.func_shared_lib, + 'shared_module': self.func_shared_module, + 'static_library': self.func_static_lib, + 'both_libraries': self.func_both_lib, + }) + + def func_project(self, node: BaseNode, args: T.List[TYPE_nvar], kwargs: T.Dict[str, TYPE_nvar]) -> None: + if self.project_node: + raise InvalidArguments('Second call to project()') + self.project_node = node + if len(args) < 1: + raise InvalidArguments('Not enough arguments to project(). 
Needs at least the project name.') + + proj_name = args[0] + proj_vers = kwargs.get('version', 'undefined') + proj_langs = self.flatten_args(args[1:]) + if isinstance(proj_vers, ElementaryNode): + proj_vers = proj_vers.value + if not isinstance(proj_vers, str): + proj_vers = 'undefined' + self.project_data = {'descriptive_name': proj_name, 'version': proj_vers} + + optfile = os.path.join(self.source_root, self.subdir, 'meson.options') + if not os.path.exists(optfile): + optfile = os.path.join(self.source_root, self.subdir, 'meson_options.txt') + if os.path.exists(optfile): + oi = optinterpreter.OptionInterpreter(self.subproject) + oi.process(optfile) + self.coredata.update_project_options(oi.options) + + def_opts = self.flatten_args(kwargs.get('default_options', [])) + _project_default_options = mesonlib.stringlistify(def_opts) + self.project_default_options = cdata.create_options_dict(_project_default_options, self.subproject) + self.default_options.update(self.project_default_options) + self.coredata.set_default_options(self.default_options, self.subproject, self.environment) + + if not self.is_subproject() and 'subproject_dir' in kwargs: + spdirname = kwargs['subproject_dir'] + if isinstance(spdirname, StringNode): + assert isinstance(spdirname.value, str) + self.subproject_dir = spdirname.value + if not self.is_subproject(): + self.project_data['subprojects'] = [] + subprojects_dir = os.path.join(self.source_root, self.subproject_dir) + if os.path.isdir(subprojects_dir): + for i in os.listdir(subprojects_dir): + if os.path.isdir(os.path.join(subprojects_dir, i)): + self.do_subproject(i) + + self.coredata.init_backend_options(self.backend) + options = {k: v for k, v in self.environment.options.items() if k.is_backend()} + + self.coredata.set_options(options) + self._add_languages(proj_langs, True, MachineChoice.HOST) + self._add_languages(proj_langs, True, MachineChoice.BUILD) + + def do_subproject(self, dirname: str) -> None: + subproject_dir_abs = os.path.join(self.environment.get_source_dir(), self.subproject_dir) + subpr = os.path.join(subproject_dir_abs, dirname) + try: + subi = IntrospectionInterpreter(subpr, '', self.backend, cross_file=self.cross_file, subproject=dirname, subproject_dir=self.subproject_dir, env=self.environment, visitors=self.visitors) + subi.analyze() + subi.project_data['name'] = dirname + self.project_data['subprojects'] += [subi.project_data] + except (mesonlib.MesonException, RuntimeError): + return + + def func_add_languages(self, node: BaseNode, args: T.List[TYPE_nvar], kwargs: T.Dict[str, TYPE_nvar]) -> None: + kwargs = self.flatten_kwargs(kwargs) + required = kwargs.get('required', True) + if isinstance(required, cdata.UserFeatureOption): + required = required.is_enabled() + if 'native' in kwargs: + native = kwargs.get('native', False) + self._add_languages(args, required, MachineChoice.BUILD if native else MachineChoice.HOST) + else: + for for_machine in [MachineChoice.BUILD, MachineChoice.HOST]: + self._add_languages(args, required, for_machine) + + def _add_languages(self, raw_langs: T.List[TYPE_nvar], required: bool, for_machine: MachineChoice) -> None: + langs = [] # type: T.List[str] + for l in self.flatten_args(raw_langs): + if isinstance(l, str): + langs.append(l) + elif isinstance(l, StringNode): + langs.append(l.value) + + for lang in sorted(langs, key=compilers.sort_clink): + lang = lang.lower() + if lang not in self.coredata.compilers[for_machine]: + try: + comp = detect_compiler_for(self.environment, lang, for_machine, True) + except 
mesonlib.MesonException: + # do we even care about introspecting this language? + if required: + raise + else: + continue + if self.subproject: + options = {} + for k in comp.get_options(): + v = copy.copy(self.coredata.options[k]) + k = k.evolve(subproject=self.subproject) + options[k] = v + self.coredata.add_compiler_options(options, lang, for_machine, self.environment) + + def func_dependency(self, node: BaseNode, args: T.List[TYPE_nvar], kwargs: T.Dict[str, TYPE_nvar]) -> None: + args = self.flatten_args(args) + kwargs = self.flatten_kwargs(kwargs) + if not args: + return + name = args[0] + has_fallback = 'fallback' in kwargs + required = kwargs.get('required', True) + version = kwargs.get('version', []) + if not isinstance(version, list): + version = [version] + if isinstance(required, ElementaryNode): + required = required.value + if not isinstance(required, bool): + required = False + self.dependencies += [{ + 'name': name, + 'required': required, + 'version': version, + 'has_fallback': has_fallback, + 'conditional': node.condition_level > 0, + 'node': node + }] + + def build_target(self, node: BaseNode, args: T.List[TYPE_nvar], kwargs_raw: T.Dict[str, TYPE_nvar], targetclass: T.Type[BuildTarget]) -> T.Optional[T.Dict[str, T.Any]]: + args = self.flatten_args(args) + if not args or not isinstance(args[0], str): + return None + name = args[0] + srcqueue = [node] + extra_queue = [] + + # Process the sources BEFORE flattening the kwargs, to preserve the original nodes + if 'sources' in kwargs_raw: + srcqueue += mesonlib.listify(kwargs_raw['sources']) + + if 'extra_files' in kwargs_raw: + extra_queue += mesonlib.listify(kwargs_raw['extra_files']) + + kwargs = self.flatten_kwargs(kwargs_raw, True) + + def traverse_nodes(inqueue: T.List[BaseNode]) -> T.List[BaseNode]: + res = [] # type: T.List[BaseNode] + while inqueue: + curr = inqueue.pop(0) + arg_node = None + assert isinstance(curr, BaseNode) + if isinstance(curr, FunctionNode): + arg_node = curr.args + elif isinstance(curr, ArrayNode): + arg_node = curr.args + elif isinstance(curr, IdNode): + # Try to resolve the ID and append the node to the queue + assert isinstance(curr.value, str) + var_name = curr.value + if var_name in self.assignments: + tmp_node = self.assignments[var_name] + if isinstance(tmp_node, (ArrayNode, IdNode, FunctionNode)): + inqueue += [tmp_node] + elif isinstance(curr, ArithmeticNode): + inqueue += [curr.left, curr.right] + if arg_node is None: + continue + arg_nodes = arg_node.arguments.copy() + # Pop the first element if the function is a build target function + if isinstance(curr, FunctionNode) and curr.func_name in BUILD_TARGET_FUNCTIONS: + arg_nodes.pop(0) + elementary_nodes = [x for x in arg_nodes if isinstance(x, (str, StringNode))] + inqueue += [x for x in arg_nodes if isinstance(x, (FunctionNode, ArrayNode, IdNode, ArithmeticNode))] + if elementary_nodes: + res += [curr] + return res + + source_nodes = traverse_nodes(srcqueue) + extraf_nodes = traverse_nodes(extra_queue) + + # Make sure nothing can crash when creating the build class + kwargs_reduced = {k: v for k, v in kwargs.items() if k in targetclass.known_kwargs and k in {'install', 'build_by_default', 'build_always'}} + kwargs_reduced = {k: v.value if isinstance(v, ElementaryNode) else v for k, v in kwargs_reduced.items()} + kwargs_reduced = {k: v for k, v in kwargs_reduced.items() if not isinstance(v, BaseNode)} + for_machine = MachineChoice.HOST + objects = [] # type: T.List[T.Any] + empty_sources = [] # type: T.List[T.Any] + # Passing the 
unresolved sources list causes errors + kwargs_reduced['_allow_no_sources'] = True + target = targetclass(name, self.subdir, self.subproject, for_machine, empty_sources, [], objects, + self.environment, self.coredata.compilers[for_machine], kwargs_reduced) + target.process_compilers_late() + + new_target = { + 'name': target.get_basename(), + 'id': target.get_id(), + 'type': target.get_typename(), + 'defined_in': os.path.normpath(os.path.join(self.source_root, self.subdir, environment.build_filename)), + 'subdir': self.subdir, + 'build_by_default': target.build_by_default, + 'installed': target.should_install(), + 'outputs': target.get_outputs(), + 'sources': source_nodes, + 'extra_files': extraf_nodes, + 'kwargs': kwargs, + 'node': node, + } + + self.targets += [new_target] + return new_target + + def build_library(self, node: BaseNode, args: T.List[TYPE_nvar], kwargs: T.Dict[str, TYPE_nvar]) -> T.Optional[T.Dict[str, T.Any]]: + default_library = self.coredata.get_option(OptionKey('default_library')) + if default_library == 'shared': + return self.build_target(node, args, kwargs, SharedLibrary) + elif default_library == 'static': + return self.build_target(node, args, kwargs, StaticLibrary) + elif default_library == 'both': + return self.build_target(node, args, kwargs, SharedLibrary) + return None + + def func_executable(self, node: BaseNode, args: T.List[TYPE_nvar], kwargs: T.Dict[str, TYPE_nvar]) -> T.Optional[T.Dict[str, T.Any]]: + return self.build_target(node, args, kwargs, Executable) + + def func_static_lib(self, node: BaseNode, args: T.List[TYPE_nvar], kwargs: T.Dict[str, TYPE_nvar]) -> T.Optional[T.Dict[str, T.Any]]: + return self.build_target(node, args, kwargs, StaticLibrary) + + def func_shared_lib(self, node: BaseNode, args: T.List[TYPE_nvar], kwargs: T.Dict[str, TYPE_nvar]) -> T.Optional[T.Dict[str, T.Any]]: + return self.build_target(node, args, kwargs, SharedLibrary) + + def func_both_lib(self, node: BaseNode, args: T.List[TYPE_nvar], kwargs: T.Dict[str, TYPE_nvar]) -> T.Optional[T.Dict[str, T.Any]]: + return self.build_target(node, args, kwargs, SharedLibrary) + + def func_shared_module(self, node: BaseNode, args: T.List[TYPE_nvar], kwargs: T.Dict[str, TYPE_nvar]) -> T.Optional[T.Dict[str, T.Any]]: + return self.build_target(node, args, kwargs, SharedModule) + + def func_library(self, node: BaseNode, args: T.List[TYPE_nvar], kwargs: T.Dict[str, TYPE_nvar]) -> T.Optional[T.Dict[str, T.Any]]: + return self.build_library(node, args, kwargs) + + def func_jar(self, node: BaseNode, args: T.List[TYPE_nvar], kwargs: T.Dict[str, TYPE_nvar]) -> T.Optional[T.Dict[str, T.Any]]: + return self.build_target(node, args, kwargs, Jar) + + def func_build_target(self, node: BaseNode, args: T.List[TYPE_nvar], kwargs: T.Dict[str, TYPE_nvar]) -> T.Optional[T.Dict[str, T.Any]]: + if 'target_type' not in kwargs: + return None + target_type = kwargs.pop('target_type') + if isinstance(target_type, ElementaryNode): + target_type = target_type.value + if target_type == 'executable': + return self.build_target(node, args, kwargs, Executable) + elif target_type == 'shared_library': + return self.build_target(node, args, kwargs, SharedLibrary) + elif target_type == 'static_library': + return self.build_target(node, args, kwargs, StaticLibrary) + elif target_type == 'both_libraries': + return self.build_target(node, args, kwargs, SharedLibrary) + elif target_type == 'library': + return self.build_library(node, args, kwargs) + elif target_type == 'jar': + return self.build_target(node, args, kwargs, Jar) 
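+        # Any unrecognised target_type value falls through to the final
+        # return None below, matching the fail-soft behaviour used
+        # throughout this introspection interpreter.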
+ return None + + def is_subproject(self) -> bool: + return self.subproject != '' + + def analyze(self) -> None: + self.load_root_meson_file() + self.sanity_check_ast() + self.parse_project() + self.run() diff --git a/vendored-meson/meson/mesonbuild/ast/postprocess.py b/vendored-meson/meson/mesonbuild/ast/postprocess.py new file mode 100644 index 000000000000..09c339dd6ace --- /dev/null +++ b/vendored-meson/meson/mesonbuild/ast/postprocess.py @@ -0,0 +1,120 @@ +# Copyright 2019 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# This class contains the basic functionality needed to run any interpreter +# or an interpreter-based tool +from __future__ import annotations + +from .visitor import AstVisitor +import typing as T + +if T.TYPE_CHECKING: + from .. import mparser + +class AstIndentationGenerator(AstVisitor): + def __init__(self) -> None: + self.level = 0 + + def visit_default_func(self, node: mparser.BaseNode) -> None: + # Store the current level in the node + node.level = self.level + + def visit_ArrayNode(self, node: mparser.ArrayNode) -> None: + self.visit_default_func(node) + self.level += 1 + node.args.accept(self) + self.level -= 1 + + def visit_DictNode(self, node: mparser.DictNode) -> None: + self.visit_default_func(node) + self.level += 1 + node.args.accept(self) + self.level -= 1 + + def visit_MethodNode(self, node: mparser.MethodNode) -> None: + self.visit_default_func(node) + node.source_object.accept(self) + self.level += 1 + node.args.accept(self) + self.level -= 1 + + def visit_FunctionNode(self, node: mparser.FunctionNode) -> None: + self.visit_default_func(node) + self.level += 1 + node.args.accept(self) + self.level -= 1 + + def visit_ForeachClauseNode(self, node: mparser.ForeachClauseNode) -> None: + self.visit_default_func(node) + self.level += 1 + node.items.accept(self) + node.block.accept(self) + self.level -= 1 + + def visit_IfClauseNode(self, node: mparser.IfClauseNode) -> None: + self.visit_default_func(node) + for i in node.ifs: + i.accept(self) + if node.elseblock: + self.level += 1 + node.elseblock.accept(self) + self.level -= 1 + + def visit_IfNode(self, node: mparser.IfNode) -> None: + self.visit_default_func(node) + self.level += 1 + node.condition.accept(self) + node.block.accept(self) + self.level -= 1 + +class AstIDGenerator(AstVisitor): + def __init__(self) -> None: + self.counter = {} # type: T.Dict[str, int] + + def visit_default_func(self, node: mparser.BaseNode) -> None: + name = type(node).__name__ + if name not in self.counter: + self.counter[name] = 0 + node.ast_id = name + '#' + str(self.counter[name]) + self.counter[name] += 1 + +class AstConditionLevel(AstVisitor): + def __init__(self) -> None: + self.condition_level = 0 + + def visit_default_func(self, node: mparser.BaseNode) -> None: + node.condition_level = self.condition_level + + def visit_ForeachClauseNode(self, node: mparser.ForeachClauseNode) -> None: + self.visit_default_func(node) + self.condition_level += 1 + node.items.accept(self) + 
node.block.accept(self) + self.condition_level -= 1 + + def visit_IfClauseNode(self, node: mparser.IfClauseNode) -> None: + self.visit_default_func(node) + for i in node.ifs: + i.accept(self) + if node.elseblock: + self.condition_level += 1 + node.elseblock.accept(self) + self.condition_level -= 1 + + def visit_IfNode(self, node: mparser.IfNode) -> None: + self.visit_default_func(node) + self.condition_level += 1 + node.condition.accept(self) + node.block.accept(self) + self.condition_level -= 1 diff --git a/vendored-meson/meson/mesonbuild/ast/printer.py b/vendored-meson/meson/mesonbuild/ast/printer.py new file mode 100644 index 000000000000..579a83d00011 --- /dev/null +++ b/vendored-meson/meson/mesonbuild/ast/printer.py @@ -0,0 +1,399 @@ +# Copyright 2019 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# This class contains the basic functionality needed to run any interpreter +# or an interpreter-based tool +from __future__ import annotations + +from .. import mparser +from .visitor import AstVisitor +import re +import typing as T + +arithmic_map = { + 'add': '+', + 'sub': '-', + 'mod': '%', + 'mul': '*', + 'div': '/' +} + +class AstPrinter(AstVisitor): + def __init__(self, indent: int = 2, arg_newline_cutoff: int = 5, update_ast_line_nos: bool = False): + self.result = '' + self.indent = indent + self.arg_newline_cutoff = arg_newline_cutoff + self.ci = '' + self.is_newline = True + self.last_level = 0 + self.curr_line = 1 if update_ast_line_nos else None + + def post_process(self) -> None: + self.result = re.sub(r'\s+\n', '\n', self.result) + + def append(self, data: str, node: mparser.BaseNode) -> None: + self.last_level = node.level + if self.is_newline: + self.result += ' ' * (node.level * self.indent) + self.result += data + self.is_newline = False + + def append_padded(self, data: str, node: mparser.BaseNode) -> None: + if self.result and self.result[-1] not in [' ', '\n']: + data = ' ' + data + self.append(data + ' ', node) + + def newline(self) -> None: + self.result += '\n' + self.is_newline = True + if self.curr_line is not None: + self.curr_line += 1 + + def visit_BooleanNode(self, node: mparser.BooleanNode) -> None: + self.append('true' if node.value else 'false', node) + node.lineno = self.curr_line or node.lineno + + def visit_IdNode(self, node: mparser.IdNode) -> None: + assert isinstance(node.value, str) + self.append(node.value, node) + node.lineno = self.curr_line or node.lineno + + def visit_NumberNode(self, node: mparser.NumberNode) -> None: + self.append(str(node.value), node) + node.lineno = self.curr_line or node.lineno + + def escape(self, val: str) -> str: + return val.translate(str.maketrans({'\'': '\\\'', + '\\': '\\\\'})) + + def visit_StringNode(self, node: mparser.StringNode) -> None: + assert isinstance(node.value, str) + self.append("'" + self.escape(node.value) + "'", node) + node.lineno = self.curr_line or node.lineno + + def visit_FormatStringNode(self, node: mparser.FormatStringNode) -> None: + assert isinstance(node.value, str) 
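+        # Note: unlike visit_StringNode above, the value is emitted verbatim,
+        # without going through escape(); format strings are assumed to
+        # round-trip as written.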
+ self.append("f'" + node.value + "'", node) + node.lineno = self.curr_line or node.lineno + + def visit_ContinueNode(self, node: mparser.ContinueNode) -> None: + self.append('continue', node) + node.lineno = self.curr_line or node.lineno + + def visit_BreakNode(self, node: mparser.BreakNode) -> None: + self.append('break', node) + node.lineno = self.curr_line or node.lineno + + def visit_ArrayNode(self, node: mparser.ArrayNode) -> None: + node.lineno = self.curr_line or node.lineno + self.append('[', node) + node.args.accept(self) + self.append(']', node) + + def visit_DictNode(self, node: mparser.DictNode) -> None: + node.lineno = self.curr_line or node.lineno + self.append('{', node) + node.args.accept(self) + self.append('}', node) + + def visit_OrNode(self, node: mparser.OrNode) -> None: + node.left.accept(self) + self.append_padded('or', node) + node.lineno = self.curr_line or node.lineno + node.right.accept(self) + + def visit_AndNode(self, node: mparser.AndNode) -> None: + node.left.accept(self) + self.append_padded('and', node) + node.lineno = self.curr_line or node.lineno + node.right.accept(self) + + def visit_ComparisonNode(self, node: mparser.ComparisonNode) -> None: + node.left.accept(self) + self.append_padded(node.ctype, node) + node.lineno = self.curr_line or node.lineno + node.right.accept(self) + + def visit_ArithmeticNode(self, node: mparser.ArithmeticNode) -> None: + node.left.accept(self) + self.append_padded(arithmic_map[node.operation], node) + node.lineno = self.curr_line or node.lineno + node.right.accept(self) + + def visit_NotNode(self, node: mparser.NotNode) -> None: + node.lineno = self.curr_line or node.lineno + self.append_padded('not', node) + node.value.accept(self) + + def visit_CodeBlockNode(self, node: mparser.CodeBlockNode) -> None: + node.lineno = self.curr_line or node.lineno + for i in node.lines: + i.accept(self) + self.newline() + + def visit_IndexNode(self, node: mparser.IndexNode) -> None: + node.iobject.accept(self) + node.lineno = self.curr_line or node.lineno + self.append('[', node) + node.index.accept(self) + self.append(']', node) + + def visit_MethodNode(self, node: mparser.MethodNode) -> None: + node.lineno = self.curr_line or node.lineno + node.source_object.accept(self) + self.append('.' 
+ node.name + '(', node) + node.args.accept(self) + self.append(')', node) + + def visit_FunctionNode(self, node: mparser.FunctionNode) -> None: + node.lineno = self.curr_line or node.lineno + self.append(node.func_name + '(', node) + node.args.accept(self) + self.append(')', node) + + def visit_AssignmentNode(self, node: mparser.AssignmentNode) -> None: + node.lineno = self.curr_line or node.lineno + self.append(node.var_name + ' = ', node) + node.value.accept(self) + + def visit_PlusAssignmentNode(self, node: mparser.PlusAssignmentNode) -> None: + node.lineno = self.curr_line or node.lineno + self.append(node.var_name + ' += ', node) + node.value.accept(self) + + def visit_ForeachClauseNode(self, node: mparser.ForeachClauseNode) -> None: + node.lineno = self.curr_line or node.lineno + self.append_padded('foreach', node) + self.append_padded(', '.join(node.varnames), node) + self.append_padded(':', node) + node.items.accept(self) + self.newline() + node.block.accept(self) + self.append('endforeach', node) + + def visit_IfClauseNode(self, node: mparser.IfClauseNode) -> None: + node.lineno = self.curr_line or node.lineno + prefix = '' + for i in node.ifs: + self.append_padded(prefix + 'if', node) + prefix = 'el' + i.accept(self) + if not isinstance(node.elseblock, mparser.EmptyNode): + self.append('else', node) + node.elseblock.accept(self) + self.append('endif', node) + + def visit_UMinusNode(self, node: mparser.UMinusNode) -> None: + node.lineno = self.curr_line or node.lineno + self.append_padded('-', node) + node.value.accept(self) + + def visit_IfNode(self, node: mparser.IfNode) -> None: + node.lineno = self.curr_line or node.lineno + node.condition.accept(self) + self.newline() + node.block.accept(self) + + def visit_TernaryNode(self, node: mparser.TernaryNode) -> None: + node.lineno = self.curr_line or node.lineno + node.condition.accept(self) + self.append_padded('?', node) + node.trueblock.accept(self) + self.append_padded(':', node) + node.falseblock.accept(self) + + def visit_ArgumentNode(self, node: mparser.ArgumentNode) -> None: + node.lineno = self.curr_line or node.lineno + break_args = (len(node.arguments) + len(node.kwargs)) > self.arg_newline_cutoff + for i in node.arguments + list(node.kwargs.values()): + if not isinstance(i, (mparser.ElementaryNode, mparser.IndexNode)): + break_args = True + if break_args: + self.newline() + for i in node.arguments: + i.accept(self) + self.append(', ', node) + if break_args: + self.newline() + for key, val in node.kwargs.items(): + key.accept(self) + self.append_padded(':', node) + val.accept(self) + self.append(', ', node) + if break_args: + self.newline() + if break_args: + self.result = re.sub(r', \n$', '\n', self.result) + else: + self.result = re.sub(r', $', '', self.result) + +class AstJSONPrinter(AstVisitor): + def __init__(self) -> None: + self.result = {} # type: T.Dict[str, T.Any] + self.current = self.result + + def _accept(self, key: str, node: mparser.BaseNode) -> None: + old = self.current + data = {} # type: T.Dict[str, T.Any] + self.current = data + node.accept(self) + self.current = old + self.current[key] = data + + def _accept_list(self, key: str, nodes: T.Sequence[mparser.BaseNode]) -> None: + old = self.current + datalist = [] # type: T.List[T.Dict[str, T.Any]] + for i in nodes: + self.current = {} + i.accept(self) + datalist += [self.current] + self.current = old + self.current[key] = datalist + + def _raw_accept(self, node: mparser.BaseNode, data: T.Dict[str, T.Any]) -> None: + old = self.current + self.current = 
data + node.accept(self) + self.current = old + + def setbase(self, node: mparser.BaseNode) -> None: + self.current['node'] = type(node).__name__ + self.current['lineno'] = node.lineno + self.current['colno'] = node.colno + self.current['end_lineno'] = node.end_lineno + self.current['end_colno'] = node.end_colno + + def visit_default_func(self, node: mparser.BaseNode) -> None: + self.setbase(node) + + def gen_ElementaryNode(self, node: mparser.ElementaryNode) -> None: + self.current['value'] = node.value + self.setbase(node) + + def visit_BooleanNode(self, node: mparser.BooleanNode) -> None: + self.gen_ElementaryNode(node) + + def visit_IdNode(self, node: mparser.IdNode) -> None: + self.gen_ElementaryNode(node) + + def visit_NumberNode(self, node: mparser.NumberNode) -> None: + self.gen_ElementaryNode(node) + + def visit_StringNode(self, node: mparser.StringNode) -> None: + self.gen_ElementaryNode(node) + + def visit_FormatStringNode(self, node: mparser.FormatStringNode) -> None: + self.gen_ElementaryNode(node) + + def visit_ArrayNode(self, node: mparser.ArrayNode) -> None: + self._accept('args', node.args) + self.setbase(node) + + def visit_DictNode(self, node: mparser.DictNode) -> None: + self._accept('args', node.args) + self.setbase(node) + + def visit_OrNode(self, node: mparser.OrNode) -> None: + self._accept('left', node.left) + self._accept('right', node.right) + self.setbase(node) + + def visit_AndNode(self, node: mparser.AndNode) -> None: + self._accept('left', node.left) + self._accept('right', node.right) + self.setbase(node) + + def visit_ComparisonNode(self, node: mparser.ComparisonNode) -> None: + self._accept('left', node.left) + self._accept('right', node.right) + self.current['ctype'] = node.ctype + self.setbase(node) + + def visit_ArithmeticNode(self, node: mparser.ArithmeticNode) -> None: + self._accept('left', node.left) + self._accept('right', node.right) + self.current['op'] = arithmic_map[node.operation] + self.setbase(node) + + def visit_NotNode(self, node: mparser.NotNode) -> None: + self._accept('right', node.value) + self.setbase(node) + + def visit_CodeBlockNode(self, node: mparser.CodeBlockNode) -> None: + self._accept_list('lines', node.lines) + self.setbase(node) + + def visit_IndexNode(self, node: mparser.IndexNode) -> None: + self._accept('object', node.iobject) + self._accept('index', node.index) + self.setbase(node) + + def visit_MethodNode(self, node: mparser.MethodNode) -> None: + self._accept('object', node.source_object) + self._accept('args', node.args) + self.current['name'] = node.name + self.setbase(node) + + def visit_FunctionNode(self, node: mparser.FunctionNode) -> None: + self._accept('args', node.args) + self.current['name'] = node.func_name + self.setbase(node) + + def visit_AssignmentNode(self, node: mparser.AssignmentNode) -> None: + self._accept('value', node.value) + self.current['var_name'] = node.var_name + self.setbase(node) + + def visit_PlusAssignmentNode(self, node: mparser.PlusAssignmentNode) -> None: + self._accept('value', node.value) + self.current['var_name'] = node.var_name + self.setbase(node) + + def visit_ForeachClauseNode(self, node: mparser.ForeachClauseNode) -> None: + self._accept('items', node.items) + self._accept('block', node.block) + self.current['varnames'] = node.varnames + self.setbase(node) + + def visit_IfClauseNode(self, node: mparser.IfClauseNode) -> None: + self._accept_list('ifs', node.ifs) + self._accept('else', node.elseblock) + self.setbase(node) + + def visit_UMinusNode(self, node: mparser.UMinusNode) 
-> None: + self._accept('right', node.value) + self.setbase(node) + + def visit_IfNode(self, node: mparser.IfNode) -> None: + self._accept('condition', node.condition) + self._accept('block', node.block) + self.setbase(node) + + def visit_TernaryNode(self, node: mparser.TernaryNode) -> None: + self._accept('condition', node.condition) + self._accept('true', node.trueblock) + self._accept('false', node.falseblock) + self.setbase(node) + + def visit_ArgumentNode(self, node: mparser.ArgumentNode) -> None: + self._accept_list('positional', node.arguments) + kwargs_list = [] # type: T.List[T.Dict[str, T.Dict[str, T.Any]]] + for key, val in node.kwargs.items(): + key_res = {} # type: T.Dict[str, T.Any] + val_res = {} # type: T.Dict[str, T.Any] + self._raw_accept(key, key_res) + self._raw_accept(val, val_res) + kwargs_list += [{'key': key_res, 'val': val_res}] + self.current['kwargs'] = kwargs_list + self.setbase(node) diff --git a/vendored-meson/meson/mesonbuild/ast/visitor.py b/vendored-meson/meson/mesonbuild/ast/visitor.py new file mode 100644 index 000000000000..8a0e77bb8e3b --- /dev/null +++ b/vendored-meson/meson/mesonbuild/ast/visitor.py @@ -0,0 +1,146 @@ +# Copyright 2019 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# This class contains the basic functionality needed to run any interpreter +# or an interpreter-based tool +from __future__ import annotations + +import typing as T + +if T.TYPE_CHECKING: + from .. 
import mparser + +class AstVisitor: + def __init__(self) -> None: + pass + + def visit_default_func(self, node: mparser.BaseNode) -> None: + pass + + def visit_BooleanNode(self, node: mparser.BooleanNode) -> None: + self.visit_default_func(node) + + def visit_IdNode(self, node: mparser.IdNode) -> None: + self.visit_default_func(node) + + def visit_NumberNode(self, node: mparser.NumberNode) -> None: + self.visit_default_func(node) + + def visit_StringNode(self, node: mparser.StringNode) -> None: + self.visit_default_func(node) + + def visit_FormatStringNode(self, node: mparser.FormatStringNode) -> None: + self.visit_default_func(node) + + def visit_ContinueNode(self, node: mparser.ContinueNode) -> None: + self.visit_default_func(node) + + def visit_BreakNode(self, node: mparser.BreakNode) -> None: + self.visit_default_func(node) + + def visit_ArrayNode(self, node: mparser.ArrayNode) -> None: + self.visit_default_func(node) + node.args.accept(self) + + def visit_DictNode(self, node: mparser.DictNode) -> None: + self.visit_default_func(node) + node.args.accept(self) + + def visit_EmptyNode(self, node: mparser.EmptyNode) -> None: + self.visit_default_func(node) + + def visit_OrNode(self, node: mparser.OrNode) -> None: + self.visit_default_func(node) + node.left.accept(self) + node.right.accept(self) + + def visit_AndNode(self, node: mparser.AndNode) -> None: + self.visit_default_func(node) + node.left.accept(self) + node.right.accept(self) + + def visit_ComparisonNode(self, node: mparser.ComparisonNode) -> None: + self.visit_default_func(node) + node.left.accept(self) + node.right.accept(self) + + def visit_ArithmeticNode(self, node: mparser.ArithmeticNode) -> None: + self.visit_default_func(node) + node.left.accept(self) + node.right.accept(self) + + def visit_NotNode(self, node: mparser.NotNode) -> None: + self.visit_default_func(node) + node.value.accept(self) + + def visit_CodeBlockNode(self, node: mparser.CodeBlockNode) -> None: + self.visit_default_func(node) + for i in node.lines: + i.accept(self) + + def visit_IndexNode(self, node: mparser.IndexNode) -> None: + self.visit_default_func(node) + node.iobject.accept(self) + node.index.accept(self) + + def visit_MethodNode(self, node: mparser.MethodNode) -> None: + self.visit_default_func(node) + node.source_object.accept(self) + node.args.accept(self) + + def visit_FunctionNode(self, node: mparser.FunctionNode) -> None: + self.visit_default_func(node) + node.args.accept(self) + + def visit_AssignmentNode(self, node: mparser.AssignmentNode) -> None: + self.visit_default_func(node) + node.value.accept(self) + + def visit_PlusAssignmentNode(self, node: mparser.PlusAssignmentNode) -> None: + self.visit_default_func(node) + node.value.accept(self) + + def visit_ForeachClauseNode(self, node: mparser.ForeachClauseNode) -> None: + self.visit_default_func(node) + node.items.accept(self) + node.block.accept(self) + + def visit_IfClauseNode(self, node: mparser.IfClauseNode) -> None: + self.visit_default_func(node) + for i in node.ifs: + i.accept(self) + node.elseblock.accept(self) + + def visit_UMinusNode(self, node: mparser.UMinusNode) -> None: + self.visit_default_func(node) + node.value.accept(self) + + def visit_IfNode(self, node: mparser.IfNode) -> None: + self.visit_default_func(node) + node.condition.accept(self) + node.block.accept(self) + + def visit_TernaryNode(self, node: mparser.TernaryNode) -> None: + self.visit_default_func(node) + node.condition.accept(self) + node.trueblock.accept(self) + node.falseblock.accept(self) + + def 
visit_ArgumentNode(self, node: mparser.ArgumentNode) -> None: + self.visit_default_func(node) + for i in node.arguments: + i.accept(self) + for key, val in node.kwargs.items(): + key.accept(self) + val.accept(self) diff --git a/vendored-meson/meson/mesonbuild/backend/__init__.py b/vendored-meson/meson/mesonbuild/backend/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/vendored-meson/meson/mesonbuild/backend/backends.py b/vendored-meson/meson/mesonbuild/backend/backends.py new file mode 100644 index 000000000000..73741a4412ee --- /dev/null +++ b/vendored-meson/meson/mesonbuild/backend/backends.py @@ -0,0 +1,2030 @@ +# Copyright 2012-2016 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from __future__ import annotations + +from collections import OrderedDict +from dataclasses import dataclass, InitVar +from functools import lru_cache +from itertools import chain +from pathlib import Path +import copy +import enum +import json +import os +import pickle +import re +import shutil +import typing as T +import hashlib + +from .. import build +from .. import dependencies +from .. import programs +from .. import mesonlib +from .. import mlog +from ..compilers import LANGUAGES_USING_LDFLAGS, detect +from ..mesonlib import ( + File, MachineChoice, MesonException, OrderedSet, + classify_unity_sources, OptionKey, join_args, + ExecutableSerialisation +) + +if T.TYPE_CHECKING: + from .._typing import ImmutableListProtocol + from ..arglist import CompilerArgs + from ..compilers import Compiler + from ..environment import Environment + from ..interpreter import Interpreter, Test + from ..linkers.linkers import StaticLinker + from ..mesonlib import FileMode, FileOrString + + from typing_extensions import TypedDict + + _ALL_SOURCES_TYPE = T.List[T.Union[File, build.CustomTarget, build.CustomTargetIndex, build.GeneratedList]] + + class TargetIntrospectionData(TypedDict): + + language: str + compiler: T.List[str] + parameters: T.List[str] + sources: T.List[str] + generated_sources: T.List[str] + + +# Languages that can mix with C or C++ but don't support unity builds yet +# because the syntax we use for unity builds is specific to C/++/ObjC/++. 
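+# (A unity build #includes many source files into one jumbo translation
+# unit, which is why only the C-family languages can participate.)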
+# Assembly files cannot be unitified and neither can LLVM IR files +LANGS_CANT_UNITY = ('d', 'fortran', 'vala') + +@dataclass(eq=False) +class RegenInfo: + source_dir: str + build_dir: str + depfiles: T.List[str] + +class TestProtocol(enum.Enum): + + EXITCODE = 0 + TAP = 1 + GTEST = 2 + RUST = 3 + + @classmethod + def from_str(cls, string: str) -> 'TestProtocol': + if string == 'exitcode': + return cls.EXITCODE + elif string == 'tap': + return cls.TAP + elif string == 'gtest': + return cls.GTEST + elif string == 'rust': + return cls.RUST + raise MesonException(f'unknown test format {string}') + + def __str__(self) -> str: + cls = type(self) + if self is cls.EXITCODE: + return 'exitcode' + elif self is cls.GTEST: + return 'gtest' + elif self is cls.RUST: + return 'rust' + return 'tap' + + +@dataclass(eq=False) +class CleanTrees: + ''' + Directories outputted by custom targets that have to be manually cleaned + because on Linux `ninja clean` only deletes empty directories. + ''' + build_dir: str + trees: T.List[str] + +@dataclass(eq=False) +class InstallData: + source_dir: str + build_dir: str + prefix: str + libdir: str + strip_bin: T.List[str] + # TODO: in python 3.8 or with typing_Extensions this could be: + # `T.Union[T.Literal['preserve'], int]`, which would be more accurate. + install_umask: T.Union[str, int] + mesonintrospect: T.List[str] + version: str + + def __post_init__(self) -> None: + self.targets: T.List[TargetInstallData] = [] + self.headers: T.List[InstallDataBase] = [] + self.man: T.List[InstallDataBase] = [] + self.emptydir: T.List[InstallEmptyDir] = [] + self.data: T.List[InstallDataBase] = [] + self.symlinks: T.List[InstallSymlinkData] = [] + self.install_scripts: T.List[ExecutableSerialisation] = [] + self.install_subdirs: T.List[SubdirInstallData] = [] + +@dataclass(eq=False) +class TargetInstallData: + fname: str + outdir: str + outdir_name: InitVar[T.Optional[str]] + strip: bool + install_name_mappings: T.Mapping[str, str] + rpath_dirs_to_remove: T.Set[bytes] + install_rpath: str + # TODO: install_mode should just always be a FileMode object + install_mode: T.Optional['FileMode'] + subproject: str + optional: bool = False + tag: T.Optional[str] = None + can_strip: bool = False + + def __post_init__(self, outdir_name: T.Optional[str]) -> None: + if outdir_name is None: + outdir_name = os.path.join('{prefix}', self.outdir) + self.out_name = os.path.join(outdir_name, os.path.basename(self.fname)) + +@dataclass(eq=False) +class InstallEmptyDir: + path: str + install_mode: 'FileMode' + subproject: str + tag: T.Optional[str] = None + +@dataclass(eq=False) +class InstallDataBase: + path: str + install_path: str + install_path_name: str + install_mode: 'FileMode' + subproject: str + tag: T.Optional[str] = None + data_type: T.Optional[str] = None + +@dataclass(eq=False) +class InstallSymlinkData: + target: str + name: str + install_path: str + subproject: str + tag: T.Optional[str] = None + allow_missing: bool = False + +# cannot use dataclass here because "exclude" is out of order +class SubdirInstallData(InstallDataBase): + def __init__(self, path: str, install_path: str, install_path_name: str, + install_mode: 'FileMode', exclude: T.Tuple[T.Set[str], T.Set[str]], + subproject: str, tag: T.Optional[str] = None, data_type: T.Optional[str] = None): + super().__init__(path, install_path, install_path_name, install_mode, subproject, tag, data_type) + self.exclude = exclude + + +@dataclass(eq=False) +class TestSerialisation: + name: str + project_name: str + suite: T.List[str] 
+ fname: T.List[str] + is_cross_built: bool + exe_wrapper: T.Optional[programs.ExternalProgram] + needs_exe_wrapper: bool + is_parallel: bool + cmd_args: T.List[str] + env: build.EnvironmentVariables + should_fail: bool + timeout: T.Optional[int] + workdir: T.Optional[str] + extra_paths: T.List[str] + protocol: TestProtocol + priority: int + cmd_is_built: bool + cmd_is_exe: bool + depends: T.List[str] + version: str + verbose: bool + + def __post_init__(self) -> None: + if self.exe_wrapper is not None: + assert isinstance(self.exe_wrapper, programs.ExternalProgram) + + +def get_backend_from_name(backend: str, build: T.Optional[build.Build] = None, interpreter: T.Optional['Interpreter'] = None) -> T.Optional['Backend']: + if backend == 'ninja': + from . import ninjabackend + return ninjabackend.NinjaBackend(build, interpreter) + elif backend == 'vs': + from . import vs2010backend + return vs2010backend.autodetect_vs_version(build, interpreter) + elif backend == 'vs2010': + from . import vs2010backend + return vs2010backend.Vs2010Backend(build, interpreter) + elif backend == 'vs2012': + from . import vs2012backend + return vs2012backend.Vs2012Backend(build, interpreter) + elif backend == 'vs2013': + from . import vs2013backend + return vs2013backend.Vs2013Backend(build, interpreter) + elif backend == 'vs2015': + from . import vs2015backend + return vs2015backend.Vs2015Backend(build, interpreter) + elif backend == 'vs2017': + from . import vs2017backend + return vs2017backend.Vs2017Backend(build, interpreter) + elif backend == 'vs2019': + from . import vs2019backend + return vs2019backend.Vs2019Backend(build, interpreter) + elif backend == 'vs2022': + from . import vs2022backend + return vs2022backend.Vs2022Backend(build, interpreter) + elif backend == 'xcode': + from . import xcodebackend + return xcodebackend.XCodeBackend(build, interpreter) + elif backend == 'none': + from . import nonebackend + return nonebackend.NoneBackend(build, interpreter) + return None + + +def get_genvslite_backend(genvsname: str, build: T.Optional[build.Build] = None, interpreter: T.Optional['Interpreter'] = None) -> T.Optional['Backend']: + if genvsname == 'vs2022': + from . import vs2022backend + return vs2022backend.Vs2022Backend(build, interpreter, gen_lite = True) + return None + +# This class contains the basic functionality that is needed by all backends. +# Feel free to move stuff in and out of it as you see fit. +class Backend: + + environment: T.Optional['Environment'] + name = '' + + def __init__(self, build: T.Optional[build.Build], interpreter: T.Optional['Interpreter']): + # Make it possible to construct a dummy backend + # This is used for introspection without a build directory + if build is None: + self.environment = None + return + self.build = build + self.interpreter = interpreter + self.environment = build.environment + self.processed_targets: T.Set[str] = set() + self.build_dir = self.environment.get_build_dir() + self.source_dir = self.environment.get_source_dir() + self.build_to_src = mesonlib.relpath(self.environment.get_source_dir(), + self.environment.get_build_dir()) + self.src_to_build = mesonlib.relpath(self.environment.get_build_dir(), + self.environment.get_source_dir()) + + # If requested via 'capture = True', returns captured compile args per + # target (e.g. captured_args[target]) that can be used later, for example, + # to populate things like intellisense fields in generated visual studio + # projects (as is the case when using '--genvslite'). 
+ # + # 'vslite_ctx' is only provided when + # we expect this backend setup/generation to make use of previously captured + # compile args (as is the case when using '--genvslite'). + def generate(self, capture: bool = False, vslite_ctx: dict = None) -> T.Optional[dict]: + raise RuntimeError(f'generate is not implemented in {type(self).__name__}') + + def get_target_filename(self, t: T.Union[build.Target, build.CustomTargetIndex], *, warn_multi_output: bool = True) -> str: + if isinstance(t, build.CustomTarget): + if warn_multi_output and len(t.get_outputs()) != 1: + mlog.warning(f'custom_target {t.name!r} has more than one output! ' + f'Using the first one. Consider using `{t.name}[0]`.') + filename = t.get_outputs()[0] + elif isinstance(t, build.CustomTargetIndex): + filename = t.get_outputs()[0] + else: + assert isinstance(t, build.BuildTarget), t + filename = t.get_filename() + return os.path.join(self.get_target_dir(t), filename) + + def get_target_filename_abs(self, target: T.Union[build.Target, build.CustomTargetIndex]) -> str: + return os.path.join(self.environment.get_build_dir(), self.get_target_filename(target)) + + def get_source_dir_include_args(self, target: build.BuildTarget, compiler: 'Compiler', *, absolute_path: bool = False) -> T.List[str]: + curdir = target.get_subdir() + if absolute_path: + lead = self.source_dir + else: + lead = self.build_to_src + tmppath = os.path.normpath(os.path.join(lead, curdir)) + return compiler.get_include_args(tmppath, False) + + def get_build_dir_include_args(self, target: build.BuildTarget, compiler: 'Compiler', *, absolute_path: bool = False) -> T.List[str]: + if absolute_path: + curdir = os.path.join(self.build_dir, target.get_subdir()) + else: + curdir = target.get_subdir() + if curdir == '': + curdir = '.' + return compiler.get_include_args(curdir, False) + + def get_target_filename_for_linking(self, target: T.Union[build.Target, build.CustomTargetIndex]) -> T.Optional[str]: + # On some platforms (msvc for instance), the file that is used for + # dynamic linking is not the same as the dynamic library itself. This + # file is called an import library, and we want to link against that. + # On all other platforms, we link to the library directly. 
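+ # For example, with MSVC a shared library 'foo' typically yields both
+ # foo.dll (loaded at runtime) and an import library foo.lib, and it is
+ # foo.lib that must be handed to the linker; MinGW similarly emits an
+ # import library such as libfoo.dll.a next to the DLL.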
+ if isinstance(target, build.SharedLibrary): + link_lib = target.get_import_filename() or target.get_filename() + return os.path.join(self.get_target_dir(target), link_lib) + elif isinstance(target, build.StaticLibrary): + return os.path.join(self.get_target_dir(target), target.get_filename()) + elif isinstance(target, (build.CustomTarget, build.CustomTargetIndex)): + if not target.is_linkable_target(): + raise MesonException(f'Tried to link against custom target "{target.name}", which is not linkable.') + return os.path.join(self.get_target_dir(target), target.get_filename()) + elif isinstance(target, build.Executable): + if target.import_filename: + return os.path.join(self.get_target_dir(target), target.get_import_filename()) + else: + return None + raise AssertionError(f'BUG: Tried to link to {target!r} which is not linkable') + + @lru_cache(maxsize=None) + def get_target_dir(self, target: T.Union[build.Target, build.CustomTargetIndex]) -> str: + if isinstance(target, build.RunTarget): + # this produces no output, only a dummy top-level name + dirname = '' + elif self.environment.coredata.get_option(OptionKey('layout')) == 'mirror': + dirname = target.get_subdir() + else: + dirname = 'meson-out' + return dirname + + def get_target_dir_relative_to(self, t: build.Target, o: build.Target) -> str: + '''Get a target dir relative to another target's directory''' + target_dir = os.path.join(self.environment.get_build_dir(), self.get_target_dir(t)) + othert_dir = os.path.join(self.environment.get_build_dir(), self.get_target_dir(o)) + return os.path.relpath(target_dir, othert_dir) + + def get_target_source_dir(self, target: build.Target) -> str: + # if target dir is empty, avoid extraneous trailing / from os.path.join() + target_dir = self.get_target_dir(target) + if target_dir: + return os.path.join(self.build_to_src, target_dir) + return self.build_to_src + + def get_target_private_dir(self, target: T.Union[build.BuildTarget, build.CustomTarget, build.CustomTargetIndex]) -> str: + return os.path.join(self.get_target_filename(target, warn_multi_output=False) + '.p') + + def get_target_private_dir_abs(self, target: T.Union[build.BuildTarget, build.CustomTarget, build.CustomTargetIndex]) -> str: + return os.path.join(self.environment.get_build_dir(), self.get_target_private_dir(target)) + + @lru_cache(maxsize=None) + def get_target_generated_dir( + self, target: T.Union[build.BuildTarget, build.CustomTarget, build.CustomTargetIndex], + gensrc: T.Union[build.CustomTarget, build.CustomTargetIndex, build.GeneratedList], + src: str) -> str: + """ + Takes a BuildTarget, a generator source (CustomTarget or GeneratedList), + and a generated source filename. + Returns the full path of the generated source relative to the build root + """ + # CustomTarget generators output to the build dir of the CustomTarget + if isinstance(gensrc, (build.CustomTarget, build.CustomTargetIndex)): + return os.path.join(self.get_target_dir(gensrc), src) + # GeneratedList generators output to the private build directory of the + # target that the GeneratedList is used in + return os.path.join(self.get_target_private_dir(target), src) + + def get_unity_source_file(self, target: T.Union[build.BuildTarget, build.CustomTarget, build.CustomTargetIndex], + suffix: str, number: int) -> mesonlib.File: + # There is a potential conflict here, but it is unlikely that + # anyone both enables unity builds and has a file called foo-unity.cpp. 
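+ # For instance, the first C++ unity file of a target named 'foo' is
+ # foo-unity0.cpp, created in the target's private directory.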
+ osrc = f'{target.name}-unity{number}.{suffix}' + return mesonlib.File.from_built_file(self.get_target_private_dir(target), osrc) + + def generate_unity_files(self, target: build.BuildTarget, unity_src: str) -> T.List[mesonlib.File]: + abs_files: T.List[str] = [] + result: T.List[mesonlib.File] = [] + compsrcs = classify_unity_sources(target.compilers.values(), unity_src) + unity_size = target.get_option(OptionKey('unity_size')) + assert isinstance(unity_size, int), 'for mypy' + + def init_language_file(suffix: str, unity_file_number: int) -> T.TextIO: + unity_src = self.get_unity_source_file(target, suffix, unity_file_number) + outfileabs = unity_src.absolute_path(self.environment.get_source_dir(), + self.environment.get_build_dir()) + outfileabs_tmp = outfileabs + '.tmp' + abs_files.append(outfileabs) + outfileabs_tmp_dir = os.path.dirname(outfileabs_tmp) + if not os.path.exists(outfileabs_tmp_dir): + os.makedirs(outfileabs_tmp_dir) + result.append(unity_src) + return open(outfileabs_tmp, 'w', encoding='utf-8') + + # For each language, generate unity source files and return the list + for comp, srcs in compsrcs.items(): + files_in_current = unity_size + 1 + unity_file_number = 0 + # TODO: this could be simplified with an algorithm that pre-sorts + # the sources into the size of chunks we want + ofile = None + for src in srcs: + if files_in_current >= unity_size: + if ofile: + ofile.close() + ofile = init_language_file(comp.get_default_suffix(), unity_file_number) + unity_file_number += 1 + files_in_current = 0 + ofile.write(f'#include<{src}>\n') + files_in_current += 1 + if ofile: + ofile.close() + + for x in abs_files: + mesonlib.replace_if_different(x, x + '.tmp') + return result + + @staticmethod + def relpath(todir: str, fromdir: str) -> str: + return os.path.relpath(os.path.join('dummyprefixdir', todir), + os.path.join('dummyprefixdir', fromdir)) + + def flatten_object_list(self, target: build.BuildTarget, proj_dir_to_build_root: str = '' + ) -> T.Tuple[T.List[str], T.List[build.BuildTargetTypes]]: + obj_list, deps = self._flatten_object_list(target, target.get_objects(), proj_dir_to_build_root) + return list(dict.fromkeys(obj_list)), deps + + def determine_ext_objs(self, objects: build.ExtractedObjects, proj_dir_to_build_root: str = '') -> T.List[str]: + obj_list, _ = self._flatten_object_list(objects.target, [objects], proj_dir_to_build_root) + return list(dict.fromkeys(obj_list)) + + def _flatten_object_list(self, target: build.BuildTarget, + objects: T.Sequence[T.Union[str, 'File', build.ExtractedObjects]], + proj_dir_to_build_root: str) -> T.Tuple[T.List[str], T.List[build.BuildTargetTypes]]: + obj_list: T.List[str] = [] + deps: T.List[build.BuildTargetTypes] = [] + for obj in objects: + if isinstance(obj, str): + o = os.path.join(proj_dir_to_build_root, + self.build_to_src, target.get_subdir(), obj) + obj_list.append(o) + elif isinstance(obj, mesonlib.File): + if obj.is_built: + o = os.path.join(proj_dir_to_build_root, + obj.rel_to_builddir(self.build_to_src)) + obj_list.append(o) + else: + o = os.path.join(proj_dir_to_build_root, + self.build_to_src) + obj_list.append(obj.rel_to_builddir(o)) + elif isinstance(obj, build.ExtractedObjects): + if obj.recursive: + objs, d = self._flatten_object_list(obj.target, obj.objlist, proj_dir_to_build_root) + obj_list.extend(objs) + deps.extend(d) + obj_list.extend(self._determine_ext_objs(obj, proj_dir_to_build_root)) + deps.append(obj.target) + else: + raise MesonException('Unknown data type in object list.') + return obj_list, deps + + 
@staticmethod
+ def is_swift_target(target: build.BuildTarget) -> bool:
+ for s in target.sources:
+ if s.endswith('swift'):
+ return True
+ return False
+
+ def determine_swift_dep_dirs(self, target: build.BuildTarget) -> T.List[str]:
+ result: T.List[str] = []
+ for l in target.link_targets:
+ result.append(self.get_target_private_dir_abs(l))
+ return result
+
+ def get_executable_serialisation(
+ self, cmd: T.Sequence[T.Union[programs.ExternalProgram, build.BuildTarget, build.CustomTarget, File, str]],
+ workdir: T.Optional[str] = None,
+ extra_bdeps: T.Optional[T.List[build.BuildTarget]] = None,
+ capture: T.Optional[bool] = None,
+ feed: T.Optional[bool] = None,
+ env: T.Optional[build.EnvironmentVariables] = None,
+ tag: T.Optional[str] = None,
+ verbose: bool = False,
+ installdir_map: T.Optional[T.Dict[str, str]] = None) -> 'ExecutableSerialisation':
+
+ # XXX: cmd_args either need to be lowered to strings, or need to be checked for non-string arguments, right?
+ exe, *raw_cmd_args = cmd
+ if isinstance(exe, programs.ExternalProgram):
+ exe_cmd = exe.get_command()
+ exe_for_machine = exe.for_machine
+ elif isinstance(exe, build.BuildTarget):
+ exe_cmd = [self.get_target_filename_abs(exe)]
+ exe_for_machine = exe.for_machine
+ elif isinstance(exe, build.CustomTarget):
+ # The output of a custom target may or may not be directly
+ # runnable: it could be a script, a native binary, or a
+ # cross-compiled binary (with or without an exe wrapper
+ # available). This implementation is not exhaustive, but it
+ # works in the common cases.
+ exe_cmd = [self.get_target_filename_abs(exe)]
+ exe_for_machine = MachineChoice.BUILD
+ elif isinstance(exe, mesonlib.File):
+ exe_cmd = [exe.rel_to_builddir(self.environment.source_dir)]
+ exe_for_machine = MachineChoice.BUILD
+ else:
+ exe_cmd = [exe]
+ exe_for_machine = MachineChoice.BUILD
+
+ cmd_args: T.List[str] = []
+ for c in raw_cmd_args:
+ if isinstance(c, programs.ExternalProgram):
+ p = c.get_path()
+ assert isinstance(p, str)
+ cmd_args.append(p)
+ elif isinstance(c, (build.BuildTarget, build.CustomTarget)):
+ cmd_args.append(self.get_target_filename_abs(c))
+ elif isinstance(c, mesonlib.File):
+ cmd_args.append(c.rel_to_builddir(self.environment.source_dir))
+ else:
+ cmd_args.append(c)
+
+ machine = self.environment.machines[exe_for_machine]
+ if machine.is_windows() or machine.is_cygwin():
+ extra_paths = self.determine_windows_extra_paths(exe, extra_bdeps or [])
+ else:
+ extra_paths = []
+
+ is_cross_built = not self.environment.machines.matches_build_machine(exe_for_machine)
+ if is_cross_built and self.environment.need_exe_wrapper():
+ exe_wrapper = self.environment.get_exe_wrapper()
+ if not exe_wrapper or not exe_wrapper.found():
+ msg = 'An exe_wrapper is needed but was not found. Please define one ' \
+ 'in the cross file and check the command and/or add it to PATH.'
+ raise MesonException(msg) + else: + if exe_cmd[0].endswith('.jar'): + exe_cmd = ['java', '-jar'] + exe_cmd + elif exe_cmd[0].endswith('.exe') and not (mesonlib.is_windows() or mesonlib.is_cygwin() or mesonlib.is_wsl()): + exe_cmd = ['mono'] + exe_cmd + exe_wrapper = None + + workdir = workdir or self.environment.get_build_dir() + return ExecutableSerialisation(exe_cmd + cmd_args, env, + exe_wrapper, workdir, + extra_paths, capture, feed, tag, verbose, installdir_map) + + def as_meson_exe_cmdline(self, exe: T.Union[str, mesonlib.File, build.BuildTarget, build.CustomTarget, programs.ExternalProgram], + cmd_args: T.Sequence[T.Union[str, mesonlib.File, build.BuildTarget, build.CustomTarget, programs.ExternalProgram]], + workdir: T.Optional[str] = None, + extra_bdeps: T.Optional[T.List[build.BuildTarget]] = None, + capture: T.Optional[bool] = None, + feed: T.Optional[bool] = None, + force_serialize: bool = False, + env: T.Optional[build.EnvironmentVariables] = None, + verbose: bool = False) -> T.Tuple[T.Sequence[T.Union[str, File, build.Target, programs.ExternalProgram]], str]: + ''' + Serialize an executable for running with a generator or a custom target + ''' + cmd: T.List[T.Union[str, mesonlib.File, build.BuildTarget, build.CustomTarget, programs.ExternalProgram]] = [] + cmd.append(exe) + cmd.extend(cmd_args) + es = self.get_executable_serialisation(cmd, workdir, extra_bdeps, capture, feed, env, verbose=verbose) + reasons: T.List[str] = [] + if es.extra_paths: + reasons.append('to set PATH') + + if es.exe_wrapper: + reasons.append('to use exe_wrapper') + + if workdir: + reasons.append('to set workdir') + + if any('\n' in c for c in es.cmd_args): + reasons.append('because command contains newlines') + + if env and env.varnames: + reasons.append('to set env') + + # force_serialize passed to this function means that the VS backend has + # decided it absolutely cannot use real commands. This is "always", + # because it's not clear what will work (other than compilers) and so + # we don't bother to handle a variety of common cases that probably do + # work. + # + # It's also overridden for a few conditions that can't be handled + # inside a command line + + can_use_env = not force_serialize + force_serialize = force_serialize or bool(reasons) + + if capture: + reasons.append('to capture output') + if feed: + reasons.append('to feed input') + + if can_use_env and reasons == ['to set env'] and shutil.which('env'): + envlist = [] + for k, v in env.get_env({}).items(): + envlist.append(f'{k}={v}') + return ['env'] + envlist + es.cmd_args, ', '.join(reasons) + + if not force_serialize: + if not capture and not feed: + return es.cmd_args, '' + args: T.List[str] = [] + if capture: + args += ['--capture', str(capture)] + if feed: + args += ['--feed', str(feed)] + + return ( + self.environment.get_build_command() + ['--internal', 'exe'] + args + ['--'] + es.cmd_args, + ', '.join(reasons) + ) + + if isinstance(exe, (programs.ExternalProgram, + build.BuildTarget, build.CustomTarget)): + basename = os.path.basename(exe.name) + elif isinstance(exe, mesonlib.File): + basename = os.path.basename(exe.fname) + else: + basename = os.path.basename(exe) + + # Can't just use exe.name here; it will likely be run more than once + # Take a digest of the cmd args, env, workdir, capture, and feed. This + # avoids collisions and also makes the name deterministic over + # regenerations which avoids a rebuild by Ninja because the cmdline + # stays the same. 
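+ # The scratch file created below thus ends up named
+ # meson_exe_<basename>_<sha1 digest>.dat.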
+ hasher = hashlib.sha1() + if es.env: + es.env.hash(hasher) + hasher.update(bytes(str(es.cmd_args), encoding='utf-8')) + hasher.update(bytes(str(es.workdir), encoding='utf-8')) + hasher.update(bytes(str(capture), encoding='utf-8')) + hasher.update(bytes(str(feed), encoding='utf-8')) + digest = hasher.hexdigest() + scratch_file = f'meson_exe_{basename}_{digest}.dat' + exe_data = os.path.join(self.environment.get_scratch_dir(), scratch_file) + with open(exe_data, 'wb') as f: + pickle.dump(es, f) + return (self.environment.get_build_command() + ['--internal', 'exe', '--unpickle', exe_data], + ', '.join(reasons)) + + def serialize_tests(self) -> T.Tuple[str, str]: + test_data = os.path.join(self.environment.get_scratch_dir(), 'meson_test_setup.dat') + with open(test_data, 'wb') as datafile: + self.write_test_file(datafile) + benchmark_data = os.path.join(self.environment.get_scratch_dir(), 'meson_benchmark_setup.dat') + with open(benchmark_data, 'wb') as datafile: + self.write_benchmark_file(datafile) + return test_data, benchmark_data + + def determine_linker_and_stdlib_args(self, target: build.BuildTarget) -> T.Tuple[T.Union['Compiler', 'StaticLinker'], T.List[str]]: + ''' + If we're building a static library, there is only one static linker. + Otherwise, we query the target for the dynamic linker. + ''' + if isinstance(target, build.StaticLibrary): + return self.build.static_linker[target.for_machine], [] + l, stdlib_args = target.get_clink_dynamic_linker_and_stdlibs() + return l, stdlib_args + + @staticmethod + def _libdir_is_system(libdir: str, compilers: T.Mapping[str, 'Compiler'], env: 'Environment') -> bool: + libdir = os.path.normpath(libdir) + for cc in compilers.values(): + if libdir in cc.get_library_dirs(env): + return True + return False + + def get_external_rpath_dirs(self, target: build.BuildTarget) -> T.Set[str]: + args: T.List[str] = [] + for lang in LANGUAGES_USING_LDFLAGS: + try: + e = self.environment.coredata.get_external_link_args(target.for_machine, lang) + if isinstance(e, str): + args.append(e) + else: + args.extend(e) + except Exception: + pass + return self.get_rpath_dirs_from_link_args(args) + + @staticmethod + def get_rpath_dirs_from_link_args(args: T.List[str]) -> T.Set[str]: + dirs: T.Set[str] = set() + # Match rpath formats: + # -Wl,-rpath= + # -Wl,-rpath, + rpath_regex = re.compile(r'-Wl,-rpath[=,]([^,]+)') + # Match solaris style compat runpath formats: + # -Wl,-R + # -Wl,-R, + runpath_regex = re.compile(r'-Wl,-R[,]?([^,]+)') + # Match symbols formats: + # -Wl,--just-symbols= + # -Wl,--just-symbols, + symbols_regex = re.compile(r'-Wl,--just-symbols[=,]([^,]+)') + for arg in args: + rpath_match = rpath_regex.match(arg) + if rpath_match: + for dir in rpath_match.group(1).split(':'): + dirs.add(dir) + runpath_match = runpath_regex.match(arg) + if runpath_match: + for dir in runpath_match.group(1).split(':'): + # The symbols arg is an rpath if the path is a directory + if Path(dir).is_dir(): + dirs.add(dir) + symbols_match = symbols_regex.match(arg) + if symbols_match: + for dir in symbols_match.group(1).split(':'): + # Prevent usage of --just-symbols to specify rpath + if Path(dir).is_dir(): + raise MesonException(f'Invalid arg for --just-symbols, {dir} is a directory.') + return dirs + + @lru_cache(maxsize=None) + def rpaths_for_non_system_absolute_shared_libraries(self, target: build.BuildTarget, exclude_system: bool = True) -> 'ImmutableListProtocol[str]': + paths: OrderedSet[str] = OrderedSet() + srcdir = self.environment.get_source_dir() + + for dep in 
target.external_deps: + if dep.type_name not in {'library', 'pkgconfig'}: + continue + for libpath in dep.link_args: + # For all link args that are absolute paths to a library file, add RPATH args + if not os.path.isabs(libpath): + continue + libdir = os.path.dirname(libpath) + if exclude_system and self._libdir_is_system(libdir, target.compilers, self.environment): + # No point in adding system paths. + continue + # Don't remove rpaths specified in LDFLAGS. + if libdir in self.get_external_rpath_dirs(target): + continue + # Windows doesn't support rpaths, but we use this function to + # emulate rpaths by setting PATH + # .dll is there for mingw gcc + if os.path.splitext(libpath)[1] not in {'.dll', '.lib', '.so', '.dylib'}: + continue + + try: + commonpath = os.path.commonpath((libdir, srcdir)) + except ValueError: # when paths are on different drives on Windows + commonpath = '' + + if commonpath == srcdir: + rel_to_src = libdir[len(srcdir) + 1:] + assert not os.path.isabs(rel_to_src), f'rel_to_src: {rel_to_src} is absolute' + paths.add(os.path.join(self.build_to_src, rel_to_src)) + else: + paths.add(libdir) + # Don't remove rpaths specified by the dependency + paths.difference_update(self.get_rpath_dirs_from_link_args(dep.link_args)) + for i in chain(target.link_targets, target.link_whole_targets): + if isinstance(i, build.BuildTarget): + paths.update(self.rpaths_for_non_system_absolute_shared_libraries(i, exclude_system)) + return list(paths) + + # This may take other types + def determine_rpath_dirs(self, target: T.Union[build.BuildTarget, build.CustomTarget, build.CustomTargetIndex] + ) -> T.Tuple[str, ...]: + result: OrderedSet[str] + if self.environment.coredata.get_option(OptionKey('layout')) == 'mirror': + # Need a copy here + result = OrderedSet(target.get_link_dep_subdirs()) + else: + result = OrderedSet() + result.add('meson-out') + if isinstance(target, build.BuildTarget): + result.update(self.rpaths_for_non_system_absolute_shared_libraries(target)) + target.rpath_dirs_to_remove.update([d.encode('utf-8') for d in result]) + return tuple(result) + + @staticmethod + def canonicalize_filename(fname: str) -> str: + parts = Path(fname).parts + hashed = '' + if len(parts) > 5: + temp = '/'.join(parts[-5:]) + # is it shorter to hash the beginning of the path? + if len(fname) > len(temp) + 41: + hashed = hashlib.sha1(fname.encode('utf-8')).hexdigest() + '_' + fname = temp + for ch in ('/', '\\', ':'): + fname = fname.replace(ch, '_') + return hashed + fname + + def object_filename_from_source(self, target: build.BuildTarget, source: 'FileOrString') -> str: + assert isinstance(source, mesonlib.File) + if isinstance(target, build.CompileTarget): + return target.sources_map[source] + build_dir = self.environment.get_build_dir() + rel_src = source.rel_to_builddir(self.build_to_src) + + # foo.vala files compile down to foo.c and then foo.c.o, not foo.vala.o + if rel_src.endswith(('.vala', '.gs')): + # See description in generate_vala_compile for this logic. + if source.is_built: + if os.path.isabs(rel_src): + rel_src = rel_src[len(build_dir) + 1:] + rel_src = os.path.relpath(rel_src, self.get_target_private_dir(target)) + else: + rel_src = os.path.basename(rel_src) + # A meson- prefixed directory is reserved; hopefully no-one creates a file name with such a weird prefix. 
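+ # For example, a non-built source src/foo.vala reduces to its basename,
+ # so the name used to derive the object file is meson-generated_foo.c.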
+ gen_source = 'meson-generated_' + rel_src[:-5] + '.c'
+ elif source.is_built:
+ if os.path.isabs(rel_src):
+ rel_src = rel_src[len(build_dir) + 1:]
+ targetdir = self.get_target_private_dir(target)
+ # A meson- prefixed directory is reserved; hopefully no-one creates a file name with such a weird prefix.
+ gen_source = 'meson-generated_' + os.path.relpath(rel_src, targetdir)
+ else:
+ if os.path.isabs(rel_src):
+ # Use the absolute path directly to avoid file name conflicts
+ gen_source = rel_src
+ else:
+ gen_source = os.path.relpath(os.path.join(build_dir, rel_src),
+ os.path.join(self.environment.get_source_dir(), target.get_subdir()))
+ machine = self.environment.machines[target.for_machine]
+ return self.canonicalize_filename(gen_source) + '.' + machine.get_object_suffix()
+
+ def _determine_ext_objs(self, extobj: 'build.ExtractedObjects', proj_dir_to_build_root: str) -> T.List[str]:
+ result: T.List[str] = []
+
+ targetdir = self.get_target_private_dir(extobj.target)
+
+ # Merge sources and generated sources
+ raw_sources = list(extobj.srclist)
+ for gensrc in extobj.genlist:
+ for r in gensrc.get_outputs():
+ path = self.get_target_generated_dir(extobj.target, gensrc, r)
+ dirpart, fnamepart = os.path.split(path)
+ raw_sources.append(File(True, dirpart, fnamepart))
+
+ # Filter out headers and all non-source files
+ sources: T.List['FileOrString'] = []
+ for s in raw_sources:
+ if self.environment.is_source(s):
+ sources.append(s)
+ elif self.environment.is_object(s):
+ result.append(s.relative_name())
+
+ # MSVC generates an object file for the PCH
+ if extobj.pch:
+ for lang, pch in extobj.target.pch.items():
+ compiler = extobj.target.compilers[lang]
+ if compiler.get_argument_syntax() == 'msvc':
+ objname = self.get_msvc_pch_objname(lang, pch)
+ result.append(os.path.join(proj_dir_to_build_root, targetdir, objname))
+
+ # extobj could contain only objects and no sources
+ if not sources:
+ return result
+
+ # With unity builds, sources don't map directly to objects; we only
+ # support extracting all the objects in this mode, so just return all
+ # object files.
+ if extobj.target.is_unity:
+ compsrcs = classify_unity_sources(extobj.target.compilers.values(), sources)
+ sources = []
+ unity_size = extobj.target.get_option(OptionKey('unity_size'))
+ assert isinstance(unity_size, int), 'for mypy'
+
+ for comp, srcs in compsrcs.items():
+ if comp.language in LANGS_CANT_UNITY:
+ sources += srcs
+ continue
+ for i in range(len(srcs) // unity_size + 1):
+ _src = self.get_unity_source_file(extobj.target,
+ comp.get_default_suffix(), i)
+ sources.append(_src)
+
+ for osrc in sources:
+ objname = self.object_filename_from_source(extobj.target, osrc)
+ objpath = os.path.join(proj_dir_to_build_root, targetdir, objname)
+ result.append(objpath)
+
+ return result
+
+ def get_pch_include_args(self, compiler: 'Compiler', target: build.BuildTarget) -> T.List[str]:
+ args: T.List[str] = []
+ pchpath = self.get_target_private_dir(target)
+ includeargs = compiler.get_include_args(pchpath, False)
+ p = target.get_pch(compiler.get_language())
+ if p:
+ args += compiler.get_pch_use_args(pchpath, p[0])
+ return includeargs + args
+
+ def get_msvc_pch_objname(self, lang: str, pch: T.List[str]) -> str:
+ if len(pch) == 1:
+ # Same name as in create_msvc_pch_implementation() below.
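+ # e.g. a single C++ PCH yields meson_pch-cpp.obj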
+ return f'meson_pch-{lang}.obj' + return os.path.splitext(pch[1])[0] + '.obj' + + def create_msvc_pch_implementation(self, target: build.BuildTarget, lang: str, pch_header: str) -> str: + # We have to include the language in the file name, otherwise + # pch.c and pch.cpp will both end up as pch.obj in VS backends. + impl_name = f'meson_pch-{lang}.{lang}' + pch_rel_to_build = os.path.join(self.get_target_private_dir(target), impl_name) + # Make sure to prepend the build dir, since the working directory is + # not defined. Otherwise, we might create the file in the wrong path. + pch_file = os.path.join(self.build_dir, pch_rel_to_build) + os.makedirs(os.path.dirname(pch_file), exist_ok=True) + + content = f'#include "{os.path.basename(pch_header)}"' + pch_file_tmp = pch_file + '.tmp' + with open(pch_file_tmp, 'w', encoding='utf-8') as f: + f.write(content) + mesonlib.replace_if_different(pch_file, pch_file_tmp) + return pch_rel_to_build + + def target_uses_pch(self, target: build.BuildTarget) -> bool: + try: + return T.cast('bool', target.get_option(OptionKey('b_pch'))) + except KeyError: + return False + + @staticmethod + def escape_extra_args(args: T.List[str]) -> T.List[str]: + # all backslashes in defines are doubly-escaped + extra_args: T.List[str] = [] + for arg in args: + if arg.startswith(('-D', '/D')): + arg = arg.replace('\\', '\\\\') + extra_args.append(arg) + + return extra_args + + def get_no_stdlib_args(self, target: 'build.BuildTarget', compiler: 'Compiler') -> T.List[str]: + if compiler.language in self.build.stdlibs[target.for_machine]: + return compiler.get_no_stdinc_args() + return [] + + def generate_basic_compiler_args(self, target: build.BuildTarget, compiler: 'Compiler', no_warn_args: bool = False) -> 'CompilerArgs': + # Create an empty commands list, and start adding arguments from + # various sources in the order in which they must override each other + # starting from hard-coded defaults followed by build options and so on. + commands = compiler.compiler_args() + + copt_proxy = target.get_options() + # First, the trivial ones that are impossible to override. + # + # Add -nostdinc/-nostdinc++ if needed; can't be overridden + commands += self.get_no_stdlib_args(target, compiler) + # Add things like /NOLOGO or -pipe; usually can't be overridden + commands += compiler.get_always_args() + # Only add warning-flags by default if the buildtype enables it, and if + # we weren't explicitly asked to not emit warnings (for Vala, f.ex) + if no_warn_args: + commands += compiler.get_no_warn_args() + else: + # warning_level is a string, but mypy can't determine that + commands += compiler.get_warn_args(T.cast('str', target.get_option(OptionKey('warning_level')))) + # Add -Werror if werror=true is set in the build options set on the + # command-line or default_options inside project(). This only sets the + # action to be done for warnings if/when they are emitted, so it's ok + # to set it after get_no_warn_args() or get_warn_args(). + if target.get_option(OptionKey('werror')): + commands += compiler.get_werror_args() + # Add compile args for c_* or cpp_* build options set on the + # command-line or default_options inside project(). + commands += compiler.get_option_compile_args(copt_proxy) + + # Add buildtype args: optimization level, debugging, etc. 
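+ # For instance, buildtype=debugoptimized implies optimization level '2'
+ # and debug=true, i.e. roughly `-O2 -g` with GCC-like compilers.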
+ buildtype = target.get_option(OptionKey('buildtype'))
+ assert isinstance(buildtype, str), 'for mypy'
+ commands += compiler.get_buildtype_args(buildtype)
+
+ optimization = target.get_option(OptionKey('optimization'))
+ assert isinstance(optimization, str), 'for mypy'
+ commands += compiler.get_optimization_args(optimization)
+
+ debug = target.get_option(OptionKey('debug'))
+ assert isinstance(debug, bool), 'for mypy'
+ commands += compiler.get_debug_args(debug)
+
+ # Add compile args added using add_project_arguments()
+ commands += self.build.get_project_args(compiler, target.subproject, target.for_machine)
+ # Add compile args added using add_global_arguments()
+ # These override per-project arguments
+ commands += self.build.get_global_args(compiler, target.for_machine)
+ # Compile args added from the env: CFLAGS/CXXFLAGS, etc, or the cross
+ # file. We want these to override all the defaults, but not the
+ # per-target compile args.
+ commands += self.environment.coredata.get_external_args(target.for_machine, compiler.get_language())
+ # Using both /Z7 or /ZI and /Zi at the same time produces a compiler warning.
+ # We do not add /Z7 or /ZI by default. If it is being used it is because the user has explicitly enabled it.
+ # /Zi needs to be removed in that case to avoid cl's warning to that effect (D9025 : overriding '/Zi' with '/ZI')
+ if ('/Zi' in commands) and (('/ZI' in commands) or ('/Z7' in commands)):
+ commands.remove('/Zi')
+ # Always set -fPIC for shared libraries
+ if isinstance(target, build.SharedLibrary):
+ commands += compiler.get_pic_args()
+ # Set -fPIC for static libraries by default unless explicitly disabled
+ if isinstance(target, build.StaticLibrary) and target.pic:
+ commands += compiler.get_pic_args()
+ elif isinstance(target, (build.StaticLibrary, build.Executable)) and target.pie:
+ commands += compiler.get_pie_args()
+ # Add compile args needed to find external dependencies. Link args are
+ # added while generating the link command.
+ # NOTE: We must preserve the order in which external deps are
+ # specified, so we reverse the list before iterating over it.
+ for dep in reversed(target.get_external_deps()):
+ if not dep.found():
+ continue
+
+ if compiler.language == 'vala':
+ if dep.type_name == 'pkgconfig':
+ assert isinstance(dep, dependencies.ExternalDependency)
+ if dep.name == 'glib-2.0' and dep.version_reqs is not None:
+ for req in dep.version_reqs:
+ if req.startswith(('>=', '==')):
+ commands += ['--target-glib', req[2:]]
+ break
+ commands += ['--pkg', dep.name]
+ elif isinstance(dep, dependencies.ExternalLibrary):
+ commands += dep.get_link_args('vala')
+ else:
+ commands += compiler.get_dependency_compile_args(dep)
+ # Qt needs -fPIC for executables
+ # XXX: We should move to -fPIC for all executables
+ if isinstance(target, build.Executable):
+ commands += dep.get_exe_args(compiler)
+ # For 'automagic' deps: Boost and GTest. Also dependency('threads').
+ # pkg-config puts the thread flags itself via `Cflags:`
+ # Fortran requires extra include directives.
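+ # (Fortran module files land in each target's private directory,
+ # which is why the include args below point at the private dirs of
+ # the link targets.)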
+ if compiler.language == 'fortran':
+ for lt in chain(target.link_targets, target.link_whole_targets):
+ priv_dir = self.get_target_private_dir(lt)
+ commands += compiler.get_include_args(priv_dir, False)
+ return commands
+
+ def build_target_link_arguments(self, compiler: 'Compiler', deps: T.List[build.Target]) -> T.List[str]:
+ args: T.List[str] = []
+ for d in deps:
+ if not d.is_linkable_target():
+ raise RuntimeError(f'Tried to link with a non-library target "{d.get_basename()}".')
+ arg = self.get_target_filename_for_linking(d)
+ if not arg:
+ continue
+ if compiler.get_language() == 'd':
+ arg = '-Wl,' + arg
+ else:
+ arg = compiler.get_linker_lib_prefix() + arg
+ args.append(arg)
+ return args
+
+ def get_mingw_extra_paths(self, target: build.BuildTarget) -> T.List[str]:
+ paths: OrderedSet[str] = OrderedSet()
+ # The cross bindir
+ root = self.environment.properties[target.for_machine].get_root()
+ if root:
+ paths.add(os.path.join(root, 'bin'))
+ # The toolchain bindir
+ sys_root = self.environment.properties[target.for_machine].get_sys_root()
+ if sys_root:
+ paths.add(os.path.join(sys_root, 'bin'))
+ # Get program and library dirs from all target compilers
+ if isinstance(target, build.BuildTarget):
+ for cc in target.compilers.values():
+ paths.update(cc.get_program_dirs(self.environment))
+ paths.update(cc.get_library_dirs(self.environment))
+ return list(paths)
+
+ @classmethod
+ @lru_cache(maxsize=None)
+ def extract_dll_paths(cls, target: build.BuildTarget) -> T.Set[str]:
+ """Find paths to all DLLs needed for a given target, since
+ we link against import libs, and we don't know the actual
+ path of the DLLs.
+
+ 1. If there are DLLs in the same directory as the .lib file, use it
+ 2. If there is a sibling directory named 'bin' with DLLs in it, use it
+ """
+ results = set()
+ for dep in target.external_deps:
+
+ if dep.type_name == 'pkgconfig':
+ # If by chance pkg-config knows the bin dir...
+ bindir = dep.get_pkgconfig_variable('bindir', [], default='')
+ if bindir:
+ results.add(bindir)
+
+ for link_arg in dep.link_args:
+ if link_arg.startswith(('-l', '-L')):
+ link_arg = link_arg[2:]
+ p = Path(link_arg)
+ if not p.is_absolute():
+ continue
+
+ try:
+ p = p.resolve(strict=True)
+ except FileNotFoundError:
+ continue
+
+ for _ in p.parent.glob('*.dll'):
+ # path contains dlls
+ results.add(str(p.parent))
+ break
+
+ else:
+ if p.is_file():
+ p = p.parent
+ # Heuristic: replace *last* occurrence of '/lib'
+ binpath = Path('/bin'.join(p.as_posix().rsplit('/lib', maxsplit=1)))
+ for _ in binpath.glob('*.dll'):
+ results.add(str(binpath))
+ break
+
+ for i in chain(target.link_targets, target.link_whole_targets):
+ if isinstance(i, build.BuildTarget):
+ results.update(cls.extract_dll_paths(i))
+
+ return results
+
+ def determine_windows_extra_paths(
+ self, target: T.Union[build.BuildTarget, build.CustomTarget, programs.ExternalProgram, mesonlib.File, str],
+ extra_bdeps: T.Sequence[T.Union[build.BuildTarget, build.CustomTarget]]) -> T.List[str]:
+ """On Windows there is no such thing as an rpath.
+
+ We must determine all locations of DLLs that this exe
+ links to and return them so they can be used in unit
+ tests.
+ """ + result: T.Set[str] = set() + prospectives: T.Set[build.BuildTargetTypes] = set() + if isinstance(target, build.BuildTarget): + prospectives.update(target.get_transitive_link_deps()) + # External deps + result.update(self.extract_dll_paths(target)) + + for bdep in extra_bdeps: + prospectives.add(bdep) + if isinstance(bdep, build.BuildTarget): + prospectives.update(bdep.get_transitive_link_deps()) + # Internal deps + for ld in prospectives: + dirseg = os.path.join(self.environment.get_build_dir(), self.get_target_dir(ld)) + result.add(dirseg) + if (isinstance(target, build.BuildTarget) and + not self.environment.machines.matches_build_machine(target.for_machine)): + result.update(self.get_mingw_extra_paths(target)) + return list(result) + + def write_benchmark_file(self, datafile: T.BinaryIO) -> None: + self.write_test_serialisation(self.build.get_benchmarks(), datafile) + + def write_test_file(self, datafile: T.BinaryIO) -> None: + self.write_test_serialisation(self.build.get_tests(), datafile) + + def create_test_serialisation(self, tests: T.List['Test']) -> T.List[TestSerialisation]: + arr: T.List[TestSerialisation] = [] + for t in sorted(tests, key=lambda tst: -1 * tst.priority): + exe = t.get_exe() + if isinstance(exe, programs.ExternalProgram): + cmd = exe.get_command() + else: + cmd = [os.path.join(self.environment.get_build_dir(), self.get_target_filename(exe))] + if isinstance(exe, (build.BuildTarget, programs.ExternalProgram)): + test_for_machine = exe.for_machine + else: + # E.g. an external verifier or simulator program run on a generated executable. + # Can always be run without a wrapper. + test_for_machine = MachineChoice.BUILD + + # we allow passing compiled executables to tests, which may be cross built. + # We need to consider these as well when considering whether the target is cross or not. 
+ for a in t.cmd_args: + if isinstance(a, build.BuildTarget): + if a.for_machine is MachineChoice.HOST: + test_for_machine = MachineChoice.HOST + break + + is_cross = self.environment.is_cross_build(test_for_machine) + exe_wrapper = self.environment.get_exe_wrapper() + machine = self.environment.machines[exe.for_machine] + if machine.is_windows() or machine.is_cygwin(): + extra_bdeps: T.List[T.Union[build.BuildTarget, build.CustomTarget]] = [] + if isinstance(exe, build.CustomTarget): + extra_bdeps = list(exe.get_transitive_build_target_deps()) + extra_paths = self.determine_windows_extra_paths(exe, extra_bdeps) + for a in t.cmd_args: + if isinstance(a, build.BuildTarget): + for p in self.determine_windows_extra_paths(a, []): + if p not in extra_paths: + extra_paths.append(p) + else: + extra_paths = [] + + cmd_args: T.List[str] = [] + depends: T.Set[build.Target] = set(t.depends) + if isinstance(exe, build.Target): + depends.add(exe) + for a in t.cmd_args: + if isinstance(a, build.Target): + depends.add(a) + elif isinstance(a, build.CustomTargetIndex): + depends.add(a.target) + + if isinstance(a, mesonlib.File): + a = os.path.join(self.environment.get_build_dir(), a.rel_to_builddir(self.build_to_src)) + cmd_args.append(a) + elif isinstance(a, str): + cmd_args.append(a) + elif isinstance(a, (build.Target, build.CustomTargetIndex)): + cmd_args.extend(self.construct_target_rel_paths(a, t.workdir)) + else: + raise MesonException('Bad object in test command.') + + t_env = copy.deepcopy(t.env) + if not machine.is_windows() and not machine.is_cygwin() and not machine.is_darwin(): + ld_lib_path: T.Set[str] = set() + for d in depends: + if isinstance(d, build.BuildTarget): + for l in d.get_all_link_deps(): + if isinstance(l, build.SharedLibrary): + ld_lib_path.add(os.path.join(self.environment.get_build_dir(), l.get_subdir())) + if ld_lib_path: + t_env.prepend('LD_LIBRARY_PATH', list(ld_lib_path), ':') + + ts = TestSerialisation(t.get_name(), t.project_name, t.suite, cmd, is_cross, + exe_wrapper, self.environment.need_exe_wrapper(), + t.is_parallel, cmd_args, t_env, + t.should_fail, t.timeout, t.workdir, + extra_paths, t.protocol, t.priority, + isinstance(exe, build.Target), + isinstance(exe, build.Executable), + [x.get_id() for x in depends], + self.environment.coredata.version, + t.verbose) + arr.append(ts) + return arr + + def write_test_serialisation(self, tests: T.List['Test'], datafile: T.BinaryIO) -> None: + pickle.dump(self.create_test_serialisation(tests), datafile) + + def construct_target_rel_paths(self, t: T.Union[build.Target, build.CustomTargetIndex], workdir: T.Optional[str]) -> T.List[str]: + target_dir = self.get_target_dir(t) + # ensure that test executables can be run when passed as arguments + if isinstance(t, build.Executable) and workdir is None: + target_dir = target_dir or '.' 
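+ # joining with '.' produces './prog' rather than a bare 'prog',
+ # which would otherwise be looked up on PATH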
+ + if isinstance(t, build.BuildTarget): + outputs = [t.get_filename()] + else: + assert isinstance(t, (build.CustomTarget, build.CustomTargetIndex)) + outputs = t.get_outputs() + + outputs = [os.path.join(target_dir, x) for x in outputs] + if workdir is not None: + assert os.path.isabs(workdir) + outputs = [os.path.join(self.environment.get_build_dir(), x) for x in outputs] + outputs = [os.path.relpath(x, workdir) for x in outputs] + return outputs + + def generate_depmf_install(self, d: InstallData) -> None: + depmf_path = self.build.dep_manifest_name + if depmf_path is None: + option_dir = self.environment.coredata.get_option(OptionKey('licensedir')) + assert isinstance(option_dir, str), 'for mypy' + if option_dir: + depmf_path = os.path.join(option_dir, 'depmf.json') + else: + return + ifilename = os.path.join(self.environment.get_build_dir(), 'depmf.json') + ofilename = os.path.join(self.environment.get_prefix(), depmf_path) + odirname = os.path.join(self.environment.get_prefix(), os.path.dirname(depmf_path)) + out_name = os.path.join('{prefix}', depmf_path) + out_dir = os.path.join('{prefix}', os.path.dirname(depmf_path)) + mfobj = {'type': 'dependency manifest', 'version': '1.0', + 'projects': {k: v.to_json() for k, v in self.build.dep_manifest.items()}} + with open(ifilename, 'w', encoding='utf-8') as f: + f.write(json.dumps(mfobj)) + # Copy file from, to, and with mode unchanged + d.data.append(InstallDataBase(ifilename, ofilename, out_name, None, '', + tag='devel', data_type='depmf')) + for m in self.build.dep_manifest.values(): + for ifilename, name in m.license_files: + ofilename = os.path.join(odirname, name.relative_name()) + out_name = os.path.join(out_dir, name.relative_name()) + d.data.append(InstallDataBase(ifilename, ofilename, out_name, None, + m.subproject, tag='devel', data_type='depmf')) + + def get_regen_filelist(self) -> T.List[str]: + '''List of all files whose alteration means that the build + definition needs to be regenerated.''' + deps = OrderedSet([str(Path(self.build_to_src) / df) + for df in self.interpreter.get_build_def_files()]) + if self.environment.is_cross_build(): + deps.update(self.environment.coredata.cross_files) + deps.update(self.environment.coredata.config_files) + deps.add('meson-private/coredata.dat') + self.check_clock_skew(deps) + return list(deps) + + def generate_regen_info(self) -> None: + deps = self.get_regen_filelist() + regeninfo = RegenInfo(self.environment.get_source_dir(), + self.environment.get_build_dir(), + deps) + filename = os.path.join(self.environment.get_scratch_dir(), + 'regeninfo.dump') + with open(filename, 'wb') as f: + pickle.dump(regeninfo, f) + + def check_clock_skew(self, file_list: T.Iterable[str]) -> None: + # If a file that leads to reconfiguration has a time + # stamp in the future, it will trigger an eternal reconfigure + # loop. + import time + now = time.time() + for f in file_list: + absf = os.path.join(self.environment.get_build_dir(), f) + ftime = os.path.getmtime(absf) + delta = ftime - now + # On Windows disk time stamps sometimes point + # to the future by a minuscule amount, less than + # 0.001 seconds. I don't know why. + if delta > 0.001: + raise MesonException(f'Clock skew detected. 
File {absf} has a time stamp {delta:.4f}s in the future.') + + def build_target_to_cmd_array(self, bt: T.Union[build.BuildTarget, programs.ExternalProgram]) -> T.List[str]: + if isinstance(bt, build.BuildTarget): + arr = [os.path.join(self.environment.get_build_dir(), self.get_target_filename(bt))] + else: + arr = bt.get_command() + return arr + + def replace_extra_args(self, args: T.List[str], genlist: 'build.GeneratedList') -> T.List[str]: + final_args: T.List[str] = [] + for a in args: + if a == '@EXTRA_ARGS@': + final_args += genlist.get_extra_args() + else: + final_args.append(a) + return final_args + + def replace_outputs(self, args: T.List[str], private_dir: str, output_list: T.List[str]) -> T.List[str]: + newargs: T.List[str] = [] + regex = re.compile(r'@OUTPUT(\d+)@') + for arg in args: + m = regex.search(arg) + while m is not None: + index = int(m.group(1)) + src = f'@OUTPUT{index}@' + arg = arg.replace(src, os.path.join(private_dir, output_list[index])) + m = regex.search(arg) + newargs.append(arg) + return newargs + + def get_build_by_default_targets(self) -> 'T.OrderedDict[str, T.Union[build.BuildTarget, build.CustomTarget]]': + result: 'T.OrderedDict[str, T.Union[build.BuildTarget, build.CustomTarget]]' = OrderedDict() + # Get all build and custom targets that must be built by default + for name, b in self.build.get_targets().items(): + if b.build_by_default: + result[name] = b + return result + + def get_testlike_targets(self, benchmark: bool = False) -> T.OrderedDict[str, T.Union[build.BuildTarget, build.CustomTarget]]: + result: T.OrderedDict[str, T.Union[build.BuildTarget, build.CustomTarget]] = OrderedDict() + targets = self.build.get_benchmarks() if benchmark else self.build.get_tests() + for t in targets: + exe = t.exe + if isinstance(exe, (build.CustomTarget, build.BuildTarget)): + result[exe.get_id()] = exe + for arg in t.cmd_args: + if not isinstance(arg, (build.CustomTarget, build.BuildTarget)): + continue + result[arg.get_id()] = arg + for dep in t.depends: + assert isinstance(dep, (build.CustomTarget, build.BuildTarget)) + result[dep.get_id()] = dep + return result + + @lru_cache(maxsize=None) + def get_custom_target_provided_by_generated_source(self, generated_source: build.CustomTarget) -> 'ImmutableListProtocol[str]': + libs: T.List[str] = [] + for f in generated_source.get_outputs(): + if self.environment.is_library(f): + libs.append(os.path.join(self.get_target_dir(generated_source), f)) + return libs + + @lru_cache(maxsize=None) + def get_custom_target_provided_libraries(self, target: T.Union[build.BuildTarget, build.CustomTarget]) -> 'ImmutableListProtocol[str]': + libs: T.List[str] = [] + for t in target.get_generated_sources(): + if not isinstance(t, build.CustomTarget): + continue + libs.extend(self.get_custom_target_provided_by_generated_source(t)) + return libs + + def get_custom_target_sources(self, target: build.CustomTarget) -> T.List[str]: + ''' + Custom target sources can be of various object types; strings, File, + BuildTarget, even other CustomTargets. + Returns the path to them relative to the build root directory. 
+ ''' + srcs: T.List[str] = [] + for i in target.get_sources(): + if isinstance(i, str): + fname = [os.path.join(self.build_to_src, target.subdir, i)] + elif isinstance(i, build.BuildTarget): + fname = [self.get_target_filename(i)] + elif isinstance(i, (build.CustomTarget, build.CustomTargetIndex)): + fname = [os.path.join(self.get_custom_target_output_dir(i), p) for p in i.get_outputs()] + elif isinstance(i, build.GeneratedList): + fname = [os.path.join(self.get_target_private_dir(target), p) for p in i.get_outputs()] + elif isinstance(i, build.ExtractedObjects): + fname = self.determine_ext_objs(i) + elif isinstance(i, programs.ExternalProgram): + assert i.found(), "This shouldn't be possible" + assert i.path is not None, 'for mypy' + fname = [i.path] + else: + fname = [i.rel_to_builddir(self.build_to_src)] + if target.absolute_paths: + fname = [os.path.join(self.environment.get_build_dir(), f) for f in fname] + srcs += fname + return srcs + + def get_custom_target_depend_files(self, target: build.CustomTarget, absolute_paths: bool = False) -> T.List[str]: + deps: T.List[str] = [] + for i in target.depend_files: + if isinstance(i, mesonlib.File): + if absolute_paths: + deps.append(i.absolute_path(self.environment.get_source_dir(), + self.environment.get_build_dir())) + else: + deps.append(i.rel_to_builddir(self.build_to_src)) + else: + if absolute_paths: + deps.append(os.path.join(self.environment.get_source_dir(), target.subdir, i)) + else: + deps.append(os.path.join(self.build_to_src, target.subdir, i)) + return deps + + def get_custom_target_output_dir(self, target: T.Union[build.Target, build.CustomTargetIndex]) -> str: + # The XCode backend is special. A target foo/bar does + # not go to ${BUILDDIR}/foo/bar but instead to + # ${BUILDDIR}/${BUILDTYPE}/foo/bar. + # Currently we set the include dir to be the former, + # and not the latter. Thus we need this extra customisation + # point. If in the future we make include dirs et al match + # ${BUILDDIR}/${BUILDTYPE} instead, this becomes unnecessary. + return self.get_target_dir(target) + + @lru_cache(maxsize=None) + def get_normpath_target(self, source: str) -> str: + return os.path.normpath(source) + + def get_custom_target_dirs(self, target: build.CustomTarget, compiler: 'Compiler', *, + absolute_path: bool = False) -> T.List[str]: + custom_target_include_dirs: T.List[str] = [] + for i in target.get_generated_sources(): + # Generator output goes into the target private dir which is + # already in the include paths list. Only custom targets have their + # own target build dir. + if not isinstance(i, (build.CustomTarget, build.CustomTargetIndex)): + continue + idir = self.get_normpath_target(self.get_custom_target_output_dir(i)) + if not idir: + idir = '.' 
+ if absolute_path: + idir = os.path.join(self.environment.get_build_dir(), idir) + if idir not in custom_target_include_dirs: + custom_target_include_dirs.append(idir) + return custom_target_include_dirs + + def get_custom_target_dir_include_args( + self, target: build.CustomTarget, compiler: 'Compiler', *, + absolute_path: bool = False) -> T.List[str]: + incs: T.List[str] = [] + for i in self.get_custom_target_dirs(target, compiler, absolute_path=absolute_path): + incs += compiler.get_include_args(i, False) + return incs + + def eval_custom_target_command( + self, target: build.CustomTarget, absolute_outputs: bool = False) -> \ + T.Tuple[T.List[str], T.List[str], T.List[str]]: + # We want the outputs to be absolute only when using the VS backend + # XXX: Maybe allow the vs backend to use relative paths too? + source_root = self.build_to_src + build_root = '.' + outdir = self.get_custom_target_output_dir(target) + if absolute_outputs: + source_root = self.environment.get_source_dir() + build_root = self.environment.get_build_dir() + outdir = os.path.join(self.environment.get_build_dir(), outdir) + outputs = [os.path.join(outdir, i) for i in target.get_outputs()] + inputs = self.get_custom_target_sources(target) + # Evaluate the command list + cmd: T.List[str] = [] + for i in target.command: + if isinstance(i, build.BuildTarget): + cmd += self.build_target_to_cmd_array(i) + continue + elif isinstance(i, build.CustomTarget): + # GIR scanner will attempt to execute this binary but + # it assumes that it is in path, so always give it a full path. + tmp = i.get_outputs()[0] + i = os.path.join(self.get_custom_target_output_dir(i), tmp) + elif isinstance(i, mesonlib.File): + i = i.rel_to_builddir(self.build_to_src) + if target.absolute_paths or absolute_outputs: + i = os.path.join(self.environment.get_build_dir(), i) + # FIXME: str types are blindly added ignoring 'target.absolute_paths' + # because we can't know if they refer to a file or just a string + elif isinstance(i, str): + if '@SOURCE_ROOT@' in i: + i = i.replace('@SOURCE_ROOT@', source_root) + if '@BUILD_ROOT@' in i: + i = i.replace('@BUILD_ROOT@', build_root) + if '@CURRENT_SOURCE_DIR@' in i: + i = i.replace('@CURRENT_SOURCE_DIR@', os.path.join(source_root, target.subdir)) + if '@DEPFILE@' in i: + if target.depfile is None: + msg = f'Custom target {target.name!r} has @DEPFILE@ but no depfile ' \ + 'keyword argument.' + raise MesonException(msg) + dfilename = os.path.join(outdir, target.depfile) + i = i.replace('@DEPFILE@', dfilename) + if '@PRIVATE_DIR@' in i: + if target.absolute_paths: + pdir = self.get_target_private_dir_abs(target) + else: + pdir = self.get_target_private_dir(target) + i = i.replace('@PRIVATE_DIR@', pdir) + else: + raise RuntimeError(f'Argument {i} is of unknown type {type(i)}') + cmd.append(i) + # Substitute the rest of the template strings + values = mesonlib.get_filenames_templates_dict(inputs, outputs) + cmd = mesonlib.substitute_values(cmd, values) + # This should not be necessary but removing it breaks + # building GStreamer on Windows. The underlying issue + # is problems with quoting backslashes on Windows + # which is the seventh circle of hell. The downside is + # that this breaks custom targets whose command lines + # have backslashes. If you try to fix this be sure to + # check that it does not break GST. + # + # The bug causes file paths such as c:\foo to get escaped + # into c:\\foo. 
+ # + # Unfortunately we have not been able to come up with an + # isolated test case for this so unless you manage to come up + # with one, the only way is to test the building with Gst's + # setup. Note this in your MR or ping us and we will get it + # fixed. + # + # https://github.com/mesonbuild/meson/pull/737 + cmd = [i.replace('\\', '/') for i in cmd] + return inputs, outputs, cmd + + def get_run_target_env(self, target: build.RunTarget) -> build.EnvironmentVariables: + env = target.env if target.env else build.EnvironmentVariables() + if target.default_env: + introspect_cmd = join_args(self.environment.get_build_command() + ['introspect']) + env.set('MESON_SOURCE_ROOT', [self.environment.get_source_dir()]) + env.set('MESON_BUILD_ROOT', [self.environment.get_build_dir()]) + env.set('MESON_SUBDIR', [target.subdir]) + env.set('MESONINTROSPECT', [introspect_cmd]) + return env + + def run_postconf_scripts(self) -> None: + from ..scripts.meson_exe import run_exe + introspect_cmd = join_args(self.environment.get_build_command() + ['introspect']) + env = {'MESON_SOURCE_ROOT': self.environment.get_source_dir(), + 'MESON_BUILD_ROOT': self.environment.get_build_dir(), + 'MESONINTROSPECT': introspect_cmd, + } + + for s in self.build.postconf_scripts: + name = ' '.join(s.cmd_args) + mlog.log(f'Running postconf script {name!r}') + run_exe(s, env) + + def create_install_data(self) -> InstallData: + strip_bin = self.environment.lookup_binary_entry(MachineChoice.HOST, 'strip') + if strip_bin is None: + if self.environment.is_cross_build(): + mlog.warning('Cross file does not specify strip binary, result will not be stripped.') + else: + # TODO go through all candidates, like others + strip_bin = [detect.defaults['strip'][0]] + + umask = self.environment.coredata.get_option(OptionKey('install_umask')) + assert isinstance(umask, (str, int)), 'for mypy' + + d = InstallData(self.environment.get_source_dir(), + self.environment.get_build_dir(), + self.environment.get_prefix(), + self.environment.get_libdir(), + strip_bin, + umask, + self.environment.get_build_command() + ['introspect'], + self.environment.coredata.version) + self.generate_depmf_install(d) + self.generate_target_install(d) + self.generate_header_install(d) + self.generate_man_install(d) + self.generate_emptydir_install(d) + self.generate_data_install(d) + self.generate_symlink_install(d) + self.generate_custom_install_script(d) + self.generate_subdir_install(d) + return d + + def create_install_data_files(self) -> None: + install_data_file = os.path.join(self.environment.get_scratch_dir(), 'install.dat') + with open(install_data_file, 'wb') as ofile: + pickle.dump(self.create_install_data(), ofile) + + def guess_install_tag(self, fname: str, outdir: T.Optional[str] = None) -> T.Optional[str]: + prefix = self.environment.get_prefix() + bindir = Path(prefix, self.environment.get_bindir()) + libdir = Path(prefix, self.environment.get_libdir()) + incdir = Path(prefix, self.environment.get_includedir()) + _ldir = self.environment.coredata.get_option(mesonlib.OptionKey('localedir')) + assert isinstance(_ldir, str), 'for mypy' + localedir = Path(prefix, _ldir) + dest_path = Path(prefix, outdir, Path(fname).name) if outdir else Path(prefix, fname) + if bindir in dest_path.parents: + return 'runtime' + elif libdir in dest_path.parents: + if dest_path.suffix in {'.a', '.pc'}: + return 'devel' + elif dest_path.suffix in {'.so', '.dll'}: + return 'runtime' + elif incdir in dest_path.parents: + return 'devel' + elif localedir in dest_path.parents: + 
return 'i18n' + elif 'installed-tests' in dest_path.parts: + return 'tests' + elif 'systemtap' in dest_path.parts: + return 'systemtap' + mlog.debug('Failed to guess install tag for', dest_path) + return None + + def generate_target_install(self, d: InstallData) -> None: + for t in self.build.get_targets().values(): + if not t.should_install(): + continue + outdirs, install_dir_names, custom_install_dir = t.get_install_dir() + # Sanity-check the outputs and install_dirs + num_outdirs, num_out = len(outdirs), len(t.get_outputs()) + if num_outdirs not in {1, num_out}: + m = 'Target {!r} has {} outputs: {!r}, but only {} "install_dir"s were found.\n' \ + "Pass 'false' for outputs that should not be installed and 'true' for\n" \ + 'using the default installation directory for an output.' + raise MesonException(m.format(t.name, num_out, t.get_outputs(), num_outdirs)) + assert len(t.install_tag) == num_out + install_mode = t.get_custom_install_mode() + # because mypy gets confused type narrowing in lists + first_outdir = outdirs[0] + first_outdir_name = install_dir_names[0] + + # Install the target output(s) + if isinstance(t, build.BuildTarget): + # In general, stripping static archives is tricky and full of pitfalls. + # Wholesale stripping of static archives with a command such as + # + # strip libfoo.a + # + # is broken, as GNU's strip will remove *every* symbol in a static + # archive. One solution to this nonintuitive behaviour would be + # to only strip local/debug symbols. Unfortunately, strip arguments + # are not specified by POSIX and therefore not portable. GNU's `-g` + # option (i.e. remove debug symbols) is equivalent to Apple's `-S`. + # + # TODO: Create GNUStrip/AppleStrip/etc. hierarchy for more + # fine-grained stripping of static archives. + can_strip = not isinstance(t, build.StaticLibrary) + should_strip = can_strip and t.get_option(OptionKey('strip')) + assert isinstance(should_strip, bool), 'for mypy' + # Install primary build output (library/executable/jar, etc) + # Done separately because of strip/aliases/rpath + if first_outdir is not False: + tag = t.install_tag[0] or ('devel' if isinstance(t, build.StaticLibrary) else 'runtime') + mappings = t.get_link_deps_mapping(d.prefix) + i = TargetInstallData(self.get_target_filename(t), first_outdir, + first_outdir_name, + should_strip, mappings, t.rpath_dirs_to_remove, + t.install_rpath, install_mode, t.subproject, + tag=tag, can_strip=can_strip) + d.targets.append(i) + + for alias, to, tag in t.get_aliases(): + alias = os.path.join(first_outdir, alias) + s = InstallSymlinkData(to, alias, first_outdir, t.subproject, tag, allow_missing=True) + d.symlinks.append(s) + + if isinstance(t, (build.SharedLibrary, build.SharedModule, build.Executable)): + # On toolchains/platforms that use an import library for + # linking (separate from the shared library with all the + # code), we need to install that too (dll.a/.lib). 
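+ # For example, MSVC emits foo.lib next to foo.dll, and MinGW emits
+ # libfoo.dll.a; both are only needed at link time, hence the
+ # 'devel' tag below.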
+ if t.get_import_filename():
+ if custom_install_dir:
+ # If the DLL is installed into a custom directory,
+ # install the import library into the same place so
+ # it doesn't go into a surprising place
+ implib_install_dir = first_outdir
+ else:
+ implib_install_dir = self.environment.get_import_lib_dir()
+ # Install the import library; may not exist for shared modules
+ i = TargetInstallData(self.get_target_filename_for_linking(t),
+ implib_install_dir, first_outdir_name,
+ False, {}, set(), '', install_mode,
+ t.subproject, optional=isinstance(t, build.SharedModule),
+ tag='devel')
+ d.targets.append(i)
+
+ if not should_strip and t.get_debug_filename():
+ debug_file = os.path.join(self.get_target_dir(t), t.get_debug_filename())
+ i = TargetInstallData(debug_file, first_outdir,
+ first_outdir_name,
+ False, {}, set(), '',
+ install_mode, t.subproject,
+ optional=True, tag='devel')
+ d.targets.append(i)
+ # Install secondary outputs. Only used for Vala right now.
+ if num_outdirs > 1:
+ for output, outdir, outdir_name, tag in zip(t.get_outputs()[1:], outdirs[1:], install_dir_names[1:], t.install_tag[1:]):
+ # User requested that we not install this output
+ if outdir is False:
+ continue
+ f = os.path.join(self.get_target_dir(t), output)
+ i = TargetInstallData(f, outdir, outdir_name, False, {}, set(), None,
+ install_mode, t.subproject,
+ tag=tag)
+ d.targets.append(i)
+ elif isinstance(t, build.CustomTarget):
+ # If only one install_dir is specified, assume that all
+ # outputs will be installed into it. This is for
+ # backwards-compatibility and because it makes sense to
+ # avoid repetition since this is a common use-case.
+ #
+ # To selectively install only some outputs, pass `false` as
+ # the install_dir for the corresponding output by index
+ #
+ # XXX: this wouldn't be needed if we just always matched outdirs
+ # to the length of outputs…
+ if num_outdirs == 1 and num_out > 1:
+ if first_outdir is not False:
+ for output, tag in zip(t.get_outputs(), t.install_tag):
+ tag = tag or self.guess_install_tag(output, first_outdir)
+ f = os.path.join(self.get_target_dir(t), output)
+ i = TargetInstallData(f, first_outdir, first_outdir_name,
+ False, {}, set(), None, install_mode,
+ t.subproject, optional=not t.build_by_default,
+ tag=tag)
+ d.targets.append(i)
+ else:
+ for output, outdir, outdir_name, tag in zip(t.get_outputs(), outdirs, install_dir_names, t.install_tag):
+ # User requested that we not install this output
+ if outdir is False:
+ continue
+ tag = tag or self.guess_install_tag(output, outdir)
+ f = os.path.join(self.get_target_dir(t), output)
+ i = TargetInstallData(f, outdir, outdir_name,
+ False, {}, set(), None, install_mode,
+ t.subproject, optional=not t.build_by_default,
+ tag=tag)
+ d.targets.append(i)
+
+ def generate_custom_install_script(self, d: InstallData) -> None:
+ d.install_scripts = self.build.install_scripts
+ for i in d.install_scripts:
+ if not i.tag:
+ mlog.debug('Failed to guess install tag for install script:', ' '.join(i.cmd_args))
+
+ def generate_header_install(self, d: InstallData) -> None:
+ incroot = self.environment.get_includedir()
+ headers = self.build.get_headers()
+
+ srcdir = self.environment.get_source_dir()
+ builddir = self.environment.get_build_dir()
+ for h in headers:
+ outdir = outdir_name = h.get_custom_install_dir()
+ if outdir is None:
+ subdir = h.get_install_subdir()
+ if subdir is None:
+ outdir = incroot
+ outdir_name = '{includedir}'
+ else:
+ outdir = os.path.join(incroot, subdir)
+ outdir_name =
os.path.join('{includedir}', subdir) + + for f in h.get_sources(): + if not isinstance(f, File): + raise MesonException(f'Invalid header type {f!r} can\'t be installed') + abspath = f.absolute_path(srcdir, builddir) + i = InstallDataBase(abspath, outdir, outdir_name, h.get_custom_install_mode(), h.subproject, tag='devel') + d.headers.append(i) + + def generate_man_install(self, d: InstallData) -> None: + manroot = self.environment.get_mandir() + man = self.build.get_man() + for m in man: + for f in m.get_sources(): + num = f.split('.')[-1] + subdir = m.get_custom_install_dir() + if subdir is None: + if m.locale: + subdir = os.path.join('{mandir}', m.locale, 'man' + num) + else: + subdir = os.path.join('{mandir}', 'man' + num) + fname = f.fname + if m.locale: # strip locale from file name + fname = fname.replace(f'.{m.locale}', '') + srcabs = f.absolute_path(self.environment.get_source_dir(), self.environment.get_build_dir()) + dstname = os.path.join(subdir, os.path.basename(fname)) + dstabs = dstname.replace('{mandir}', manroot) + i = InstallDataBase(srcabs, dstabs, dstname, m.get_custom_install_mode(), m.subproject, tag='man') + d.man.append(i) + + def generate_emptydir_install(self, d: InstallData) -> None: + emptydir: T.List[build.EmptyDir] = self.build.get_emptydir() + for e in emptydir: + tag = e.install_tag or self.guess_install_tag(e.path) + i = InstallEmptyDir(e.path, e.install_mode, e.subproject, tag) + d.emptydir.append(i) + + def generate_data_install(self, d: InstallData) -> None: + data = self.build.get_data() + srcdir = self.environment.get_source_dir() + builddir = self.environment.get_build_dir() + for de in data: + assert isinstance(de, build.Data) + subdir = de.install_dir + subdir_name = de.install_dir_name + if not subdir: + subdir = os.path.join(self.environment.get_datadir(), self.interpreter.build.project_name) + subdir_name = os.path.join('{datadir}', self.interpreter.build.project_name) + for src_file, dst_name in zip(de.sources, de.rename): + assert isinstance(src_file, mesonlib.File) + dst_abs = os.path.join(subdir, dst_name) + dstdir_name = os.path.join(subdir_name, dst_name) + tag = de.install_tag or self.guess_install_tag(dst_abs) + i = InstallDataBase(src_file.absolute_path(srcdir, builddir), dst_abs, dstdir_name, + de.install_mode, de.subproject, tag=tag, data_type=de.data_type) + d.data.append(i) + + def generate_symlink_install(self, d: InstallData) -> None: + links: T.List[build.SymlinkData] = self.build.get_symlinks() + for l in links: + assert isinstance(l, build.SymlinkData) + install_dir = l.install_dir + name_abs = os.path.join(install_dir, l.name) + tag = l.install_tag or self.guess_install_tag(name_abs) + s = InstallSymlinkData(l.target, name_abs, install_dir, l.subproject, tag) + d.symlinks.append(s) + + def generate_subdir_install(self, d: InstallData) -> None: + for sd in self.build.get_install_subdirs(): + if sd.from_source_dir: + from_dir = self.environment.get_source_dir() + else: + from_dir = self.environment.get_build_dir() + src_dir = os.path.join(from_dir, + sd.source_subdir, + sd.installable_subdir).rstrip('/') + dst_dir = os.path.join(self.environment.get_prefix(), + sd.install_dir) + dst_name = os.path.join('{prefix}', sd.install_dir) + if sd.install_dir != sd.install_dir_name: + dst_name = sd.install_dir_name + if not sd.strip_directory: + dst_dir = os.path.join(dst_dir, os.path.basename(src_dir)) + dst_name = os.path.join(dst_name, os.path.basename(src_dir)) + tag = sd.install_tag or 
self.guess_install_tag(os.path.join(sd.install_dir, 'dummy'))
+ i = SubdirInstallData(src_dir, dst_dir, dst_name, sd.install_mode, sd.exclude, sd.subproject, tag)
+ d.install_subdirs.append(i)
+
+ def get_introspection_data(self, target_id: str, target: build.Target) -> T.List['TargetIntrospectionData']:
+ '''
+ Returns a list of source dicts with the following format for a given target:
+ [
+ {
+ "language": "<LANG>",
+ "compiler": ["result", "of", "comp.get_exelist()"],
+ "parameters": ["list", "of", "compiler", "parameters"],
+ "sources": ["list", "of", "all", "<LANG>", "source", "files"],
+ "generated_sources": ["list", "of", "generated", "source", "files"]
+ }
+ ]
+
+ This is a limited fallback / reference implementation. The backend should override this method.
+ '''
+ if isinstance(target, (build.CustomTarget, build.BuildTarget)):
+ source_list_raw = target.sources
+ source_list = []
+ for j in source_list_raw:
+ if isinstance(j, mesonlib.File):
+ source_list += [j.absolute_path(self.source_dir, self.build_dir)]
+ elif isinstance(j, str):
+ source_list += [os.path.join(self.source_dir, j)]
+ elif isinstance(j, (build.CustomTarget, build.BuildTarget)):
+ source_list += [os.path.join(self.build_dir, j.get_subdir(), o) for o in j.get_outputs()]
+ source_list = [os.path.normpath(s) for s in source_list]
+
+ compiler: T.List[str] = []
+ if isinstance(target, build.CustomTarget):
+ tmp_compiler = target.command
+ for j in tmp_compiler:
+ if isinstance(j, mesonlib.File):
+ compiler += [j.absolute_path(self.source_dir, self.build_dir)]
+ elif isinstance(j, str):
+ compiler += [j]
+ elif isinstance(j, (build.BuildTarget, build.CustomTarget)):
+ compiler += j.get_outputs()
+ else:
+ raise RuntimeError(f'Type "{type(j).__name__}" is not supported in get_introspection_data. This is a bug')
+
+ return [{
+ 'language': 'unknown',
+ 'compiler': compiler,
+ 'parameters': [],
+ 'sources': source_list,
+ 'generated_sources': []
+ }]
+
+ return []
+
+ def get_devenv(self) -> build.EnvironmentVariables:
+ env = build.EnvironmentVariables()
+ extra_paths = set()
+ library_paths = set()
+ build_machine = self.environment.machines[MachineChoice.BUILD]
+ host_machine = self.environment.machines[MachineChoice.HOST]
+ need_wine = not build_machine.is_windows() and host_machine.is_windows()
+ for t in self.build.get_targets().values():
+ in_default_dir = t.should_install() and not t.get_install_dir()[2]
+ if t.for_machine != MachineChoice.HOST or not in_default_dir:
+ continue
+ tdir = os.path.join(self.environment.get_build_dir(), self.get_target_dir(t))
+ if isinstance(t, build.Executable):
+ # Add binaries that are going to be installed in bindir into PATH
+ # so they get used by default instead of searching on system when
+ # in developer environment.
+ extra_paths.add(tdir)
+ if host_machine.is_windows() or host_machine.is_cygwin():
+ # On windows we cannot rely on rpath to run executables from build
+ # directory. We have to add in PATH the location of every DLL needed.
+ library_paths.update(self.determine_windows_extra_paths(t, []))
+ elif isinstance(t, build.SharedLibrary):
+ # Add libraries that are going to be installed in libdir into
+ # LD_LIBRARY_PATH. This allows running system applications using
+ # that library.
+ library_paths.add(tdir)
+ if need_wine:
+ # Executable paths should be in both PATH and WINEPATH.
+ # - Having them in PATH makes bash completion find it,
+ # and makes running "foo.exe" find it when wine-binfmt is installed.
+ # - Having them in WINEPATH makes "wine foo.exe" find it.
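+ # (The executable directories collected above are merged into the
+ # library path set here, so a single WINEPATH entry covers both.)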
+ library_paths.update(extra_paths) + if library_paths: + if need_wine: + env.prepend('WINEPATH', list(library_paths), separator=';') + elif host_machine.is_windows() or host_machine.is_cygwin(): + extra_paths.update(library_paths) + elif host_machine.is_darwin(): + env.prepend('DYLD_LIBRARY_PATH', list(library_paths)) + else: + env.prepend('LD_LIBRARY_PATH', list(library_paths)) + if extra_paths: + env.prepend('PATH', list(extra_paths)) + return env + + def compiler_to_generator(self, target: build.BuildTarget, + compiler: 'Compiler', + sources: _ALL_SOURCES_TYPE, + output_templ: str) -> build.GeneratedList: + ''' + Some backends don't support custom compilers. This is a convenience + method to convert a Compiler to a Generator. + ''' + exelist = compiler.get_exelist() + exe = programs.ExternalProgram(exelist[0]) + args = exelist[1:] + # FIXME: There are many other args missing + commands = self.generate_basic_compiler_args(target, compiler) + commands += compiler.get_dependency_gen_args('@OUTPUT@', '@DEPFILE@') + commands += compiler.get_output_args('@OUTPUT@') + commands += compiler.get_compile_only_args() + ['@INPUT@'] + commands += self.get_source_dir_include_args(target, compiler) + commands += self.get_build_dir_include_args(target, compiler) + generator = build.Generator(exe, args + commands.to_native(), [output_templ], depfile='@PLAINNAME@.d') + return generator.process_files(sources, self.interpreter) + + def compile_target_to_generator(self, target: build.CompileTarget) -> build.GeneratedList: + all_sources = T.cast('_ALL_SOURCES_TYPE', target.sources) + T.cast('_ALL_SOURCES_TYPE', target.generated) + return self.compiler_to_generator(target, target.compiler, all_sources, target.output_templ) diff --git a/vendored-meson/meson/mesonbuild/backend/ninjabackend.py b/vendored-meson/meson/mesonbuild/backend/ninjabackend.py new file mode 100644 index 000000000000..1786fef893f3 --- /dev/null +++ b/vendored-meson/meson/mesonbuild/backend/ninjabackend.py @@ -0,0 +1,3846 @@ +# Copyright 2012-2017 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from __future__ import annotations + +from collections import OrderedDict +from dataclasses import dataclass +from enum import Enum, unique +from functools import lru_cache +from pathlib import PurePath, Path +from textwrap import dedent +import itertools +import json +import os +import pickle +import re +import shlex +import subprocess +import typing as T + +from . import backends +from .. import modules +from .. import environment, mesonlib +from .. import build +from .. import mlog +from .. 
import compilers +from ..arglist import CompilerArgs +from ..compilers import Compiler +from ..linkers import ArLikeLinker, RSPFileSyntax +from ..mesonlib import ( + File, LibType, MachineChoice, MesonBugException, MesonException, OrderedSet, PerMachine, + ProgressBar, quote_arg +) +from ..mesonlib import get_compiler_for_source, has_path_sep, OptionKey +from .backends import CleanTrees +from ..build import GeneratedList, InvalidArguments + +if T.TYPE_CHECKING: + from typing_extensions import Literal + + from .._typing import ImmutableListProtocol + from ..build import ExtractedObjects, LibTypes + from ..interpreter import Interpreter + from ..linkers.linkers import DynamicLinker, StaticLinker + from ..compilers.cs import CsCompiler + from ..compilers.fortran import FortranCompiler + + CommandArgOrStr = T.List[T.Union['NinjaCommandArg', str]] + RUST_EDITIONS = Literal['2015', '2018', '2021'] + + +FORTRAN_INCLUDE_PAT = r"^\s*#?include\s*['\"](\w+\.\w+)['\"]" +FORTRAN_MODULE_PAT = r"^\s*\bmodule\b\s+(\w+)\s*(?:!+.*)*$" +FORTRAN_SUBMOD_PAT = r"^\s*\bsubmodule\b\s*\((\w+:?\w+)\)\s*(\w+)" +FORTRAN_USE_PAT = r"^\s*use,?\s*(?:non_intrinsic)?\s*(?:::)?\s*(\w+)" + +def cmd_quote(arg: str) -> str: + # see: https://docs.microsoft.com/en-us/windows/desktop/api/shellapi/nf-shellapi-commandlinetoargvw#remarks + + # backslash escape any existing double quotes + # any existing backslashes preceding a quote are doubled + arg = re.sub(r'(\\*)"', lambda m: '\\' * (len(m.group(1)) * 2 + 1) + '"', arg) + # any terminal backslashes likewise need doubling + arg = re.sub(r'(\\*)$', lambda m: '\\' * (len(m.group(1)) * 2), arg) + # and double quote + arg = f'"{arg}"' + + return arg + +def gcc_rsp_quote(s: str) -> str: + # see: the function buildargv() in libiberty + # + # this differs from sh-quoting in that a backslash *always* escapes the + # following character, even inside single quotes. + + s = s.replace('\\', '\\\\') + + return shlex.quote(s) + +# How ninja executes command lines differs between Unix and Windows +# (see https://ninja-build.org/manual.html#ref_rule_command) +if mesonlib.is_windows(): + quote_func = cmd_quote + execute_wrapper = ['cmd', '/c'] # unused + rmfile_prefix = ['del', '/f', '/s', '/q', '{}', '&&'] +else: + quote_func = quote_arg + execute_wrapper = [] + rmfile_prefix = ['rm', '-f', '{}', '&&'] + + +def get_rsp_threshold() -> int: + '''Return a conservative estimate of the commandline size in bytes + above which a response file should be used. May be overridden for + debugging by setting environment variable MESON_RSP_THRESHOLD.''' + + if mesonlib.is_windows(): + # Usually 32k, but some projects might use cmd.exe, + # and that has a limit of 8k. + limit = 8192 + else: + # On Linux, ninja always passes the commandline as a single + # big string to /bin/sh, and the kernel limits the size of a + # single argument; see MAX_ARG_STRLEN + limit = 131072 + # Be conservative + limit = limit // 2 + return int(os.environ.get('MESON_RSP_THRESHOLD', limit)) + +# a conservative estimate of the command-line length limit +rsp_threshold = get_rsp_threshold() + +# ninja variables whose value should remain unquoted. The value of these ninja +# variables (or variables we use them in) is interpreted directly by ninja +# (e.g. the value of the depfile variable is a pathname that ninja will read +# from, etc.), so it must not be shell quoted. 
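+# For example, a rule body may contain "depfile = $DEPFILE_UNQUOTED"; the
+# value is read by ninja itself, so shell-quoting it would make ninja look
+# for a depfile whose name literally contains the quote characters.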
+raw_names = {'DEPFILE_UNQUOTED', 'DESC', 'pool', 'description', 'targetdep', 'dyndep'} + +NINJA_QUOTE_BUILD_PAT = re.compile(r"[$ :\n]") +NINJA_QUOTE_VAR_PAT = re.compile(r"[$ \n]") + +def ninja_quote(text: str, is_build_line: bool = False) -> str: + if is_build_line: + quote_re = NINJA_QUOTE_BUILD_PAT + else: + quote_re = NINJA_QUOTE_VAR_PAT + # Fast path for when no quoting is necessary + if not quote_re.search(text): + return text + if '\n' in text: + errmsg = f'''Ninja does not support newlines in rules. The content was: + +{text} + +Please report this error with a test case to the Meson bug tracker.''' + raise MesonException(errmsg) + return quote_re.sub(r'$\g<0>', text) + +class TargetDependencyScannerInfo: + def __init__(self, private_dir: str, source2object: T.Dict[str, str]): + self.private_dir = private_dir + self.source2object = source2object + +@unique +class Quoting(Enum): + both = 0 + notShell = 1 + notNinja = 2 + none = 3 + +class NinjaCommandArg: + def __init__(self, s: str, quoting: Quoting = Quoting.both) -> None: + self.s = s + self.quoting = quoting + + def __str__(self) -> str: + return self.s + + @staticmethod + def list(l: str, q: Quoting) -> T.List[NinjaCommandArg]: + return [NinjaCommandArg(i, q) for i in l] + +@dataclass +class NinjaComment: + comment: str + + def write(self, outfile: T.TextIO) -> None: + for l in self.comment.split('\n'): + outfile.write('# ') + outfile.write(l) + outfile.write('\n') + outfile.write('\n') + +class NinjaRule: + def __init__(self, rule: str, command: CommandArgOrStr, args: CommandArgOrStr, + description: str, rspable: bool = False, deps: T.Optional[str] = None, + depfile: T.Optional[str] = None, extra: T.Optional[str] = None, + rspfile_quote_style: RSPFileSyntax = RSPFileSyntax.GCC): + + def strToCommandArg(c: T.Union[NinjaCommandArg, str]) -> NinjaCommandArg: + if isinstance(c, NinjaCommandArg): + return c + + # deal with common cases here, so we don't have to explicitly + # annotate the required quoting everywhere + if c == '&&': + # shell constructs shouldn't be shell quoted + return NinjaCommandArg(c, Quoting.notShell) + if c.startswith('$'): + var = re.search(r'\$\{?(\w*)\}?', c).group(1) + if var not in raw_names: + # ninja variables shouldn't be ninja quoted, and their value + # is already shell quoted + return NinjaCommandArg(c, Quoting.none) + else: + # shell quote the use of ninja variables whose value must + # not be shell quoted (as it also used by ninja) + return NinjaCommandArg(c, Quoting.notNinja) + + return NinjaCommandArg(c) + + self.name = rule + self.command: T.List[NinjaCommandArg] = [strToCommandArg(c) for c in command] # includes args which never go into a rspfile + self.args: T.List[NinjaCommandArg] = [strToCommandArg(a) for a in args] # args which will go into a rspfile, if used + self.description = description + self.deps = deps # depstyle 'gcc' or 'msvc' + self.depfile = depfile + self.extra = extra + self.rspable = rspable # if a rspfile can be used + self.refcount = 0 + self.rsprefcount = 0 + self.rspfile_quote_style = rspfile_quote_style + + if self.depfile == '$DEPFILE': + self.depfile += '_UNQUOTED' + + @staticmethod + def _quoter(x, qf = quote_func): + if isinstance(x, NinjaCommandArg): + if x.quoting == Quoting.none: + return x.s + elif x.quoting == Quoting.notNinja: + return qf(x.s) + elif x.quoting == Quoting.notShell: + return ninja_quote(x.s) + # fallthrough + return ninja_quote(qf(str(x))) + + def write(self, outfile: T.TextIO) -> None: + if self.rspfile_quote_style is RSPFileSyntax.MSVC: + 
rspfile_quote_func = cmd_quote + else: + rspfile_quote_func = gcc_rsp_quote + + def rule_iter(): + if self.refcount: + yield '' + if self.rsprefcount: + yield '_RSP' + + for rsp in rule_iter(): + outfile.write(f'rule {self.name}{rsp}\n') + if rsp == '_RSP': + outfile.write(' command = {} @$out.rsp\n'.format(' '.join([self._quoter(x) for x in self.command]))) + outfile.write(' rspfile = $out.rsp\n') + outfile.write(' rspfile_content = {}\n'.format(' '.join([self._quoter(x, rspfile_quote_func) for x in self.args]))) + else: + outfile.write(' command = {}\n'.format(' '.join([self._quoter(x) for x in self.command + self.args]))) + if self.deps: + outfile.write(f' deps = {self.deps}\n') + if self.depfile: + outfile.write(f' depfile = {self.depfile}\n') + outfile.write(f' description = {self.description}\n') + if self.extra: + for l in self.extra.split('\n'): + outfile.write(' ') + outfile.write(l) + outfile.write('\n') + outfile.write('\n') + + def length_estimate(self, infiles, outfiles, elems): + # determine variables + # this order of actions only approximates ninja's scoping rules, as + # documented at: https://ninja-build.org/manual.html#ref_scope + ninja_vars = {} + for e in elems: + (name, value) = e + ninja_vars[name] = value + ninja_vars['deps'] = self.deps + ninja_vars['depfile'] = self.depfile + ninja_vars['in'] = infiles + ninja_vars['out'] = outfiles + + # expand variables in command + command = ' '.join([self._quoter(x) for x in self.command + self.args]) + estimate = len(command) + for m in re.finditer(r'(\${\w+}|\$\w+)?[^$]*', command): + if m.start(1) != -1: + estimate -= m.end(1) - m.start(1) + 1 + chunk = m.group(1) + if chunk[1] == '{': + chunk = chunk[2:-1] + else: + chunk = chunk[1:] + chunk = ninja_vars.get(chunk, []) # undefined ninja variables are empty + estimate += len(' '.join(chunk)) + + # determine command length + return estimate + +class NinjaBuildElement: + def __init__(self, all_outputs, outfilenames, rulename, infilenames, implicit_outs=None): + self.implicit_outfilenames = implicit_outs or [] + if isinstance(outfilenames, str): + self.outfilenames = [outfilenames] + else: + self.outfilenames = outfilenames + assert isinstance(rulename, str) + self.rulename = rulename + if isinstance(infilenames, str): + self.infilenames = [infilenames] + else: + self.infilenames = infilenames + self.deps = OrderedSet() + self.orderdeps = OrderedSet() + self.elems = [] + self.all_outputs = all_outputs + self.output_errors = '' + + def add_dep(self, dep): + if isinstance(dep, list): + self.deps.update(dep) + else: + self.deps.add(dep) + + def add_orderdep(self, dep): + if isinstance(dep, list): + self.orderdeps.update(dep) + else: + self.orderdeps.add(dep) + + def add_item(self, name, elems): + # Always convert from GCC-style argument naming to the naming used by the + # current compiler. Also filter system include paths, deduplicate, etc. 
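+ # (e.g. a GCC-style '-I/some/include' is typically rendered as
+ # '/I/some/include' by to_native() for an MSVC-like compiler)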
+ if isinstance(elems, CompilerArgs): + elems = elems.to_native() + if isinstance(elems, str): + elems = [elems] + self.elems.append((name, elems)) + + if name == 'DEPFILE': + self.elems.append((name + '_UNQUOTED', elems)) + + def _should_use_rspfile(self): + # 'phony' is a rule built-in to ninja + if self.rulename == 'phony': + return False + + if not self.rule.rspable: + return False + + infilenames = ' '.join([ninja_quote(i, True) for i in self.infilenames]) + outfilenames = ' '.join([ninja_quote(i, True) for i in self.outfilenames]) + + return self.rule.length_estimate(infilenames, + outfilenames, + self.elems) >= rsp_threshold + + def count_rule_references(self): + if self.rulename != 'phony': + if self._should_use_rspfile(): + self.rule.rsprefcount += 1 + else: + self.rule.refcount += 1 + + def write(self, outfile): + if self.output_errors: + raise MesonException(self.output_errors) + ins = ' '.join([ninja_quote(i, True) for i in self.infilenames]) + outs = ' '.join([ninja_quote(i, True) for i in self.outfilenames]) + implicit_outs = ' '.join([ninja_quote(i, True) for i in self.implicit_outfilenames]) + if implicit_outs: + implicit_outs = ' | ' + implicit_outs + use_rspfile = self._should_use_rspfile() + if use_rspfile: + rulename = self.rulename + '_RSP' + mlog.debug(f'Command line for building {self.outfilenames} is long, using a response file') + else: + rulename = self.rulename + line = f'build {outs}{implicit_outs}: {rulename} {ins}' + if len(self.deps) > 0: + line += ' | ' + ' '.join([ninja_quote(x, True) for x in sorted(self.deps)]) + if len(self.orderdeps) > 0: + orderdeps = [str(x) for x in self.orderdeps] + line += ' || ' + ' '.join([ninja_quote(x, True) for x in sorted(orderdeps)]) + line += '\n' + # This is the only way I could find to make this work on all + # platforms including Windows command shell. Slash is a dir separator + # on Windows, too, so all characters are unambiguous and, more importantly, + # do not require quoting, unless explicitly specified, which is necessary for + # the csc compiler. + line = line.replace('\\', '/') + if mesonlib.is_windows(): + # Support network paths as backslash, otherwise they are interpreted as + # arguments for compile/link commands when using MSVC + line = ' '.join( + (l.replace('//', '\\\\', 1) if l.startswith('//') else l) + for l in line.split(' ') + ) + outfile.write(line) + + if use_rspfile: + if self.rule.rspfile_quote_style is RSPFileSyntax.MSVC: + qf = cmd_quote + else: + qf = gcc_rsp_quote + else: + qf = quote_func + + for e in self.elems: + (name, elems) = e + should_quote = name not in raw_names + line = f' {name} = ' + newelems = [] + for i in elems: + if not should_quote or i == '&&': # Hackety hack hack + newelems.append(ninja_quote(i)) + else: + newelems.append(ninja_quote(qf(i))) + line += ' '.join(newelems) + line += '\n' + outfile.write(line) + outfile.write('\n') + + def check_outputs(self): + for n in self.outfilenames: + if n in self.all_outputs: + self.output_errors = f'Multiple producers for Ninja target "{n}". Please rename your targets.' 
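+ # Still record the name, so that any further duplicates of the
+ # same output are flagged as well.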
+ self.all_outputs[n] = True
+
+@dataclass
+class RustDep:
+
+ name: str
+
+ # equal to the order value of the `RustCrate`
+ crate: int
+
+ def to_json(self) -> T.Dict[str, object]:
+ return {
+ "crate": self.crate,
+ "name": self.name,
+ }
+
+@dataclass
+class RustCrate:
+
+ # When the json file is written, the list of Crates will be sorted by this
+ # value
+ order: int
+
+ display_name: str
+ root_module: str
+ edition: RUST_EDITIONS
+ deps: T.List[RustDep]
+ cfg: T.List[str]
+ is_proc_macro: bool
+
+ # This is set to True for members of this project, and False for all
+ # subprojects
+ is_workspace_member: bool
+ proc_macro_dylib_path: T.Optional[str] = None
+
+ def to_json(self) -> T.Dict[str, object]:
+ ret: T.Dict[str, object] = {
+ "display_name": self.display_name,
+ "root_module": self.root_module,
+ "edition": self.edition,
+ "cfg": self.cfg,
+ "is_proc_macro": self.is_proc_macro,
+ "deps": [d.to_json() for d in self.deps],
+ }
+
+ if self.is_proc_macro:
+ assert self.proc_macro_dylib_path is not None, "This shouldn't happen"
+ ret["proc_macro_dylib_path"] = self.proc_macro_dylib_path
+
+ return ret
+
+
+class NinjaBackend(backends.Backend):
+
+ def __init__(self, build: T.Optional[build.Build], interpreter: T.Optional[Interpreter]):
+ super().__init__(build, interpreter)
+ self.name = 'ninja'
+ self.ninja_filename = 'build.ninja'
+ self.fortran_deps = {}
+ self.all_outputs = {}
+ self.introspection_data = {}
+ self.created_llvm_ir_rule = PerMachine(False, False)
+ self.rust_crates: T.Dict[str, RustCrate] = {}
+ self.implicit_meson_outs = []
+
+ def create_phony_target(self, all_outputs, dummy_outfile, rulename, phony_infilename, implicit_outs=None):
+ '''
+ We need to use aliases for targets that might be used as directory
+ names to workaround a Ninja bug that breaks `ninja -t clean`.
+ This is used for 'reserved' targets such as 'test', 'install',
+ 'benchmark', etc, and also for RunTargets.
+ https://github.com/mesonbuild/meson/issues/1644
+ '''
+ if dummy_outfile.startswith('meson-internal__'):
+ raise AssertionError(f'Invalid usage of create_phony_target with {dummy_outfile!r}')
+
+ to_name = f'meson-internal__{dummy_outfile}'
+ elem = NinjaBuildElement(all_outputs, dummy_outfile, 'phony', to_name)
+ self.add_build(elem)
+
+ return NinjaBuildElement(all_outputs, to_name, rulename, phony_infilename, implicit_outs)
+
+ def detect_vs_dep_prefix(self, tempfilename):
+ '''VS writes its dependency in a locale dependent format.
+ Detect the search prefix to use.'''
+ # TODO don't hard-code host
+ for compiler in self.environment.coredata.compilers.host.values():
+ # Have to detect the dependency format
+
+ # IFort / masm on windows is MSVC like, but doesn't have /showincludes
+ if compiler.language in {'fortran', 'masm'}:
+ continue
+ if compiler.id == 'pgi' and mesonlib.is_windows():
+ # for the purpose of this function, PGI doesn't act enough like MSVC
+ return open(tempfilename, 'a', encoding='utf-8')
+ if compiler.get_argument_syntax() == 'msvc':
+ break
+ else:
+ # None of our compilers are MSVC, we're done.
+ return open(tempfilename, 'a', encoding='utf-8')
+ filebase = 'incdetect.' + compilers.lang_suffixes[compiler.language][0]
+ filename = os.path.join(self.environment.get_scratch_dir(),
+ filebase)
+ with open(filename, 'w', encoding='utf-8') as f:
+ f.write(dedent('''\
+ #include <stdio.h>
+ int dummy;
+ '''))
+
+ # The output of cl dependency information is language
+ # and locale dependent. Any attempt at converting it to
+ # Python strings leads to failure.
We _must_ do this detection + # in raw byte mode and write the result in raw bytes. + pc = subprocess.Popen(compiler.get_exelist() + + ['/showIncludes', '/c', filebase], + cwd=self.environment.get_scratch_dir(), + stdout=subprocess.PIPE, stderr=subprocess.PIPE) + (stdout, stderr) = pc.communicate() + + # We want to match 'Note: including file: ' in the line + # 'Note: including file: d:\MyDir\include\stdio.h', however + # different locales have different messages with a different + # number of colons. Match up to the drive name 'd:\'. + # When used in cross compilation, the path separator is a + # forward slash rather than a backslash so handle both; i.e. + # the path is /MyDir/include/stdio.h. + # With certain cross compilation wrappings of MSVC, the paths + # use backslashes, but without the leading drive name, so + # allow the path to start with any path separator, i.e. + # \MyDir\include\stdio.h. + matchre = re.compile(rb"^(.*\s)([a-zA-Z]:[\\/]|[\\\/]).*stdio.h$") + + def detect_prefix(out): + for line in re.split(rb'\r?\n', out): + match = matchre.match(line) + if match: + with open(tempfilename, 'ab') as binfile: + binfile.write(b'msvc_deps_prefix = ' + match.group(1) + b'\n') + return open(tempfilename, 'a', encoding='utf-8') + return None + + # Some cl wrappers (e.g. Squish Coco) output dependency info + # to stderr rather than stdout + result = detect_prefix(stdout) or detect_prefix(stderr) + if result: + return result + + raise MesonException(f'Could not determine vs dep dependency prefix string. output: {stderr} {stdout}') + + def generate(self, capture: bool = False, vslite_ctx: dict = None) -> T.Optional[dict]: + if vslite_ctx: + # We don't yet have a use case where we'd expect to make use of this, + # so no harm in catching and reporting something unexpected. + raise MesonBugException('We do not expect the ninja backend to be given a valid \'vslite_ctx\'') + ninja = environment.detect_ninja_command_and_version(log=True) + if self.environment.coredata.get_option(OptionKey('vsenv')): + builddir = Path(self.environment.get_build_dir()) + try: + # For prettier printing, reduce to a relative path. If + # impossible (e.g., because builddir and cwd are on + # different Windows drives), skip and use the full path. + builddir = builddir.relative_to(Path.cwd()) + except ValueError: + pass + meson_command = mesonlib.join_args(mesonlib.get_meson_command()) + mlog.log() + mlog.log('Visual Studio environment is needed to run Ninja. 
It is recommended to use Meson wrapper:') + mlog.log(f'{meson_command} compile -C {builddir}') + if ninja is None: + raise MesonException('Could not detect Ninja v1.8.2 or newer') + (self.ninja_command, self.ninja_version) = ninja + outfilename = os.path.join(self.environment.get_build_dir(), self.ninja_filename) + tempfilename = outfilename + '~' + with open(tempfilename, 'w', encoding='utf-8') as outfile: + outfile.write(f'# This is the build file for project "{self.build.get_project()}"\n') + outfile.write('# It is autogenerated by the Meson build system.\n') + outfile.write('# Do not edit by hand.\n\n') + outfile.write('ninja_required_version = 1.8.2\n\n') + + num_pools = self.environment.coredata.options[OptionKey('backend_max_links')].value + if num_pools > 0: + outfile.write(f'''pool link_pool + depth = {num_pools} + +''') + + with self.detect_vs_dep_prefix(tempfilename) as outfile: + self.generate_rules() + + self.build_elements = [] + self.generate_phony() + self.add_build_comment(NinjaComment('Build rules for targets')) + + # Optionally capture compile args per target, for later use (i.e. VisStudio project's NMake intellisense include dirs, defines, and compile options). + if capture: + captured_compile_args_per_target = {} + for target in self.build.get_targets().values(): + if isinstance(target, build.BuildTarget): + captured_compile_args_per_target[target.get_id()] = self.generate_common_compile_args_per_src_type(target) + + for t in ProgressBar(self.build.get_targets().values(), desc='Generating targets'): + self.generate_target(t) + self.add_build_comment(NinjaComment('Test rules')) + self.generate_tests() + self.add_build_comment(NinjaComment('Install rules')) + self.generate_install() + self.generate_dist() + key = OptionKey('b_coverage') + if (key in self.environment.coredata.options and + self.environment.coredata.options[key].value): + gcovr_exe, gcovr_version, lcov_exe, genhtml_exe, _ = environment.find_coverage_tools() + if gcovr_exe or (lcov_exe and genhtml_exe): + self.add_build_comment(NinjaComment('Coverage rules')) + self.generate_coverage_rules(gcovr_exe, gcovr_version) + else: + # FIXME: since we explicitly opted in, should this be an error? + # The docs just say these targets will be created "if possible". + mlog.warning('Need gcovr or lcov/genhtml to generate any coverage reports') + self.add_build_comment(NinjaComment('Suffix')) + self.generate_utils() + self.generate_ending() + + self.write_rules(outfile) + self.write_builds(outfile) + + default = 'default all\n\n' + outfile.write(default) + # Only overwrite the old build file after the new one has been + # fully created. + os.replace(tempfilename, outfilename) + mlog.cmd_ci_include(outfilename) # For CI debugging + # Refresh Ninja's caches. 
https://github.com/ninja-build/ninja/pull/1685 + if mesonlib.version_compare(self.ninja_version, '>=1.10.0') and os.path.exists('.ninja_deps'): + subprocess.call(self.ninja_command + ['-t', 'restat']) + subprocess.call(self.ninja_command + ['-t', 'cleandead']) + self.generate_compdb() + self.generate_rust_project_json() + + if capture: + return captured_compile_args_per_target + + def generate_rust_project_json(self) -> None: + """Generate a rust-analyzer compatible rust-project.json file.""" + if not self.rust_crates: + return + with open(os.path.join(self.environment.get_build_dir(), 'rust-project.json'), + 'w', encoding='utf-8') as f: + json.dump( + { + "sysroot_src": os.path.join(self.environment.coredata.compilers.host['rust'].get_sysroot(), + 'lib/rustlib/src/rust/library/'), + "crates": [c.to_json() for c in self.rust_crates.values()], + }, + f, indent=4) + + # http://clang.llvm.org/docs/JSONCompilationDatabase.html + def generate_compdb(self): + rules = [] + # TODO: Rather than an explicit list here, rules could be marked in the + # rule store as being wanted in compdb + for for_machine in MachineChoice: + for compiler in self.environment.coredata.compilers[for_machine].values(): + rules += [f"{rule}{ext}" for rule in [self.compiler_to_rule_name(compiler)] + for ext in ['', '_RSP']] + rules += [f"{rule}{ext}" for rule in [self.compiler_to_pch_rule_name(compiler)] + for ext in ['', '_RSP']] + compdb_options = ['-x'] if mesonlib.version_compare(self.ninja_version, '>=1.9') else [] + ninja_compdb = self.ninja_command + ['-t', 'compdb'] + compdb_options + rules + builddir = self.environment.get_build_dir() + try: + jsondb = subprocess.check_output(ninja_compdb, cwd=builddir) + with open(os.path.join(builddir, 'compile_commands.json'), 'wb') as f: + f.write(jsondb) + except Exception: + mlog.warning('Could not create compilation database.', fatal=False) + + # Get all generated headers. Any source file might need them so + # we need to add an order dependency to them. + def get_generated_headers(self, target): + if hasattr(target, 'cached_generated_headers'): + return target.cached_generated_headers + header_deps = [] + # XXX: Why don't we add deps to CustomTarget headers here? + for genlist in target.get_generated_sources(): + if isinstance(genlist, (build.CustomTarget, build.CustomTargetIndex)): + continue + for src in genlist.get_outputs(): + if self.environment.is_header(src): + header_deps.append(self.get_target_generated_dir(target, genlist, src)) + if 'vala' in target.compilers and not isinstance(target, build.Executable): + vala_header = File.from_built_file(self.get_target_dir(target), target.vala_header) + header_deps.append(vala_header) + # Recurse and find generated headers + for dep in itertools.chain(target.link_targets, target.link_whole_targets): + if isinstance(dep, (build.StaticLibrary, build.SharedLibrary)): + header_deps += self.get_generated_headers(dep) + target.cached_generated_headers = header_deps + return header_deps + + def get_target_generated_sources(self, target: build.BuildTarget) -> T.MutableMapping[str, File]: + """ + Returns a dictionary with the keys being the path to the file + (relative to the build directory) and the value being the File object + representing the same path. 
+ """ + srcs: T.MutableMapping[str, File] = OrderedDict() + for gensrc in target.get_generated_sources(): + for s in gensrc.get_outputs(): + rel_src = self.get_target_generated_dir(target, gensrc, s) + srcs[rel_src] = File.from_built_relative(rel_src) + return srcs + + def get_target_sources(self, target: build.BuildTarget) -> T.MutableMapping[str, File]: + srcs: T.MutableMapping[str, File] = OrderedDict() + for s in target.get_sources(): + # BuildTarget sources are always mesonlib.File files which are + # either in the source root, or generated with configure_file and + # in the build root + if not isinstance(s, File): + raise InvalidArguments(f'All sources in target {s!r} must be of type mesonlib.File') + f = s.rel_to_builddir(self.build_to_src) + srcs[f] = s + return srcs + + def get_target_source_can_unity(self, target, source): + if isinstance(source, File): + source = source.fname + if self.environment.is_llvm_ir(source) or \ + self.environment.is_assembly(source): + return False + suffix = os.path.splitext(source)[1][1:].lower() + for lang in backends.LANGS_CANT_UNITY: + if lang not in target.compilers: + continue + if suffix in target.compilers[lang].file_suffixes: + return False + return True + + def create_target_source_introspection(self, target: build.Target, comp: compilers.Compiler, parameters, sources, generated_sources, + unity_sources: T.Optional[T.List[mesonlib.FileOrString]] = None): + ''' + Adds the source file introspection information for a language of a target + + Internal introspection storage format: + self.introspection_data = { + '': { + : { + 'language: 'lang', + 'compiler': ['comp', 'exe', 'list'], + 'parameters': ['UNIQUE', 'parameter', 'list'], + 'sources': [], + 'generated_sources': [], + } + } + } + ''' + tid = target.get_id() + lang = comp.get_language() + tgt = self.introspection_data[tid] + # Find an existing entry or create a new one + id_hash = (lang, tuple(parameters)) + src_block = tgt.get(id_hash, None) + if src_block is None: + # Convert parameters + if isinstance(parameters, CompilerArgs): + parameters = parameters.to_native(copy=True) + parameters = comp.compute_parameters_with_absolute_paths(parameters, self.build_dir) + # The new entry + src_block = { + 'language': lang, + 'compiler': comp.get_exelist(), + 'parameters': parameters, + 'sources': [], + 'generated_sources': [], + 'unity_sources': [], + } + tgt[id_hash] = src_block + + def compute_path(file: mesonlib.FileOrString) -> str: + """ Make source files absolute """ + if isinstance(file, File): + return file.absolute_path(self.source_dir, self.build_dir) + return os.path.normpath(os.path.join(self.build_dir, file)) + + src_block['sources'].extend(compute_path(x) for x in sources) + src_block['generated_sources'].extend(compute_path(x) for x in generated_sources) + if unity_sources: + src_block['unity_sources'].extend(compute_path(x) for x in unity_sources) + + def create_target_linker_introspection(self, target: build.Target, linker: T.Union[Compiler, StaticLinker], parameters): + tid = target.get_id() + tgt = self.introspection_data[tid] + lnk_hash = tuple(parameters) + lnk_block = tgt.get(lnk_hash, None) + if lnk_block is None: + if isinstance(parameters, CompilerArgs): + parameters = parameters.to_native(copy=True) + + if isinstance(linker, Compiler): + linkers = linker.get_linker_exelist() + else: + linkers = linker.get_exelist() + + lnk_block = { + 'linker': linkers, + 'parameters': parameters, + } + tgt[lnk_hash] = lnk_block + + def generate_target(self, target): + try: + if 
isinstance(target, build.BuildTarget): + os.makedirs(self.get_target_private_dir_abs(target)) + except FileExistsError: + pass + if isinstance(target, build.CustomTarget): + self.generate_custom_target(target) + if isinstance(target, build.RunTarget): + self.generate_run_target(target) + compiled_sources = [] + source2object = {} + name = target.get_id() + if name in self.processed_targets: + return + self.processed_targets.add(name) + # Initialize an empty introspection source list + self.introspection_data[name] = {} + # Generate rules for all dependency targets + self.process_target_dependencies(target) + + self.generate_shlib_aliases(target, self.get_target_dir(target)) + + # If target uses a language that cannot link to C objects, + # just generate for that language and return. + if isinstance(target, build.Jar): + self.generate_jar_target(target) + return + if target.uses_rust(): + self.generate_rust_target(target) + return + if 'cs' in target.compilers: + self.generate_cs_target(target) + return + if 'swift' in target.compilers: + self.generate_swift_target(target) + return + + # CompileTarget compiles all its sources and does not do a final link. + # This is, for example, a preprocessor. + is_compile_target = isinstance(target, build.CompileTarget) + + # Preexisting target C/C++ sources to be built; dict of full path to + # source relative to build root and the original File object. + target_sources: T.MutableMapping[str, File] + + # GeneratedList and CustomTarget sources to be built; dict of the full + # path to source relative to build root and the generating target/list + generated_sources: T.MutableMapping[str, File] + + # List of sources that have been transpiled from a DSL (like Vala) into + # a language that is handled below, such as C or C++ + transpiled_sources: T.List[str] + + if 'vala' in target.compilers: + # Sources consumed by valac are filtered out. These only contain + # C/C++ sources, objects, generated libs, and unknown sources now. + target_sources, generated_sources, \ + transpiled_sources = self.generate_vala_compile(target) + elif 'cython' in target.compilers: + target_sources, generated_sources, \ + transpiled_sources = self.generate_cython_transpile(target) + else: + target_sources = self.get_target_sources(target) + generated_sources = self.get_target_generated_sources(target) + transpiled_sources = [] + self.scan_fortran_module_outputs(target) + # Generate rules for GeneratedLists + self.generate_generator_list_rules(target) + + # Generate rules for building the remaining source files in this target + outname = self.get_target_filename(target) + obj_list = [] + is_unity = target.is_unity + header_deps = [] + unity_src = [] + unity_deps = [] # Generated sources that must be built before compiling a Unity target. + header_deps += self.get_generated_headers(target) + + if is_unity: + # Warn about incompatible sources if a unity build is enabled + langs = set(target.compilers.keys()) + langs_cant = langs.intersection(backends.LANGS_CANT_UNITY) + if langs_cant: + langs_are = langs = ', '.join(langs_cant).upper() + langs_are += ' are' if len(langs_cant) > 1 else ' is' + msg = f'{langs_are} not supported in Unity builds yet, so {langs} ' \ + f'sources in the {target.name!r} target will be compiled normally' + mlog.log(mlog.red('FIXME'), msg) + + # Get a list of all generated headers that will be needed while building + # this target's sources (generated sources and preexisting sources). + # This will be set as dependencies of all the target's sources. 
At the + # same time, also deal with generated sources that need to be compiled. + generated_source_files = [] + for rel_src in generated_sources.keys(): + raw_src = File.from_built_relative(rel_src) + if self.environment.is_source(rel_src): + if is_unity and self.get_target_source_can_unity(target, rel_src): + unity_deps.append(raw_src) + abs_src = os.path.join(self.environment.get_build_dir(), rel_src) + unity_src.append(abs_src) + else: + generated_source_files.append(raw_src) + elif self.environment.is_object(rel_src): + obj_list.append(rel_src) + elif self.environment.is_library(rel_src) or modules.is_module_library(rel_src): + pass + elif is_compile_target: + generated_source_files.append(raw_src) + else: + # Assume anything not specifically a source file is a header. This is because + # people generate files with weird suffixes (.inc, .fh) that they then include + # in their source files. + header_deps.append(raw_src) + + # For D language, the object of generated source files are added + # as order only deps because other files may depend on them + d_generated_deps = [] + + # These are the generated source files that need to be built for use by + # this target. We create the Ninja build file elements for this here + # because we need `header_deps` to be fully generated in the above loop. + for src in generated_source_files: + if self.environment.is_llvm_ir(src): + o, s = self.generate_llvm_ir_compile(target, src) + else: + o, s = self.generate_single_compile(target, src, True, + order_deps=header_deps) + compiled_sources.append(s) + source2object[s] = o + obj_list.append(o) + if s.split('.')[-1] in compilers.lang_suffixes['d']: + d_generated_deps.append(o) + + use_pch = self.target_uses_pch(target) + if use_pch and target.has_pch(): + pch_objects = self.generate_pch(target, header_deps=header_deps) + else: + pch_objects = [] + + o, od = self.flatten_object_list(target) + obj_targets = [t for t in od if t.uses_fortran()] + obj_list.extend(o) + + fortran_order_deps = [File(True, *os.path.split(self.get_target_filename(t))) for t in obj_targets] + fortran_inc_args: T.List[str] = [] + if target.uses_fortran(): + fortran_inc_args = mesonlib.listify([target.compilers['fortran'].get_include_args( + self.get_target_private_dir(t), is_system=False) for t in obj_targets]) + + # Generate compilation targets for C sources generated from Vala + # sources. This can be extended to other $LANG->C compilers later if + # necessary. This needs to be separate for at least Vala + # + # Do not try to unity-build the generated c files from vala, as these + # often contain duplicate symbols and will fail to compile properly + vala_generated_source_files = [] + for src in transpiled_sources: + raw_src = File.from_built_relative(src) + # Generated targets are ordered deps because the must exist + # before the sources compiling them are used. After the first + # compile we get precise dependency info from dep files. + # This should work in all cases. If it does not, then just + # move them from orderdeps to proper deps. + if self.environment.is_header(src): + header_deps.append(raw_src) + else: + # We gather all these and generate compile rules below + # after `header_deps` (above) is fully generated + vala_generated_source_files.append(raw_src) + for src in vala_generated_source_files: + # Passing 'vala' here signifies that we want the compile + # arguments to be specialized for C code generated by + # valac. For instance, no warnings should be emitted. 
+ o, s = self.generate_single_compile(target, src, 'vala', [], header_deps) + obj_list.append(o) + + # Generate compile targets for all the preexisting sources for this target + for src in target_sources.values(): + if not self.environment.is_header(src) or is_compile_target: + if self.environment.is_llvm_ir(src): + o, s = self.generate_llvm_ir_compile(target, src) + obj_list.append(o) + elif is_unity and self.get_target_source_can_unity(target, src): + abs_src = os.path.join(self.environment.get_build_dir(), + src.rel_to_builddir(self.build_to_src)) + unity_src.append(abs_src) + else: + o, s = self.generate_single_compile(target, src, False, [], + header_deps + d_generated_deps + fortran_order_deps, + fortran_inc_args) + obj_list.append(o) + compiled_sources.append(s) + source2object[s] = o + + if is_unity: + for src in self.generate_unity_files(target, unity_src): + o, s = self.generate_single_compile(target, src, True, unity_deps + header_deps + d_generated_deps, + fortran_order_deps, fortran_inc_args, unity_src) + obj_list.append(o) + compiled_sources.append(s) + source2object[s] = o + if is_compile_target: + # Skip the link stage for this special type of target + return + linker, stdlib_args = self.determine_linker_and_stdlib_args(target) + if isinstance(target, build.StaticLibrary) and target.prelink: + final_obj_list = self.generate_prelink(target, obj_list) + else: + final_obj_list = obj_list + elem = self.generate_link(target, outname, final_obj_list, linker, pch_objects, stdlib_args=stdlib_args) + self.generate_dependency_scan_target(target, compiled_sources, source2object, generated_source_files, fortran_order_deps) + self.add_build(elem) + #In AIX, we archive shared libraries. If the instance is a shared library, we add a command to archive the shared library + #object and create the build element. + if isinstance(target, build.SharedLibrary) and self.environment.machines[target.for_machine].is_aix(): + elem = NinjaBuildElement(self.all_outputs, linker.get_archive_name(outname), 'AIX_LINKER', [outname]) + self.add_build(elem) + + def should_use_dyndeps_for_target(self, target: 'build.BuildTarget') -> bool: + if mesonlib.version_compare(self.ninja_version, '<1.10.0'): + return False + if 'fortran' in target.compilers: + return True + if 'cpp' not in target.compilers: + return False + if '-fmodules-ts' in target.extra_args.get('cpp', []): + return True + # Currently only the preview version of Visual Studio is supported. 
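+ # (i.e. cl 19.28.28617 or newer invoked with '-std' set to one of
+ # the 'latest' values, which is what the checks below verify)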
+ cpp = target.compilers['cpp'] + if cpp.get_id() != 'msvc': + return False + cppversion = target.get_option(OptionKey('std', machine=target.for_machine, lang='cpp')) + if cppversion not in ('latest', 'c++latest', 'vc++latest'): + return False + if not mesonlib.current_vs_supports_modules(): + return False + if mesonlib.version_compare(cpp.version, '<19.28.28617'): + return False + return True + + def generate_dependency_scan_target(self, target, compiled_sources, source2object, generated_source_files: T.List[mesonlib.File], + object_deps: T.List['mesonlib.FileOrString']) -> None: + if not self.should_use_dyndeps_for_target(target): + return + depscan_file = self.get_dep_scan_file_for(target) + pickle_base = target.name + '.dat' + pickle_file = os.path.join(self.get_target_private_dir(target), pickle_base).replace('\\', '/') + pickle_abs = os.path.join(self.get_target_private_dir_abs(target), pickle_base).replace('\\', '/') + json_abs = os.path.join(self.get_target_private_dir_abs(target), f'{target.name}-deps.json').replace('\\', '/') + rule_name = 'depscan' + scan_sources = self.select_sources_to_scan(compiled_sources) + + # Dump the sources as a json list. This avoids potential problems where + # the number of sources passed to depscan exceeds the limit imposed by + # the OS. + with open(json_abs, 'w', encoding='utf-8') as f: + json.dump(scan_sources, f) + elem = NinjaBuildElement(self.all_outputs, depscan_file, rule_name, json_abs) + elem.add_item('picklefile', pickle_file) + # Add any generated outputs to the order deps of the scan target, so + # that those sources are present + for g in generated_source_files: + elem.orderdeps.add(g.relative_name()) + elem.orderdeps.update(object_deps) + scaninfo = TargetDependencyScannerInfo(self.get_target_private_dir(target), source2object) + with open(pickle_abs, 'wb') as p: + pickle.dump(scaninfo, p) + self.add_build(elem) + + def select_sources_to_scan(self, compiled_sources): + # in practice pick up C++ and Fortran files. If some other language + # requires scanning (possibly Java to deal with inner class files) + # then add them here. + all_suffixes = set(compilers.lang_suffixes['cpp']) | set(compilers.lang_suffixes['fortran']) + selected_sources = [] + for source in compiled_sources: + ext = os.path.splitext(source)[1][1:] + if ext != 'C': + ext = ext.lower() + if ext in all_suffixes: + selected_sources.append(source) + return selected_sources + + def process_target_dependencies(self, target): + for t in target.get_dependencies(): + if t.get_id() not in self.processed_targets: + self.generate_target(t) + + def custom_target_generator_inputs(self, target): + for s in target.sources: + if isinstance(s, build.GeneratedList): + self.generate_genlist_for_target(s, target) + + def unwrap_dep_list(self, target): + deps = [] + for i in target.get_dependencies(): + # FIXME, should not grab element at zero but rather expand all. 
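+ # i.e. when a dependency entry is itself a list, only the outputs
+ # of its first element are collected for now.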
+ if isinstance(i, list): + i = i[0] + # Add a dependency on all the outputs of this target + for output in i.get_outputs(): + deps.append(os.path.join(self.get_target_dir(i), output)) + return deps + + def generate_custom_target(self, target): + self.custom_target_generator_inputs(target) + (srcs, ofilenames, cmd) = self.eval_custom_target_command(target) + deps = self.unwrap_dep_list(target) + deps += self.get_custom_target_depend_files(target) + if target.build_always_stale: + deps.append('PHONY') + if target.depfile is None: + rulename = 'CUSTOM_COMMAND' + else: + rulename = 'CUSTOM_COMMAND_DEP' + elem = NinjaBuildElement(self.all_outputs, ofilenames, rulename, srcs) + elem.add_dep(deps) + for d in target.extra_depends: + # Add a dependency on all the outputs of this target + for output in d.get_outputs(): + elem.add_dep(os.path.join(self.get_target_dir(d), output)) + + cmd, reason = self.as_meson_exe_cmdline(target.command[0], cmd[1:], + extra_bdeps=target.get_transitive_build_target_deps(), + capture=ofilenames[0] if target.capture else None, + feed=srcs[0] if target.feed else None, + env=target.env, + verbose=target.console) + if reason: + cmd_type = f' (wrapped by meson {reason})' + else: + cmd_type = '' + if target.depfile is not None: + depfile = target.get_dep_outname(elem.infilenames) + rel_dfile = os.path.join(self.get_target_dir(target), depfile) + abs_pdir = os.path.join(self.environment.get_build_dir(), self.get_target_dir(target)) + os.makedirs(abs_pdir, exist_ok=True) + elem.add_item('DEPFILE', rel_dfile) + if target.console: + elem.add_item('pool', 'console') + full_name = Path(target.subdir, target.name).as_posix() + elem.add_item('COMMAND', cmd) + elem.add_item('description', f'Generating {full_name} with a custom command{cmd_type}') + self.add_build(elem) + self.processed_targets.add(target.get_id()) + + def build_run_target_name(self, target): + if target.subproject != '': + subproject_prefix = f'{target.subproject}@@' + else: + subproject_prefix = '' + return f'{subproject_prefix}{target.name}' + + def generate_run_target(self, target: build.RunTarget): + target_name = self.build_run_target_name(target) + if not target.command: + # This is an alias target, it has no command, it just depends on + # other targets. 
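+ # Illustrative: this emits a build.ninja line of the form
+ # `build <name>: phony <deps>`, so `ninja <name>` merely builds the
+ # dependencies attached further down.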
+ elem = NinjaBuildElement(self.all_outputs, target_name, 'phony', []) + else: + target_env = self.get_run_target_env(target) + _, _, cmd = self.eval_custom_target_command(target) + meson_exe_cmd, reason = self.as_meson_exe_cmdline(target.command[0], cmd[1:], + env=target_env, + verbose=True) + cmd_type = f' (wrapped by meson {reason})' if reason else '' + elem = self.create_phony_target(self.all_outputs, target_name, 'CUSTOM_COMMAND', []) + elem.add_item('COMMAND', meson_exe_cmd) + elem.add_item('description', f'Running external command {target.name}{cmd_type}') + elem.add_item('pool', 'console') + deps = self.unwrap_dep_list(target) + deps += self.get_custom_target_depend_files(target) + elem.add_dep(deps) + self.add_build(elem) + self.processed_targets.add(target.get_id()) + + def generate_coverage_command(self, elem, outputs): + targets = self.build.get_targets().values() + use_llvm_cov = False + for target in targets: + if not hasattr(target, 'compilers'): + continue + for compiler in target.compilers.values(): + if compiler.get_id() == 'clang' and not compiler.info.is_darwin(): + use_llvm_cov = True + break + elem.add_item('COMMAND', self.environment.get_build_command() + + ['--internal', 'coverage'] + + outputs + + [self.environment.get_source_dir(), + os.path.join(self.environment.get_source_dir(), + self.build.get_subproject_dir()), + self.environment.get_build_dir(), + self.environment.get_log_dir()] + + (['--use_llvm_cov'] if use_llvm_cov else [])) + + def generate_coverage_rules(self, gcovr_exe: T.Optional[str], gcovr_version: T.Optional[str]): + e = self.create_phony_target(self.all_outputs, 'coverage', 'CUSTOM_COMMAND', 'PHONY') + self.generate_coverage_command(e, []) + e.add_item('description', 'Generates coverage reports') + self.add_build(e) + self.generate_coverage_legacy_rules(gcovr_exe, gcovr_version) + + def generate_coverage_legacy_rules(self, gcovr_exe: T.Optional[str], gcovr_version: T.Optional[str]): + e = self.create_phony_target(self.all_outputs, 'coverage-html', 'CUSTOM_COMMAND', 'PHONY') + self.generate_coverage_command(e, ['--html']) + e.add_item('description', 'Generates HTML coverage report') + self.add_build(e) + + if gcovr_exe: + e = self.create_phony_target(self.all_outputs, 'coverage-xml', 'CUSTOM_COMMAND', 'PHONY') + self.generate_coverage_command(e, ['--xml']) + e.add_item('description', 'Generates XML coverage report') + self.add_build(e) + + e = self.create_phony_target(self.all_outputs, 'coverage-text', 'CUSTOM_COMMAND', 'PHONY') + self.generate_coverage_command(e, ['--text']) + e.add_item('description', 'Generates text coverage report') + self.add_build(e) + + if mesonlib.version_compare(gcovr_version, '>=4.2'): + e = self.create_phony_target(self.all_outputs, 'coverage-sonarqube', 'CUSTOM_COMMAND', 'PHONY') + self.generate_coverage_command(e, ['--sonarqube']) + e.add_item('description', 'Generates Sonarqube XML coverage report') + self.add_build(e) + + def generate_install(self): + self.create_install_data_files() + elem = self.create_phony_target(self.all_outputs, 'install', 'CUSTOM_COMMAND', 'PHONY') + elem.add_dep('all') + elem.add_item('DESC', 'Installing files.') + elem.add_item('COMMAND', self.environment.get_build_command() + ['install', '--no-rebuild']) + elem.add_item('pool', 'console') + self.add_build(elem) + + def generate_tests(self): + self.serialize_tests() + cmd = self.environment.get_build_command(True) + ['test', '--no-rebuild'] + if not self.environment.coredata.get_option(OptionKey('stdsplit')): + cmd += ['--no-stdsplit'] + if 
self.environment.coredata.get_option(OptionKey('errorlogs')): + cmd += ['--print-errorlogs'] + elem = self.create_phony_target(self.all_outputs, 'test', 'CUSTOM_COMMAND', ['all', 'PHONY']) + elem.add_item('COMMAND', cmd) + elem.add_item('DESC', 'Running all tests.') + elem.add_item('pool', 'console') + self.add_build(elem) + + # And then benchmarks. + cmd = self.environment.get_build_command(True) + [ + 'test', '--benchmark', '--logbase', + 'benchmarklog', '--num-processes=1', '--no-rebuild'] + elem = self.create_phony_target(self.all_outputs, 'benchmark', 'CUSTOM_COMMAND', ['all', 'PHONY']) + elem.add_item('COMMAND', cmd) + elem.add_item('DESC', 'Running benchmark suite.') + elem.add_item('pool', 'console') + self.add_build(elem) + + def generate_rules(self): + self.rules = [] + self.ruledict = {} + + self.add_rule_comment(NinjaComment('Rules for module scanning.')) + self.generate_scanner_rules() + self.add_rule_comment(NinjaComment('Rules for compiling.')) + self.generate_compile_rules() + self.add_rule_comment(NinjaComment('Rules for linking.')) + self.generate_static_link_rules() + self.generate_dynamic_link_rules() + self.add_rule_comment(NinjaComment('Other rules')) + # Ninja errors out if you have deps = gcc but no depfile, so we must + # have two rules for custom commands. + self.add_rule(NinjaRule('CUSTOM_COMMAND', ['$COMMAND'], [], '$DESC', + extra='restat = 1')) + self.add_rule(NinjaRule('CUSTOM_COMMAND_DEP', ['$COMMAND'], [], '$DESC', + deps='gcc', depfile='$DEPFILE', + extra='restat = 1')) + self.add_rule(NinjaRule('COPY_FILE', self.environment.get_build_command() + ['--internal', 'copy'], + ['$in', '$out'], 'Copying $in to $out')) + + c = self.environment.get_build_command() + \ + ['--internal', + 'regenerate', + self.environment.get_source_dir(), + # Ninja always runs from the build_dir. This includes cases where the user moved the + # build directory and invalidated most references. Make sure it still regenerates. 
+ '.'] + self.add_rule(NinjaRule('REGENERATE_BUILD', + c, [], + 'Regenerating build files.', + extra='generator = 1')) + + def add_rule_comment(self, comment): + self.rules.append(comment) + + def add_build_comment(self, comment): + self.build_elements.append(comment) + + def add_rule(self, rule): + if rule.name in self.ruledict: + raise MesonException(f'Tried to add rule {rule.name} twice.') + self.rules.append(rule) + self.ruledict[rule.name] = rule + + def add_build(self, build): + build.check_outputs() + self.build_elements.append(build) + + if build.rulename != 'phony': + # reference rule + if build.rulename in self.ruledict: + build.rule = self.ruledict[build.rulename] + else: + mlog.warning(f"build statement for {build.outfilenames} references nonexistent rule {build.rulename}") + + def write_rules(self, outfile): + for b in self.build_elements: + if isinstance(b, NinjaBuildElement): + b.count_rule_references() + + for r in self.rules: + r.write(outfile) + + def write_builds(self, outfile): + for b in ProgressBar(self.build_elements, desc='Writing build.ninja'): + b.write(outfile) + + def generate_phony(self): + self.add_build_comment(NinjaComment('Phony build target, always out of date')) + elem = NinjaBuildElement(self.all_outputs, 'PHONY', 'phony', '') + self.add_build(elem) + + def generate_jar_target(self, target: build.Jar): + fname = target.get_filename() + outname_rel = os.path.join(self.get_target_dir(target), fname) + src_list = target.get_sources() + resources = target.get_java_resources() + class_list = [] + compiler = target.compilers['java'] + c = 'c' + m = 'm' + e = '' + f = 'f' + main_class = target.get_main_class() + if main_class != '': + e = 'e' + + # Add possible java generated files to src list + generated_sources = self.get_target_generated_sources(target) + gen_src_list = [] + for rel_src in generated_sources.keys(): + raw_src = File.from_built_relative(rel_src) + if rel_src.endswith('.java'): + gen_src_list.append(raw_src) + + compile_args = self.determine_single_java_compile_args(target, compiler) + for src in src_list + gen_src_list: + plain_class_path = self.generate_single_java_compile(src, target, compiler, compile_args) + class_list.append(plain_class_path) + class_dep_list = [os.path.join(self.get_target_private_dir(target), i) for i in class_list] + manifest_path = os.path.join(self.get_target_private_dir(target), 'META-INF', 'MANIFEST.MF') + manifest_fullpath = os.path.join(self.environment.get_build_dir(), manifest_path) + os.makedirs(os.path.dirname(manifest_fullpath), exist_ok=True) + with open(manifest_fullpath, 'w', encoding='utf-8') as manifest: + if any(target.link_targets): + manifest.write('Class-Path: ') + cp_paths = [os.path.join(self.get_target_dir(l), l.get_filename()) for l in target.link_targets] + manifest.write(' '.join(cp_paths)) + manifest.write('\n') + jar_rule = 'java_LINKER' + commands = [c + m + e + f] + commands.append(manifest_path) + if e != '': + commands.append(main_class) + commands.append(self.get_target_filename(target)) + # Java compilation can produce an arbitrary number of output + # class files for a single source file. Thus tell jar to just + # grab everything in the final package. + commands += ['-C', self.get_target_private_dir(target), '.'] + elem = NinjaBuildElement(self.all_outputs, outname_rel, jar_rule, []) + elem.add_dep(class_dep_list) + if resources: + # Copy all resources into the root of the jar. 
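+ # __generate_sources_structure (defined further down) emits COPY_FILE
+ # build elements for the resources and returns the copied paths, which
+ # are attached here as order-only deps of the jar step.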
+ elem.add_orderdep(self.__generate_sources_structure(Path(self.get_target_private_dir(target)), resources)[0]) + elem.add_item('ARGS', commands) + self.add_build(elem) + # Create introspection information + self.create_target_source_introspection(target, compiler, compile_args, src_list, gen_src_list) + + def generate_cs_resource_tasks(self, target): + args = [] + deps = [] + for r in target.resources: + rel_sourcefile = os.path.join(self.build_to_src, target.subdir, r) + if r.endswith('.resources'): + a = '-resource:' + rel_sourcefile + elif r.endswith('.txt') or r.endswith('.resx'): + ofilebase = os.path.splitext(os.path.basename(r))[0] + '.resources' + ofilename = os.path.join(self.get_target_private_dir(target), ofilebase) + elem = NinjaBuildElement(self.all_outputs, ofilename, "CUSTOM_COMMAND", rel_sourcefile) + elem.add_item('COMMAND', ['resgen', rel_sourcefile, ofilename]) + elem.add_item('DESC', f'Compiling resource {rel_sourcefile}') + self.add_build(elem) + deps.append(ofilename) + a = '-resource:' + ofilename + else: + raise InvalidArguments(f'Unknown resource file {r}.') + args.append(a) + return args, deps + + def generate_cs_target(self, target: build.BuildTarget): + buildtype = target.get_option(OptionKey('buildtype')) + fname = target.get_filename() + outname_rel = os.path.join(self.get_target_dir(target), fname) + src_list = target.get_sources() + compiler = target.compilers['cs'] + rel_srcs = [os.path.normpath(s.rel_to_builddir(self.build_to_src)) for s in src_list] + deps = [] + commands = compiler.compiler_args(target.extra_args.get('cs', [])) + commands += compiler.get_buildtype_args(buildtype) + commands += compiler.get_optimization_args(target.get_option(OptionKey('optimization'))) + commands += compiler.get_debug_args(target.get_option(OptionKey('debug'))) + if isinstance(target, build.Executable): + commands.append('-target:exe') + elif isinstance(target, build.SharedLibrary): + commands.append('-target:library') + else: + raise MesonException('Unknown C# target type.') + (resource_args, resource_deps) = self.generate_cs_resource_tasks(target) + commands += resource_args + deps += resource_deps + commands += compiler.get_output_args(outname_rel) + for l in target.link_targets: + lname = os.path.join(self.get_target_dir(l), l.get_filename()) + commands += compiler.get_link_args(lname) + deps.append(lname) + if '-g' in commands: + outputs = [outname_rel, outname_rel + '.mdb'] + else: + outputs = [outname_rel] + generated_sources = self.get_target_generated_sources(target) + generated_rel_srcs = [] + for rel_src in generated_sources.keys(): + if rel_src.lower().endswith('.cs'): + generated_rel_srcs.append(os.path.normpath(rel_src)) + deps.append(os.path.normpath(rel_src)) + + for dep in target.get_external_deps(): + commands.extend_direct(dep.get_link_args()) + commands += self.build.get_project_args(compiler, target.subproject, target.for_machine) + commands += self.build.get_global_args(compiler, target.for_machine) + + elem = NinjaBuildElement(self.all_outputs, outputs, self.compiler_to_rule_name(compiler), rel_srcs + generated_rel_srcs) + elem.add_dep(deps) + elem.add_item('ARGS', commands) + self.add_build(elem) + + self.generate_generator_list_rules(target) + self.create_target_source_introspection(target, compiler, commands, rel_srcs, generated_rel_srcs) + + def determine_single_java_compile_args(self, target, compiler): + args = [] + args += compiler.get_buildtype_args(target.get_option(OptionKey('buildtype'))) + args += self.build.get_global_args(compiler, 
target.for_machine) + args += self.build.get_project_args(compiler, target.subproject, target.for_machine) + args += target.get_java_args() + args += compiler.get_output_args(self.get_target_private_dir(target)) + args += target.get_classpath_args() + curdir = target.get_subdir() + sourcepath = os.path.join(self.build_to_src, curdir) + os.pathsep + sourcepath += os.path.normpath(curdir) + os.pathsep + for i in target.include_dirs: + for idir in i.get_incdirs(): + sourcepath += os.path.join(self.build_to_src, i.curdir, idir) + os.pathsep + args += ['-sourcepath', sourcepath] + return args + + def generate_single_java_compile(self, src, target, compiler, args): + deps = [os.path.join(self.get_target_dir(l), l.get_filename()) for l in target.link_targets] + generated_sources = self.get_target_generated_sources(target) + for rel_src in generated_sources.keys(): + if rel_src.endswith('.java'): + deps.append(rel_src) + rel_src = src.rel_to_builddir(self.build_to_src) + plain_class_path = src.fname[:-4] + 'class' + rel_obj = os.path.join(self.get_target_private_dir(target), plain_class_path) + element = NinjaBuildElement(self.all_outputs, rel_obj, self.compiler_to_rule_name(compiler), rel_src) + element.add_dep(deps) + element.add_item('ARGS', args) + self.add_build(element) + return plain_class_path + + def generate_java_link(self): + rule = 'java_LINKER' + command = ['jar', '$ARGS'] + description = 'Creating JAR $out' + self.add_rule(NinjaRule(rule, command, [], description)) + + def determine_dep_vapis(self, target): + """ + Peek into the sources of BuildTargets we're linking with, and if any of + them was built with Vala, assume that it also generated a .vapi file of + the same name as the BuildTarget and return the path to it relative to + the build directory. + """ + result = OrderedSet() + for dep in itertools.chain(target.link_targets, target.link_whole_targets): + if not dep.is_linkable_target(): + continue + for i in dep.sources: + if hasattr(i, 'fname'): + i = i.fname + if i.split('.')[-1] in compilers.lang_suffixes['vala']: + vapiname = dep.vala_vapi + fullname = os.path.join(self.get_target_dir(dep), vapiname) + result.add(fullname) + break + return list(result) + + def split_vala_sources(self, t: build.BuildTarget) -> \ + T.Tuple[T.MutableMapping[str, File], T.MutableMapping[str, File], + T.Tuple[T.MutableMapping[str, File], T.MutableMapping]]: + """ + Splits the target's sources into .vala, .gs, .vapi, and other sources. + Handles both preexisting and generated sources. + + Returns a tuple (vala, vapi, others) each of which is a dictionary with + the keys being the path to the file (relative to the build directory) + and the value being the object that generated or represents the file. 
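+
+ For example (illustrative), a target with preexisting sources
+ foo.vala, foo.vapi and bar.c is split so that foo.vala lands in the
+ first mapping, foo.vapi in the second and bar.c in the first member
+ of the third; the second member of the third tuple collects generated
+ non-Vala sources.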
+ """ + vala: T.MutableMapping[str, File] = OrderedDict() + vapi: T.MutableMapping[str, File] = OrderedDict() + others: T.MutableMapping[str, File] = OrderedDict() + othersgen: T.MutableMapping[str, File] = OrderedDict() + # Split preexisting sources + for s in t.get_sources(): + # BuildTarget sources are always mesonlib.File files which are + # either in the source root, or generated with configure_file and + # in the build root + if not isinstance(s, File): + raise InvalidArguments(f'All sources in target {t!r} must be of type mesonlib.File, not {s!r}') + f = s.rel_to_builddir(self.build_to_src) + if s.endswith(('.vala', '.gs')): + srctype = vala + elif s.endswith('.vapi'): + srctype = vapi + else: + srctype = others + srctype[f] = s + # Split generated sources + for gensrc in t.get_generated_sources(): + for s in gensrc.get_outputs(): + f = self.get_target_generated_dir(t, gensrc, s) + if s.endswith(('.vala', '.gs')): + srctype = vala + elif s.endswith('.vapi'): + srctype = vapi + # Generated non-Vala (C/C++) sources. Won't be used for + # generating the Vala compile rule below. + else: + srctype = othersgen + # Duplicate outputs are disastrous + if f in srctype and srctype[f] is not gensrc: + msg = 'Duplicate output {0!r} from {1!r} {2!r}; ' \ + 'conflicts with {0!r} from {4!r} {3!r}' \ + ''.format(f, type(gensrc).__name__, gensrc.name, + srctype[f].name, type(srctype[f]).__name__) + raise InvalidArguments(msg) + # Store 'somefile.vala': GeneratedList (or CustomTarget) + srctype[f] = gensrc + return vala, vapi, (others, othersgen) + + def generate_vala_compile(self, target: build.BuildTarget) -> \ + T.Tuple[T.MutableMapping[str, File], T.MutableMapping[str, File], T.List[str]]: + """Vala is compiled into C. Set up all necessary build steps here.""" + (vala_src, vapi_src, other_src) = self.split_vala_sources(target) + extra_dep_files = [] + if not vala_src: + raise InvalidArguments(f'Vala library {target.name!r} has no Vala or Genie source files.') + + valac = target.compilers['vala'] + c_out_dir = self.get_target_private_dir(target) + # C files generated by valac + vala_c_src: T.List[str] = [] + # Files generated by valac + valac_outputs: T.List = [] + # All sources that are passed to valac on the commandline + all_files = list(vapi_src) + # Passed as --basedir + srcbasedir = os.path.join(self.build_to_src, target.get_subdir()) + for (vala_file, gensrc) in vala_src.items(): + all_files.append(vala_file) + # Figure out where the Vala compiler will write the compiled C file + # + # If the Vala file is in a subdir of the build dir (in our case + # because it was generated/built by something else), and is also + # a subdir of --basedir (because the builddir is in the source + # tree, and the target subdir is the source root), the subdir + # components from the source root till the private builddir will be + # duplicated inside the private builddir. Otherwise, just the + # basename will be used. + # + # If the Vala file is outside the build directory, the paths from + # the --basedir till the subdir will be duplicated inside the + # private builddir. 
+ if isinstance(gensrc, (build.CustomTarget, build.GeneratedList)) or gensrc.is_built: + vala_c_file = os.path.splitext(os.path.basename(vala_file))[0] + '.c' + # Check if the vala file is in a subdir of --basedir + abs_srcbasedir = os.path.join(self.environment.get_source_dir(), target.get_subdir()) + abs_vala_file = os.path.join(self.environment.get_build_dir(), vala_file) + if PurePath(os.path.commonpath((abs_srcbasedir, abs_vala_file))) == PurePath(abs_srcbasedir): + vala_c_subdir = PurePath(abs_vala_file).parent.relative_to(abs_srcbasedir) + vala_c_file = os.path.join(str(vala_c_subdir), vala_c_file) + else: + path_to_target = os.path.join(self.build_to_src, target.get_subdir()) + if vala_file.startswith(path_to_target): + vala_c_file = os.path.splitext(os.path.relpath(vala_file, path_to_target))[0] + '.c' + else: + vala_c_file = os.path.splitext(os.path.basename(vala_file))[0] + '.c' + # All this will be placed inside the c_out_dir + vala_c_file = os.path.join(c_out_dir, vala_c_file) + vala_c_src.append(vala_c_file) + valac_outputs.append(vala_c_file) + + args = self.generate_basic_compiler_args(target, valac) + args += valac.get_colorout_args(target.get_option(OptionKey('b_colorout'))) + # Tell Valac to output everything in our private directory. Sadly this + # means it will also preserve the directory components of Vala sources + # found inside the build tree (generated sources). + args += ['--directory', c_out_dir] + args += ['--basedir', srcbasedir] + if target.is_linkable_target(): + # Library name + args += ['--library', target.name] + # Outputted header + hname = os.path.join(self.get_target_dir(target), target.vala_header) + args += ['--header', hname] + if target.is_unity: + # Without this the declarations will get duplicated in the .c + # files and cause a build failure when all of them are + # #include-d in one .c file. + # https://github.com/mesonbuild/meson/issues/1969 + args += ['--use-header'] + valac_outputs.append(hname) + # Outputted vapi file + vapiname = os.path.join(self.get_target_dir(target), target.vala_vapi) + # Force valac to write the vapi and gir files in the target build dir. 
+ # Without this, it will write it inside c_out_dir + args += ['--vapi', os.path.join('..', target.vala_vapi)] + valac_outputs.append(vapiname) + target.outputs += [target.vala_header, target.vala_vapi] + target.install_tag += ['devel', 'devel'] + # Install header and vapi to default locations if user requests this + if len(target.install_dir) > 1 and target.install_dir[1] is True: + target.install_dir[1] = self.environment.get_includedir() + if len(target.install_dir) > 2 and target.install_dir[2] is True: + target.install_dir[2] = os.path.join(self.environment.get_datadir(), 'vala', 'vapi') + # Generate GIR if requested + if isinstance(target.vala_gir, str): + girname = os.path.join(self.get_target_dir(target), target.vala_gir) + args += ['--gir', os.path.join('..', target.vala_gir)] + valac_outputs.append(girname) + target.outputs.append(target.vala_gir) + target.install_tag.append('devel') + # Install GIR to default location if requested by user + if len(target.install_dir) > 3 and target.install_dir[3] is True: + target.install_dir[3] = os.path.join(self.environment.get_datadir(), 'gir-1.0') + # Detect gresources and add --gresources arguments for each + for gensrc in other_src[1].values(): + if isinstance(gensrc, modules.GResourceTarget): + gres_xml, = self.get_custom_target_sources(gensrc) + args += ['--gresources=' + gres_xml] + extra_args = [] + + for a in target.extra_args.get('vala', []): + if isinstance(a, File): + relname = a.rel_to_builddir(self.build_to_src) + extra_dep_files.append(relname) + extra_args.append(relname) + else: + extra_args.append(a) + dependency_vapis = self.determine_dep_vapis(target) + extra_dep_files += dependency_vapis + args += extra_args + element = NinjaBuildElement(self.all_outputs, valac_outputs, + self.compiler_to_rule_name(valac), + all_files + dependency_vapis) + element.add_item('ARGS', args) + element.add_dep(extra_dep_files) + self.add_build(element) + self.create_target_source_introspection(target, valac, args, all_files, []) + return other_src[0], other_src[1], vala_c_src + + def generate_cython_transpile(self, target: build.BuildTarget) -> \ + T.Tuple[T.MutableMapping[str, File], T.MutableMapping[str, File], T.List[str]]: + """Generate rules for transpiling Cython files to C or C++ + + XXX: Currently only C is handled. 
+ """ + static_sources: T.MutableMapping[str, File] = OrderedDict() + generated_sources: T.MutableMapping[str, File] = OrderedDict() + cython_sources: T.List[str] = [] + + cython = target.compilers['cython'] + + args: T.List[str] = [] + args += cython.get_always_args() + args += cython.get_buildtype_args(target.get_option(OptionKey('buildtype'))) + args += cython.get_debug_args(target.get_option(OptionKey('debug'))) + args += cython.get_optimization_args(target.get_option(OptionKey('optimization'))) + args += cython.get_option_compile_args(target.get_options()) + args += self.build.get_global_args(cython, target.for_machine) + args += self.build.get_project_args(cython, target.subproject, target.for_machine) + args += target.get_extra_args('cython') + + ext = target.get_option(OptionKey('language', machine=target.for_machine, lang='cython')) + + pyx_sources = [] # Keep track of sources we're adding to build + + for src in target.get_sources(): + if src.endswith('.pyx'): + output = os.path.join(self.get_target_private_dir(target), f'{src}.{ext}') + element = NinjaBuildElement( + self.all_outputs, [output], + self.compiler_to_rule_name(cython), + [src.absolute_path(self.environment.get_source_dir(), self.environment.get_build_dir())]) + element.add_item('ARGS', args) + self.add_build(element) + # TODO: introspection? + cython_sources.append(output) + pyx_sources.append(element) + else: + static_sources[src.rel_to_builddir(self.build_to_src)] = src + + header_deps = [] # Keep track of generated headers for those sources + for gen in target.get_generated_sources(): + for ssrc in gen.get_outputs(): + if isinstance(gen, GeneratedList): + ssrc = os.path.join(self.get_target_private_dir(target), ssrc) + else: + ssrc = os.path.join(gen.get_subdir(), ssrc) + if ssrc.endswith('.pyx'): + output = os.path.join(self.get_target_private_dir(target), f'{ssrc}.{ext}') + element = NinjaBuildElement( + self.all_outputs, [output], + self.compiler_to_rule_name(cython), + [ssrc]) + element.add_item('ARGS', args) + self.add_build(element) + pyx_sources.append(element) + # TODO: introspection? 
+ cython_sources.append(output) + else: + generated_sources[ssrc] = mesonlib.File.from_built_file(gen.get_subdir(), ssrc) + # Following logic in L883-900 where we determine whether to add generated source + # as a header(order-only) dep to the .so compilation rule + if not self.environment.is_source(ssrc) and \ + not self.environment.is_object(ssrc) and \ + not self.environment.is_library(ssrc) and \ + not modules.is_module_library(ssrc): + header_deps.append(ssrc) + for source in pyx_sources: + source.add_orderdep(header_deps) + + return static_sources, generated_sources, cython_sources + + def _generate_copy_target(self, src: 'mesonlib.FileOrString', output: Path) -> None: + """Create a target to copy a source file from one location to another.""" + if isinstance(src, File): + instr = src.absolute_path(self.environment.source_dir, self.environment.build_dir) + else: + instr = src + elem = NinjaBuildElement(self.all_outputs, [str(output)], 'COPY_FILE', [instr]) + elem.add_orderdep(instr) + self.add_build(elem) + + def __generate_sources_structure(self, root: Path, structured_sources: build.StructuredSources) -> T.Tuple[T.List[str], T.Optional[str]]: + first_file: T.Optional[str] = None + orderdeps: T.List[str] = [] + for path, files in structured_sources.sources.items(): + for file in files: + if isinstance(file, File): + out = root / path / Path(file.fname).name + orderdeps.append(str(out)) + self._generate_copy_target(file, out) + if first_file is None: + first_file = str(out) + else: + for f in file.get_outputs(): + out = root / path / f + orderdeps.append(str(out)) + self._generate_copy_target(str(Path(file.subdir) / f), out) + if first_file is None: + first_file = str(out) + return orderdeps, first_file + + def _add_rust_project_entry(self, name: str, main_rust_file: str, args: CompilerArgs, + from_subproject: bool, proc_macro_dylib_path: T.Optional[str], + deps: T.List[RustDep]) -> None: + raw_edition: T.Optional[str] = mesonlib.first(reversed(args), lambda x: x.startswith('--edition')) + edition: RUST_EDITIONS = '2015' if not raw_edition else raw_edition.split('=')[-1] + + cfg: T.List[str] = [] + arg_itr: T.Iterator[str] = iter(args) + for arg in arg_itr: + if arg == '--cfg': + cfg.append(next(arg_itr)) + elif arg.startswith('--cfg'): + cfg.append(arg[len('--cfg'):]) + + crate = RustCrate( + len(self.rust_crates), + name, + main_rust_file, + edition, + deps, + cfg, + is_workspace_member=not from_subproject, + is_proc_macro=proc_macro_dylib_path is not None, + proc_macro_dylib_path=proc_macro_dylib_path, + ) + + self.rust_crates[name] = crate + + def _get_rust_dependency_name(self, target: build.BuildTarget, dependency: LibTypes) -> str: + # Convert crate names with dashes to underscores by default like + # cargo does as dashes can't be used as parts of identifiers + # in Rust + return target.rust_dependency_map.get(dependency.name, dependency.name).replace('-', '_') + + def generate_rust_target(self, target: build.BuildTarget) -> None: + rustc = target.compilers['rust'] + # Rust compiler takes only the main file as input and + # figures out what other files are needed via import + # statements and magic. 
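+ # Illustrative consequence: the crate is compiled by a single
+ # NinjaBuildElement below, and per-file dependencies are recovered via
+ # `--emit dep-info=<depfile>` together with the gcc-style depfile
+ # support in generate_rust_compile_rules.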
+ base_proxy = target.get_options() + args = rustc.compiler_args() + # Compiler args for compiling this target + args += compilers.get_base_compile_args(base_proxy, rustc) + self.generate_generator_list_rules(target) + + # dependencies need to cause a relink, they're not just for ordering + deps = [ + os.path.join(t.subdir, t.get_filename()) + for t in itertools.chain(target.link_targets, target.link_whole_targets) + ] + + # Dependencies for rust-project.json + project_deps: T.List[RustDep] = [] + + orderdeps: T.List[str] = [] + + main_rust_file = None + if target.structured_sources: + if target.structured_sources.needs_copy(): + _ods, main_rust_file = self.__generate_sources_structure(Path( + self.get_target_private_dir(target)) / 'structured', target.structured_sources) + orderdeps.extend(_ods) + else: + # The only way to get here is to have only files in the "root" + # positional argument, which are all generated into the same + # directory + g = target.structured_sources.first_file() + + if isinstance(g, File): + main_rust_file = g.rel_to_builddir(self.build_to_src) + elif isinstance(g, GeneratedList): + main_rust_file = os.path.join(self.get_target_private_dir(target), g.get_outputs()[0]) + else: + main_rust_file = os.path.join(g.get_subdir(), g.get_outputs()[0]) + + for f in target.structured_sources.as_list(): + if isinstance(f, File): + orderdeps.append(f.rel_to_builddir(self.build_to_src)) + else: + orderdeps.extend([os.path.join(self.build_to_src, f.subdir, s) + for s in f.get_outputs()]) + + for i in target.get_sources(): + if not rustc.can_compile(i): + raise InvalidArguments(f'Rust target {target.get_basename()} contains a non-rust source file.') + if main_rust_file is None: + main_rust_file = i.rel_to_builddir(self.build_to_src) + for g in target.get_generated_sources(): + for i in g.get_outputs(): + if not rustc.can_compile(i): + raise InvalidArguments(f'Rust target {target.get_basename()} contains a non-rust source file.') + if isinstance(g, GeneratedList): + fname = os.path.join(self.get_target_private_dir(target), i) + else: + fname = os.path.join(g.get_subdir(), i) + if main_rust_file is None: + main_rust_file = fname + orderdeps.append(fname) + if main_rust_file is None: + raise RuntimeError('A Rust target has no Rust sources. This is weird. Also a bug. Please report') + target_name = os.path.join(target.subdir, target.get_filename()) + if isinstance(target, build.Executable): + cratetype = 'bin' + elif hasattr(target, 'rust_crate_type'): + cratetype = target.rust_crate_type + elif isinstance(target, build.SharedLibrary): + cratetype = 'dylib' + elif isinstance(target, build.StaticLibrary): + cratetype = 'rlib' + else: + raise InvalidArguments('Unknown target type for rustc.') + args.extend(['--crate-type', cratetype]) + + # If we're dynamically linking, add those arguments + # + # Rust is super annoying, calling -C link-arg foo does not work, it has + # to be -C link-arg=foo + if cratetype in {'bin', 'dylib'}: + args.extend(rustc.get_linker_always_args()) + + args += self.generate_basic_compiler_args(target, rustc, False) + # Rustc replaces - with _. 
spaces or dots are not allowed, so we replace them with underscores + args += ['--crate-name', target.name.replace('-', '_').replace(' ', '_').replace('.', '_')] + depfile = os.path.join(target.subdir, target.name + '.d') + args += ['--emit', f'dep-info={depfile}', '--emit', 'link'] + args += target.get_extra_args('rust') + output = rustc.get_output_args(os.path.join(target.subdir, target.get_filename())) + args += output + linkdirs = mesonlib.OrderedSet() + external_deps = target.external_deps.copy() + + # Have we already injected msvc-crt args? + # + # If we don't have A C, C++, or Fortran compiler that is + # VisualStudioLike treat this as if we've already injected them + # + # We handle this here rather than in the rust compiler because in + # general we don't want to link rust targets to a non-default crt. + # However, because of the way that MSCRTs work you can only link to one + # per target, so if A links to the debug one, and B links to the normal + # one you can't link A and B. Rust is hardcoded to the default one, + # so if we compile C/C++ code and link against a non-default MSCRT then + # linking will fail. We can work around this by injecting MSCRT link + # arguments early in the rustc command line + # https://github.com/rust-lang/rust/issues/39016 + crt_args_injected = not any(x is not None and x.get_argument_syntax() == 'msvc' for x in + (self.environment.coredata.compilers[target.for_machine].get(l) + for l in ['c', 'cpp', 'fortran'])) + + crt_link_args: T.List[str] = [] + try: + buildtype = target.get_option(OptionKey('buildtype')) + crt = target.get_option(OptionKey('b_vscrt')) + is_debug = buildtype == 'debug' + + if crt == 'from_buildtype': + crt = 'mdd' if is_debug else 'md' + elif crt == 'static_from_buildtype': + crt = 'mtd' if is_debug else 'mt' + + if crt == 'mdd': + crt_link_args = ['-l', 'static=msvcrtd'] + elif crt == 'md': + # this is the default, no need to inject anything + crt_args_injected = True + elif crt == 'mtd': + crt_link_args = ['-l', 'static=libcmtd'] + elif crt == 'mt': + crt_link_args = ['-l', 'static=libcmt'] + + except KeyError: + crt_args_injected = True + + # TODO: we likely need to use verbatim to handle name_prefix and name_suffix + for d in target.link_targets: + linkdirs.add(d.subdir) + # staticlib and cdylib provide a plain C ABI, i.e. contain no Rust + # metadata. As such they should be treated like any other external + # link target + if d.uses_rust() and d.rust_crate_type not in ['staticlib', 'cdylib']: + # specify `extern CRATE_NAME=OUTPUT_FILE` for each Rust + # dependency, so that collisions with libraries in rustc's + # sysroot don't cause ambiguity + d_name = self._get_rust_dependency_name(target, d) + args += ['--extern', '{}={}'.format(d_name, os.path.join(d.subdir, d.filename))] + project_deps.append(RustDep(d_name, self.rust_crates[d.name].order)) + continue + + if not crt_args_injected and not {'c', 'cpp', 'fortran'}.isdisjoint(d.compilers): + args += crt_link_args + crt_args_injected = True + + if isinstance(d, build.StaticLibrary): + # Rustc doesn't follow Meson's convention that static libraries + # are called .a, and import libraries are .lib, so we have to + # manually handle that. 
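+ # Illustrative: with MSVC-style linkers ('link', 'lld-link') the
+ # artifact is e.g. foo.lib rather than libfoo.a, so the full filename
+ # is passed through `-C link-arg=...`; otherwise the conventional
+ # naming allows a plain `-l static=foo`.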
+ if rustc.linker.id in {'link', 'lld-link'}:
+ args += ['-C', f'link-arg={self.get_target_filename_for_linking(d)}']
+ else:
+ args += ['-l', f'static={d.name}']
+ external_deps.extend(d.external_deps)
+ else:
+ # Rust uses -l for non-Rust dependencies, but we still need to
+ # add dylib=foo
+ args += ['-l', f'dylib={d.name}']
+
+ # Since 1.61.0 Rust has a special modifier for whole-archive linking,
+ # before that it would treat linking two static libraries as
+ # whole-archive linking. However, to make this work we have to disable
+ # bundling, which can't be done until 1.63.0… So for 1.61–1.62 we just
+ # have to hope that the default cases of +whole-archive are sufficient.
+ # See: https://github.com/rust-lang/rust/issues/99429
+ if mesonlib.version_compare(rustc.version, '>= 1.63.0'):
+ whole_archive = ':+whole-archive,-bundle'
+ else:
+ whole_archive = ''
+
+ if mesonlib.version_compare(rustc.version, '>= 1.67.0'):
+ verbatim = ',+verbatim'
+ else:
+ verbatim = ''
+
+ for d in target.link_whole_targets:
+ linkdirs.add(d.subdir)
+ if d.uses_rust():
+ # specify `extern CRATE_NAME=OUTPUT_FILE` for each Rust
+ # dependency, so that collisions with libraries in rustc's
+ # sysroot don't cause ambiguity
+ d_name = self._get_rust_dependency_name(target, d)
+ args += ['--extern', '{}={}'.format(d_name, os.path.join(d.subdir, d.filename))]
+ project_deps.append(RustDep(d_name, self.rust_crates[d.name].order))
+ else:
+ if not crt_args_injected and not {'c', 'cpp', 'fortran'}.isdisjoint(d.compilers):
+ args += crt_link_args
+ crt_args_injected = True
+
+ if rustc.linker.id in {'link', 'lld-link'}:
+ if verbatim:
+ # If we can use the verbatim modifier, then everything is great
+ args += ['-l', f'static{whole_archive}{verbatim}={d.get_outputs()[0]}']
+ elif isinstance(target, build.StaticLibrary):
+ # If we don't, for static libraries the only option is
+ # to make a copy, since we can't pass objects in, or
+ # directly affect the archiver, but we're not going to
+ # do that given how quickly rustc versions go out of
+ # support unless there's a compelling reason to do so.
+ # This only affects 1.61–1.66
+ mlog.warning('Due to limitations in Rustc versions 1.61–1.66 and meson library naming',
+ 'whole-archive linking with MSVC may or may not work. Upgrade rustc to',
+ '>= 1.67. A best effort is being made, but likely won\'t work')
+ args += ['-l', f'static={d.name}']
+ else:
+ # When doing dynamic linking (binaries and [c]dylibs),
+ # we can instead just proxy the correct arguments to the linker
+ for link_whole_arg in rustc.linker.get_link_whole_for([self.get_target_filename_for_linking(d)]):
+ args += ['-C', f'link-arg={link_whole_arg}']
+ else:
+ args += ['-l', f'static{whole_archive}={d.name}']
+ external_deps.extend(d.external_deps)
+ for e in external_deps:
+ for a in e.get_link_args():
+ if a in rustc.native_static_libs:
+ # Exclude link args that rustc already adds by default
+ continue
+ if a.endswith(('.dll', '.so', '.dylib')):
+ dir_, lib = os.path.split(a)
+ linkdirs.add(dir_)
+ lib, ext = os.path.splitext(lib)
+ if lib.startswith('lib'):
+ lib = lib[3:]
+ args.extend(['-l', f'dylib={lib}'])
+ elif a.startswith('-L'):
+ args.append(a)
+ elif a.startswith('-l'):
+ _type = 'static' if e.static else 'dylib'
+ args.extend(['-l', f'{_type}={a[2:]}'])
+ for d in linkdirs:
+ if d == '':
+ d = '.'
+ args += ['-L', d] + + # Because of the way rustc links, this must come after any potential + # library need to link with their stdlibs (C++ and Fortran, for example) + args.extend(target.get_used_stdlib_args('rust')) + + target_deps = target.get_dependencies() + has_shared_deps = any(isinstance(dep, build.SharedLibrary) for dep in target_deps) + has_rust_shared_deps = any(dep.uses_rust() + and dep.rust_crate_type == 'dylib' + for dep in target_deps) + + if cratetype in {'dylib', 'proc-macro'} or has_rust_shared_deps: + # add prefer-dynamic if any of the Rust libraries we link + # against are dynamic or this is a dynamic library itself, + # otherwise we'll end up with multiple implementations of libstd. + args += ['-C', 'prefer-dynamic'] + + if isinstance(target, build.SharedLibrary) or has_shared_deps: + # build the usual rpath arguments as well... + + # Set runtime-paths so we can run executables without needing to set + # LD_LIBRARY_PATH, etc in the environment. Doesn't work on Windows. + if has_path_sep(target.name): + # Target names really should not have slashes in them, but + # unfortunately we did not check for that and some downstream projects + # now have them. Once slashes are forbidden, remove this bit. + target_slashname_workaround_dir = os.path.join(os.path.dirname(target.name), + self.get_target_dir(target)) + else: + target_slashname_workaround_dir = self.get_target_dir(target) + rpath_args, target.rpath_dirs_to_remove = ( + rustc.build_rpath_args(self.environment, + self.environment.get_build_dir(), + target_slashname_workaround_dir, + self.determine_rpath_dirs(target), + target.build_rpath, + target.install_rpath)) + # ... but then add rustc's sysroot to account for rustup + # installations + for rpath_arg in rpath_args: + args += ['-C', 'link-arg=' + rpath_arg + ':' + os.path.join(rustc.get_sysroot(), 'lib')] + + proc_macro_dylib_path = None + if getattr(target, 'rust_crate_type', '') == 'proc-macro': + proc_macro_dylib_path = os.path.abspath(os.path.join(target.subdir, target.get_filename())) + + self._add_rust_project_entry(target.name, + os.path.abspath(os.path.join(self.environment.build_dir, main_rust_file)), + args, + bool(target.subproject), + proc_macro_dylib_path, + project_deps) + + compiler_name = self.compiler_to_rule_name(rustc) + element = NinjaBuildElement(self.all_outputs, target_name, compiler_name, main_rust_file) + if orderdeps: + element.add_orderdep(orderdeps) + if deps: + element.add_dep(deps) + element.add_item('ARGS', args) + element.add_item('targetdep', depfile) + element.add_item('cratetype', cratetype) + self.add_build(element) + if isinstance(target, build.SharedLibrary): + self.generate_shsym(target) + self.create_target_source_introspection(target, rustc, args, [main_rust_file], []) + + @staticmethod + def get_rule_suffix(for_machine: MachineChoice) -> str: + return PerMachine('_FOR_BUILD', '')[for_machine] + + @classmethod + def get_compiler_rule_name(cls, lang: str, for_machine: MachineChoice, mode: str = 'COMPILER') -> str: + return f'{lang}_{mode}{cls.get_rule_suffix(for_machine)}' + + @classmethod + def compiler_to_rule_name(cls, compiler: Compiler) -> str: + return cls.get_compiler_rule_name(compiler.get_language(), compiler.for_machine, compiler.mode) + + @classmethod + def compiler_to_pch_rule_name(cls, compiler: Compiler) -> str: + return cls.get_compiler_rule_name(compiler.get_language(), compiler.for_machine, 'PCH') + + def swift_module_file_name(self, target): + return os.path.join(self.get_target_private_dir(target), + 
self.target_swift_modulename(target) + '.swiftmodule') + + def target_swift_modulename(self, target): + return target.name + + def determine_swift_dep_modules(self, target): + result = [] + for l in target.link_targets: + if self.is_swift_target(l): + result.append(self.swift_module_file_name(l)) + return result + + def get_swift_link_deps(self, target): + result = [] + for l in target.link_targets: + result.append(self.get_target_filename(l)) + return result + + def split_swift_generated_sources(self, target): + all_srcs = self.get_target_generated_sources(target) + srcs = [] + others = [] + for i in all_srcs: + if i.endswith('.swift'): + srcs.append(i) + else: + others.append(i) + return srcs, others + + def generate_swift_target(self, target): + module_name = self.target_swift_modulename(target) + swiftc = target.compilers['swift'] + abssrc = [] + relsrc = [] + abs_headers = [] + header_imports = [] + for i in target.get_sources(): + if swiftc.can_compile(i): + rels = i.rel_to_builddir(self.build_to_src) + abss = os.path.normpath(os.path.join(self.environment.get_build_dir(), rels)) + relsrc.append(rels) + abssrc.append(abss) + elif self.environment.is_header(i): + relh = i.rel_to_builddir(self.build_to_src) + absh = os.path.normpath(os.path.join(self.environment.get_build_dir(), relh)) + abs_headers.append(absh) + header_imports += swiftc.get_header_import_args(absh) + else: + raise InvalidArguments(f'Swift target {target.get_basename()} contains a non-swift source file.') + os.makedirs(self.get_target_private_dir_abs(target), exist_ok=True) + compile_args = swiftc.get_compile_only_args() + compile_args += swiftc.get_optimization_args(target.get_option(OptionKey('optimization'))) + compile_args += swiftc.get_debug_args(target.get_option(OptionKey('debug'))) + compile_args += swiftc.get_module_args(module_name) + compile_args += self.build.get_project_args(swiftc, target.subproject, target.for_machine) + compile_args += self.build.get_global_args(swiftc, target.for_machine) + for i in reversed(target.get_include_dirs()): + basedir = i.get_curdir() + for d in i.get_incdirs(): + if d not in ('', '.'): + expdir = os.path.join(basedir, d) + else: + expdir = basedir + srctreedir = os.path.normpath(os.path.join(self.environment.get_build_dir(), self.build_to_src, expdir)) + sargs = swiftc.get_include_args(srctreedir, False) + compile_args += sargs + link_args = swiftc.get_output_args(os.path.join(self.environment.get_build_dir(), self.get_target_filename(target))) + link_args += self.build.get_project_link_args(swiftc, target.subproject, target.for_machine) + link_args += self.build.get_global_link_args(swiftc, target.for_machine) + rundir = self.get_target_private_dir(target) + out_module_name = self.swift_module_file_name(target) + in_module_files = self.determine_swift_dep_modules(target) + abs_module_dirs = self.determine_swift_dep_dirs(target) + module_includes = [] + for x in abs_module_dirs: + module_includes += swiftc.get_include_args(x, False) + link_deps = self.get_swift_link_deps(target) + abs_link_deps = [os.path.join(self.environment.get_build_dir(), x) for x in link_deps] + for d in target.link_targets: + reldir = self.get_target_dir(d) + if reldir == '': + reldir = '.' 
+ link_args += ['-L', os.path.normpath(os.path.join(self.environment.get_build_dir(), reldir))] + (rel_generated, _) = self.split_swift_generated_sources(target) + abs_generated = [os.path.join(self.environment.get_build_dir(), x) for x in rel_generated] + # We need absolute paths because swiftc needs to be invoked in a subdir + # and this is the easiest way about it. + objects = [] # Relative to swift invocation dir + rel_objects = [] # Relative to build.ninja + for i in abssrc + abs_generated: + base = os.path.basename(i) + oname = os.path.splitext(base)[0] + '.o' + objects.append(oname) + rel_objects.append(os.path.join(self.get_target_private_dir(target), oname)) + + rulename = self.compiler_to_rule_name(swiftc) + + # Swiftc does not seem to be able to emit objects and module files in one go. + elem = NinjaBuildElement(self.all_outputs, rel_objects, rulename, abssrc) + elem.add_dep(in_module_files + rel_generated) + elem.add_dep(abs_headers) + elem.add_item('ARGS', compile_args + header_imports + abs_generated + module_includes) + elem.add_item('RUNDIR', rundir) + self.add_build(elem) + elem = NinjaBuildElement(self.all_outputs, out_module_name, rulename, abssrc) + elem.add_dep(in_module_files + rel_generated) + elem.add_item('ARGS', compile_args + abs_generated + module_includes + swiftc.get_mod_gen_args()) + elem.add_item('RUNDIR', rundir) + self.add_build(elem) + if isinstance(target, build.StaticLibrary): + elem = self.generate_link(target, self.get_target_filename(target), + rel_objects, self.build.static_linker[target.for_machine]) + self.add_build(elem) + elif isinstance(target, build.Executable): + elem = NinjaBuildElement(self.all_outputs, self.get_target_filename(target), rulename, []) + elem.add_dep(rel_objects) + elem.add_dep(link_deps) + elem.add_item('ARGS', link_args + swiftc.get_std_exe_link_args() + objects + abs_link_deps) + elem.add_item('RUNDIR', rundir) + self.add_build(elem) + else: + raise MesonException('Swift supports only executable and static library targets.') + # Introspection information + self.create_target_source_introspection(target, swiftc, compile_args + header_imports + module_includes, relsrc, rel_generated) + + def _rsp_options(self, tool: T.Union['Compiler', 'StaticLinker', 'DynamicLinker']) -> T.Dict[str, T.Union[bool, RSPFileSyntax]]: + """Helper method to get rsp options. + + rsp_file_syntax() is only guaranteed to be implemented if + can_linker_accept_rsp() returns True. + """ + options = {'rspable': tool.can_linker_accept_rsp()} + if options['rspable']: + options['rspfile_quote_style'] = tool.rsp_file_syntax() + return options + + def generate_static_link_rules(self): + num_pools = self.environment.coredata.options[OptionKey('backend_max_links')].value + if 'java' in self.environment.coredata.compilers.host: + self.generate_java_link() + for for_machine in MachineChoice: + static_linker = self.build.static_linker[for_machine] + if static_linker is None: + continue + rule = 'STATIC_LINKER{}'.format(self.get_rule_suffix(for_machine)) + cmdlist = [] + args = ['$in'] + # FIXME: Must normalize file names with pathlib.Path before writing + # them out to fix this properly on Windows. See: + # https://github.com/mesonbuild/meson/issues/1517 + # https://github.com/mesonbuild/meson/issues/1526 + if isinstance(static_linker, ArLikeLinker) and not mesonlib.is_windows(): + # `ar` has no options to overwrite archives. It always appends, + # which is never what we want. Delete an existing library first if + # it exists. 
https://github.com/mesonbuild/meson/issues/1355 + cmdlist = execute_wrapper + [c.format('$out') for c in rmfile_prefix] + cmdlist += static_linker.get_exelist() + cmdlist += ['$LINK_ARGS'] + cmdlist += NinjaCommandArg.list(static_linker.get_output_args('$out'), Quoting.none) + description = 'Linking static target $out' + if num_pools > 0: + pool = 'pool = link_pool' + else: + pool = None + + options = self._rsp_options(static_linker) + self.add_rule(NinjaRule(rule, cmdlist, args, description, **options, extra=pool)) + + def generate_dynamic_link_rules(self): + num_pools = self.environment.coredata.options[OptionKey('backend_max_links')].value + for for_machine in MachineChoice: + complist = self.environment.coredata.compilers[for_machine] + for langname, compiler in complist.items(): + if langname in {'java', 'vala', 'rust', 'cs', 'cython'}: + continue + rule = '{}_LINKER{}'.format(langname, self.get_rule_suffix(for_machine)) + command = compiler.get_linker_exelist() + args = ['$ARGS'] + NinjaCommandArg.list(compiler.get_linker_output_args('$out'), Quoting.none) + ['$in', '$LINK_ARGS'] + description = 'Linking target $out' + if num_pools > 0: + pool = 'pool = link_pool' + else: + pool = None + + options = self._rsp_options(compiler) + self.add_rule(NinjaRule(rule, command, args, description, **options, extra=pool)) + if self.environment.machines[for_machine].is_aix(): + rule = 'AIX_LINKER{}'.format(self.get_rule_suffix(for_machine)) + description = 'Archiving AIX shared library' + cmdlist = compiler.get_command_to_archive_shlib() + args = [] + options = {} + self.add_rule(NinjaRule(rule, cmdlist, args, description, **options, extra=None)) + + args = self.environment.get_build_command() + \ + ['--internal', + 'symbolextractor', + self.environment.get_build_dir(), + '$in', + '$IMPLIB', + '$out'] + symrule = 'SHSYM' + symcmd = args + ['$CROSS'] + syndesc = 'Generating symbol file $out' + synstat = 'restat = 1' + self.add_rule(NinjaRule(symrule, symcmd, [], syndesc, extra=synstat)) + + def generate_java_compile_rule(self, compiler): + rule = self.compiler_to_rule_name(compiler) + command = compiler.get_exelist() + ['$ARGS', '$in'] + description = 'Compiling Java object $in' + self.add_rule(NinjaRule(rule, command, [], description)) + + def generate_cs_compile_rule(self, compiler: 'CsCompiler') -> None: + rule = self.compiler_to_rule_name(compiler) + command = compiler.get_exelist() + args = ['$ARGS', '$in'] + description = 'Compiling C Sharp target $out' + self.add_rule(NinjaRule(rule, command, args, description, + rspable=mesonlib.is_windows(), + rspfile_quote_style=compiler.rsp_file_syntax())) + + def generate_vala_compile_rules(self, compiler): + rule = self.compiler_to_rule_name(compiler) + command = compiler.get_exelist() + ['$ARGS', '$in'] + description = 'Compiling Vala source $in' + self.add_rule(NinjaRule(rule, command, [], description, extra='restat = 1')) + + def generate_cython_compile_rules(self, compiler: 'Compiler') -> None: + rule = self.compiler_to_rule_name(compiler) + description = 'Compiling Cython source $in' + command = compiler.get_exelist() + + depargs = compiler.get_dependency_gen_args('$out', '$DEPFILE') + depfile = '$out.dep' if depargs else None + + args = depargs + ['$ARGS', '$in'] + args += NinjaCommandArg.list(compiler.get_output_args('$out'), Quoting.none) + self.add_rule(NinjaRule(rule, command + args, [], + description, + depfile=depfile, + extra='restat = 1')) + + def generate_rust_compile_rules(self, compiler): + rule = self.compiler_to_rule_name(compiler) + 
command = compiler.get_exelist() + ['$ARGS', '$in'] + description = 'Compiling Rust source $in' + depfile = '$targetdep' + depstyle = 'gcc' + self.add_rule(NinjaRule(rule, command, [], description, deps=depstyle, + depfile=depfile)) + + def generate_swift_compile_rules(self, compiler): + rule = self.compiler_to_rule_name(compiler) + full_exe = self.environment.get_build_command() + [ + '--internal', + 'dirchanger', + '$RUNDIR', + ] + invoc = full_exe + compiler.get_exelist() + command = invoc + ['$ARGS', '$in'] + description = 'Compiling Swift source $in' + self.add_rule(NinjaRule(rule, command, [], description)) + + def use_dyndeps_for_fortran(self) -> bool: + '''Use the new Ninja feature for scanning dependencies during build, + rather than up front. Remove this and all old scanning code once Ninja + minimum version is bumped to 1.10.''' + return mesonlib.version_compare(self.ninja_version, '>=1.10.0') + + def generate_fortran_dep_hack(self, crstr: str) -> None: + if self.use_dyndeps_for_fortran(): + return + rule = f'FORTRAN_DEP_HACK{crstr}' + if mesonlib.is_windows(): + cmd = ['cmd', '/C'] + else: + cmd = ['true'] + self.add_rule_comment(NinjaComment('''Workaround for these issues: +https://groups.google.com/forum/#!topic/ninja-build/j-2RfBIOd_8 +https://gcc.gnu.org/bugzilla/show_bug.cgi?id=47485''')) + self.add_rule(NinjaRule(rule, cmd, [], 'Dep hack', extra='restat = 1')) + + def generate_llvm_ir_compile_rule(self, compiler): + if self.created_llvm_ir_rule[compiler.for_machine]: + return + rule = self.get_compiler_rule_name('llvm_ir', compiler.for_machine) + command = compiler.get_exelist() + args = ['$ARGS'] + NinjaCommandArg.list(compiler.get_output_args('$out'), Quoting.none) + compiler.get_compile_only_args() + ['$in'] + description = 'Compiling LLVM IR object $in' + + options = self._rsp_options(compiler) + + self.add_rule(NinjaRule(rule, command, args, description, **options)) + self.created_llvm_ir_rule[compiler.for_machine] = True + + def generate_compile_rule_for(self, langname, compiler): + if langname == 'java': + self.generate_java_compile_rule(compiler) + return + if langname == 'cs': + if self.environment.machines.matches_build_machine(compiler.for_machine): + self.generate_cs_compile_rule(compiler) + return + if langname == 'vala': + self.generate_vala_compile_rules(compiler) + return + if langname == 'rust': + self.generate_rust_compile_rules(compiler) + return + if langname == 'swift': + if self.environment.machines.matches_build_machine(compiler.for_machine): + self.generate_swift_compile_rules(compiler) + return + if langname == 'cython': + self.generate_cython_compile_rules(compiler) + return + crstr = self.get_rule_suffix(compiler.for_machine) + options = self._rsp_options(compiler) + if langname == 'fortran': + self.generate_fortran_dep_hack(crstr) + # gfortran does not update the modification time of *.mod files, therefore restat is needed. 
+ # See also: https://github.com/ninja-build/ninja/pull/2275 + options['extra'] = 'restat = 1' + rule = self.compiler_to_rule_name(compiler) + depargs = NinjaCommandArg.list(compiler.get_dependency_gen_args('$out', '$DEPFILE'), Quoting.none) + command = compiler.get_exelist() + args = ['$ARGS'] + depargs + NinjaCommandArg.list(compiler.get_output_args('$out'), Quoting.none) + compiler.get_compile_only_args() + ['$in'] + description = f'Compiling {compiler.get_display_language()} object $out' + if compiler.get_argument_syntax() == 'msvc': + deps = 'msvc' + depfile = None + else: + deps = 'gcc' + depfile = '$DEPFILE' + self.add_rule(NinjaRule(rule, command, args, description, **options, + deps=deps, depfile=depfile)) + + def generate_pch_rule_for(self, langname, compiler): + if langname not in {'c', 'cpp'}: + return + rule = self.compiler_to_pch_rule_name(compiler) + depargs = compiler.get_dependency_gen_args('$out', '$DEPFILE') + + if compiler.get_argument_syntax() == 'msvc': + output = [] + else: + output = NinjaCommandArg.list(compiler.get_output_args('$out'), Quoting.none) + + if 'mwcc' in compiler.id: + output[0].s = '-precompile' + command = compiler.get_exelist() + ['$ARGS'] + depargs + output + ['$in'] # '-c' must be removed + else: + command = compiler.get_exelist() + ['$ARGS'] + depargs + output + compiler.get_compile_only_args() + ['$in'] + description = 'Precompiling header $in' + if compiler.get_argument_syntax() == 'msvc': + deps = 'msvc' + depfile = None + else: + deps = 'gcc' + depfile = '$DEPFILE' + self.add_rule(NinjaRule(rule, command, [], description, deps=deps, + depfile=depfile)) + + def generate_scanner_rules(self): + rulename = 'depscan' + if rulename in self.ruledict: + # Scanning command is the same for native and cross compilation. + return + command = self.environment.get_build_command() + \ + ['--internal', 'depscan'] + args = ['$picklefile', '$out', '$in'] + description = 'Module scanner.' 
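+ # Illustrative rendering of the resulting rule:
+ #
+ # rule depscan
+ # command = <meson> --internal depscan $picklefile $out $in
+ #
+ # where $in is the JSON source list written by
+ # generate_dependency_scan_target above.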
+ rule = NinjaRule(rulename, command, args, description) + self.add_rule(rule) + + def generate_compile_rules(self): + for for_machine in MachineChoice: + clist = self.environment.coredata.compilers[for_machine] + for langname, compiler in clist.items(): + if compiler.get_id() == 'clang': + self.generate_llvm_ir_compile_rule(compiler) + self.generate_compile_rule_for(langname, compiler) + self.generate_pch_rule_for(langname, compiler) + for mode in compiler.get_modes(): + self.generate_compile_rule_for(langname, mode) + + def generate_generator_list_rules(self, target): + # CustomTargets have already written their rules and + # CustomTargetIndexes don't actually get generated, so write rules for + # GeneratedLists here + for genlist in target.get_generated_sources(): + if isinstance(genlist, (build.CustomTarget, build.CustomTargetIndex)): + continue + self.generate_genlist_for_target(genlist, target) + + def replace_paths(self, target, args, override_subdir=None): + if override_subdir: + source_target_dir = os.path.join(self.build_to_src, override_subdir) + else: + source_target_dir = self.get_target_source_dir(target) + relout = self.get_target_private_dir(target) + args = [x.replace("@SOURCE_DIR@", self.build_to_src).replace("@BUILD_DIR@", relout) + for x in args] + args = [x.replace("@CURRENT_SOURCE_DIR@", source_target_dir) for x in args] + args = [x.replace("@SOURCE_ROOT@", self.build_to_src).replace("@BUILD_ROOT@", '.') + for x in args] + args = [x.replace('\\', '/') for x in args] + return args + + def generate_genlist_for_target(self, genlist: build.GeneratedList, target: build.BuildTarget) -> None: + for x in genlist.depends: + if isinstance(x, build.GeneratedList): + self.generate_genlist_for_target(x, target) + generator = genlist.get_generator() + subdir = genlist.subdir + exe = generator.get_exe() + infilelist = genlist.get_inputs() + outfilelist = genlist.get_outputs() + extra_dependencies = self.get_custom_target_depend_files(genlist) + for i, curfile in enumerate(infilelist): + if len(generator.outputs) == 1: + sole_output = os.path.join(self.get_target_private_dir(target), outfilelist[i]) + else: + sole_output = f'{curfile}' + infilename = curfile.rel_to_builddir(self.build_to_src, self.get_target_private_dir(target)) + base_args = generator.get_arglist(infilename) + outfiles = genlist.get_outputs_for(curfile) + outfiles = [os.path.join(self.get_target_private_dir(target), of) for of in outfiles] + if generator.depfile is None: + rulename = 'CUSTOM_COMMAND' + args = base_args + else: + rulename = 'CUSTOM_COMMAND_DEP' + depfilename = generator.get_dep_outname(infilename) + depfile = os.path.join(self.get_target_private_dir(target), depfilename) + args = [x.replace('@DEPFILE@', depfile) for x in base_args] + args = [x.replace("@INPUT@", infilename).replace('@OUTPUT@', sole_output) + for x in args] + args = self.replace_outputs(args, self.get_target_private_dir(target), outfilelist) + # We have consumed output files, so drop them from the list of remaining outputs. 
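+ # (e.g. with two outputs per input, ['a0', 'b0', 'a1', 'b1'] becomes
+ # ['a1', 'b1'] once the first input has been handled)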
+ if len(generator.outputs) > 1: + outfilelist = outfilelist[len(generator.outputs):] + args = self.replace_paths(target, args, override_subdir=subdir) + cmdlist, reason = self.as_meson_exe_cmdline(exe, + self.replace_extra_args(args, genlist), + capture=outfiles[0] if generator.capture else None) + abs_pdir = os.path.join(self.environment.get_build_dir(), self.get_target_dir(target)) + os.makedirs(abs_pdir, exist_ok=True) + + elem = NinjaBuildElement(self.all_outputs, outfiles, rulename, infilename) + elem.add_dep([self.get_target_filename(x) for x in generator.depends]) + if generator.depfile is not None: + elem.add_item('DEPFILE', depfile) + if len(extra_dependencies) > 0: + elem.add_dep(extra_dependencies) + + if len(generator.outputs) == 1: + what = f'{sole_output!r}' + else: + # since there are multiple outputs, we log the source that caused the rebuild + what = f'from {sole_output!r}' + if reason: + reason = f' (wrapped by meson {reason})' + elem.add_item('DESC', f'Generating {what}{reason}') + + if isinstance(exe, build.BuildTarget): + elem.add_dep(self.get_target_filename(exe)) + elem.add_item('COMMAND', cmdlist) + self.add_build(elem) + + def scan_fortran_module_outputs(self, target): + """ + Find all module and submodule made available in a Fortran code file. + """ + if self.use_dyndeps_for_fortran(): + return + compiler = None + # TODO other compilers + for lang, c in self.environment.coredata.compilers.host.items(): + if lang == 'fortran': + compiler = c + break + if compiler is None: + self.fortran_deps[target.get_basename()] = {} + return + + modre = re.compile(FORTRAN_MODULE_PAT, re.IGNORECASE) + submodre = re.compile(FORTRAN_SUBMOD_PAT, re.IGNORECASE) + module_files = {} + submodule_files = {} + for s in target.get_sources(): + # FIXME, does not work for Fortran sources generated by + # custom_target() and generator() as those are run after + # the configuration (configure_file() is OK) + if not compiler.can_compile(s): + continue + filename = s.absolute_path(self.environment.get_source_dir(), + self.environment.get_build_dir()) + # Fortran keywords must be ASCII. + with open(filename, encoding='ascii', errors='ignore') as f: + for line in f: + modmatch = modre.match(line) + if modmatch is not None: + modname = modmatch.group(1).lower() + if modname in module_files: + raise InvalidArguments( + f'Namespace collision: module {modname} defined in ' + f'two files {module_files[modname]} and {s}.') + module_files[modname] = s + else: + submodmatch = submodre.match(line) + if submodmatch is not None: + # '_' is arbitrarily used to distinguish submod from mod. 
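+ # e.g. "submodule (ancestor:parent) child" is recorded under the
+ # key "ancestor_child" below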
+ parents = submodmatch.group(1).lower().split(':') + submodname = parents[0] + '_' + submodmatch.group(2).lower() + + if submodname in submodule_files: + raise InvalidArguments( + f'Namespace collision: submodule {submodname} defined in ' + f'two files {submodule_files[submodname]} and {s}.') + submodule_files[submodname] = s + + self.fortran_deps[target.get_basename()] = {**module_files, **submodule_files} + + def get_fortran_deps(self, compiler: FortranCompiler, src: Path, target) -> T.List[str]: + """ + Find all module and submodule needed by a Fortran target + """ + if self.use_dyndeps_for_fortran(): + return [] + + dirname = Path(self.get_target_private_dir(target)) + tdeps = self.fortran_deps[target.get_basename()] + srcdir = Path(self.source_dir) + + mod_files = _scan_fortran_file_deps(src, srcdir, dirname, tdeps, compiler) + return mod_files + + def get_no_stdlib_link_args(self, target, linker): + if hasattr(linker, 'language') and linker.language in self.build.stdlibs[target.for_machine]: + return linker.get_no_stdlib_link_args() + return [] + + def get_compile_debugfile_args(self, compiler, target, objfile): + # The way MSVC uses PDB files is documented exactly nowhere so + # the following is what we have been able to decipher via + # reverse engineering. + # + # Each object file gets the path of its PDB file written + # inside it. This can be either the final PDB (for, say, + # foo.exe) or an object pdb (for foo.obj). If the former, then + # each compilation step locks the pdb file for writing, which + # is a bottleneck and object files from one target cannot be + # used in a different target. The latter seems to be the + # sensible one (and what Unix does) but there is a catch. If + # you try to use precompiled headers MSVC will error out + # because both source and pch pdbs go in the same file and + # they must be the same. + # + # This means: + # + # - pch files must be compiled anew for every object file (negating + # the entire point of having them in the first place) + # - when using pch, output must go to the target pdb + # + # Since both of these are broken in some way, use the one that + # works for each target. This unfortunately means that you + # can't combine pch and object extraction in a single target. + # + # PDB files also lead to filename collisions. A target foo.exe + # has a corresponding foo.pdb. A shared library foo.dll _also_ + # has pdb file called foo.pdb. So will a static library + # foo.lib, which clobbers both foo.pdb _and_ the dll file's + # export library called foo.lib (by default, currently we name + # them libfoo.a to avoid this issue). You can give the files + # unique names such as foo_exe.pdb but VC also generates a + # bunch of other files which take their names from the target + # basename (i.e. "foo") and stomp on each other. + # + # CMake solves this problem by doing two things. First of all + # static libraries do not generate pdb files at + # all. Presumably you don't need them and VC is smart enough + # to look up the original data when linking (speculation, not + # tested). The second solution is that you can only have + # target named "foo" as an exe, shared lib _or_ static + # lib. This makes filename collisions not happen. The downside + # is that you can't have an executable foo that uses a shared + # library libfoo.so, which is a common idiom on Unix. + # + # If you feel that the above is completely wrong and all of + # this is actually doable, please send patches. 
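+ # Concretely: an executable('foo'), a shared_library('foo') and a
+ # static_library('foo') in the same directory would all want to write
+ # foo.pdb, and whichever is built last clobbers the rest.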
+ + if target.has_pch(): + tfilename = self.get_target_filename_abs(target) + return compiler.get_compile_debugfile_args(tfilename, pch=True) + else: + return compiler.get_compile_debugfile_args(objfile, pch=False) + + def get_link_debugfile_name(self, linker, target, outname) -> T.Optional[str]: + return linker.get_link_debugfile_name(outname) + + def get_link_debugfile_args(self, linker, target, outname): + return linker.get_link_debugfile_args(outname) + + def generate_llvm_ir_compile(self, target, src): + base_proxy = target.get_options() + compiler = get_compiler_for_source(target.compilers.values(), src) + commands = compiler.compiler_args() + # Compiler args for compiling this target + commands += compilers.get_base_compile_args(base_proxy, compiler) + if isinstance(src, File): + if src.is_built: + src_filename = os.path.join(src.subdir, src.fname) + else: + src_filename = src.fname + elif os.path.isabs(src): + src_filename = os.path.basename(src) + else: + src_filename = src + obj_basename = self.canonicalize_filename(src_filename) + rel_obj = os.path.join(self.get_target_private_dir(target), obj_basename) + rel_obj += '.' + self.environment.machines[target.for_machine].get_object_suffix() + commands += self.get_compile_debugfile_args(compiler, target, rel_obj) + if isinstance(src, File) and src.is_built: + rel_src = src.fname + elif isinstance(src, File): + rel_src = src.rel_to_builddir(self.build_to_src) + else: + raise InvalidArguments(f'Invalid source type: {src!r}') + # Write the Ninja build command + compiler_name = self.get_compiler_rule_name('llvm_ir', compiler.for_machine) + element = NinjaBuildElement(self.all_outputs, rel_obj, compiler_name, rel_src) + element.add_item('ARGS', commands) + self.add_build(element) + return (rel_obj, rel_src) + + @lru_cache(maxsize=None) + def generate_inc_dir(self, compiler: 'Compiler', d: str, basedir: str, is_system: bool) -> \ + T.Tuple['ImmutableListProtocol[str]', 'ImmutableListProtocol[str]']: + # Avoid superfluous '/.' at the end of paths when d is '.' + if d not in ('', '.'): + expdir = os.path.normpath(os.path.join(basedir, d)) + else: + expdir = basedir + srctreedir = os.path.normpath(os.path.join(self.build_to_src, expdir)) + sargs = compiler.get_include_args(srctreedir, is_system) + # There may be include dirs where a build directory has not been + # created for some source dir. For example if someone does this: + # + # inc = include_directories('foo/bar/baz') + # + # But never subdir()s into the actual dir. + if os.path.isdir(os.path.join(self.environment.get_build_dir(), expdir)): + bargs = compiler.get_include_args(expdir, is_system) + else: + bargs = [] + return (sargs, bargs) + + def _generate_single_compile(self, target: build.BuildTarget, compiler: 'Compiler', + is_generated: bool = False) -> 'CompilerArgs': + commands = self._generate_single_compile_base_args(target, compiler) + commands += self._generate_single_compile_target_args(target, compiler, is_generated) + return commands + + def _generate_single_compile_base_args(self, target: build.BuildTarget, compiler: 'Compiler') -> 'CompilerArgs': + base_proxy = target.get_options() + # Create an empty commands list, and start adding arguments from + # various sources in the order in which they must override each other + commands = compiler.compiler_args() + # Start with symbol visibility. 
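+ # (e.g. gnu_symbol_visibility: 'hidden' maps to -fvisibility=hidden
+ # on GCC-like compilers)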
+ commands += compiler.gnu_symbol_visibility_args(target.gnu_symbol_visibility) + # Add compiler args for compiling this target derived from 'base' build + # options passed on the command-line, in default_options, etc. + # These have the lowest priority. + commands += compilers.get_base_compile_args(base_proxy, + compiler) + return commands + + @lru_cache(maxsize=None) + def _generate_single_compile_target_args(self, target: build.BuildTarget, compiler: 'Compiler', + is_generated: bool = False) -> 'ImmutableListProtocol[str]': + # The code generated by valac is usually crap and has tons of unused + # variables and such, so disable warnings for Vala C sources. + no_warn_args = is_generated == 'vala' + # Add compiler args and include paths from several sources; defaults, + # build options, external dependencies, etc. + commands = self.generate_basic_compiler_args(target, compiler, no_warn_args) + # Add custom target dirs as includes automatically, but before + # target-specific include directories. + if target.implicit_include_directories: + commands += self.get_custom_target_dir_include_args(target, compiler) + # Add include dirs from the `include_directories:` kwarg on the target + # and from `include_directories:` of internal deps of the target. + # + # Target include dirs should override internal deps include dirs. + # This is handled in BuildTarget.process_kwargs() + # + # Include dirs from internal deps should override include dirs from + # external deps and must maintain the order in which they are specified. + # Hence, we must reverse the list so that the order is preserved. + for i in reversed(target.get_include_dirs()): + basedir = i.get_curdir() + # We should iterate include dirs in reversed orders because + # -Ipath will add to begin of array. And without reverse + # flags will be added in reversed order. + for d in reversed(i.get_incdirs()): + # Add source subdir first so that the build subdir overrides it + (compile_obj, includeargs) = self.generate_inc_dir(compiler, d, basedir, i.is_system) + commands += compile_obj + commands += includeargs + for d in i.get_extra_build_dirs(): + commands += compiler.get_include_args(d, i.is_system) + # Add per-target compile args, f.ex, `c_args : ['-DFOO']`. We set these + # near the end since these are supposed to override everything else. + commands += self.escape_extra_args(target.get_extra_args(compiler.get_language())) + + # D specific additional flags + if compiler.language == 'd': + commands += compiler.get_feature_args(target.d_features, self.build_to_src) + + # Add source dir and build dir. Project-specific and target-specific + # include paths must override per-target compile args, include paths + # from external dependencies, internal dependencies, and from + # per-target `include_directories:` + # + # We prefer headers in the build dir over the source dir since, for + # instance, the user might have an srcdir == builddir Autotools build + # in their source tree. Many projects that are moving to Meson have + # both Meson and Autotools in parallel as part of the transition. + if target.implicit_include_directories: + commands += self.get_source_dir_include_args(target, compiler) + if target.implicit_include_directories: + commands += self.get_build_dir_include_args(target, compiler) + # Finally add the private dir for the target to the include path. This + # must override everything else and must be the final path added. 
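+ # (CompilerArgs prepends -I arguments, so the path added last here
+ # lands first on the command line and therefore wins)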
+ commands += compiler.get_include_args(self.get_target_private_dir(target), False) + return commands + + # Returns a dictionary, mapping from each compiler src type (e.g. 'c', 'cpp', etc.) to a list of compiler arg strings + # used for that respective src type. + # Currently used for the purpose of populating VisualStudio intellisense fields but possibly useful in other scenarios. + def generate_common_compile_args_per_src_type(self, target: build.BuildTarget) -> dict[str, list[str]]: + src_type_to_args = {} + + use_pch = self.target_uses_pch(target) + + for src_type_str in target.compilers.keys(): + compiler = target.compilers[src_type_str] + commands = self._generate_single_compile_base_args(target, compiler) + + # Include PCH header as first thing as it must be the first one or it will be + # ignored by gcc https://gcc.gnu.org/bugzilla/show_bug.cgi?id=100462 + if use_pch and 'mw' not in compiler.id: + commands += self.get_pch_include_args(compiler, target) + + commands += self._generate_single_compile_target_args(target, compiler, is_generated=False) + + # Metrowerks compilers require PCH include args to come after intraprocedural analysis args + if use_pch and 'mw' in compiler.id: + commands += self.get_pch_include_args(compiler, target) + + commands = commands.compiler.compiler_args(commands) + + src_type_to_args[src_type_str] = commands.to_native() + return src_type_to_args + + def generate_single_compile(self, target: build.BuildTarget, src, + is_generated=False, header_deps=None, + order_deps: T.Optional[T.List['mesonlib.FileOrString']] = None, + extra_args: T.Optional[T.List[str]] = None, + unity_sources: T.Optional[T.List[mesonlib.FileOrString]] = None) -> None: + """ + Compiles C/C++, ObjC/ObjC++, Fortran, and D sources + """ + header_deps = header_deps if header_deps is not None else [] + order_deps = order_deps if order_deps is not None else [] + + if isinstance(src, str) and src.endswith('.h'): + raise AssertionError(f'BUG: sources should not contain headers {src!r}') + + compiler = get_compiler_for_source(target.compilers.values(), src) + commands = self._generate_single_compile_base_args(target, compiler) + + # Include PCH header as first thing as it must be the first one or it will be + # ignored by gcc https://gcc.gnu.org/bugzilla/show_bug.cgi?id=100462 + use_pch = self.target_uses_pch(target) and is_generated != 'pch' + if use_pch and 'mw' not in compiler.id: + commands += self.get_pch_include_args(compiler, target) + + commands += self._generate_single_compile_target_args(target, compiler, is_generated) + + # Metrowerks compilers require PCH include args to come after intraprocedural analysis args + if use_pch and 'mw' in compiler.id: + commands += self.get_pch_include_args(compiler, target) + + commands = commands.compiler.compiler_args(commands) + + # Create introspection information + if is_generated is False: + self.create_target_source_introspection(target, compiler, commands, [src], [], unity_sources) + else: + self.create_target_source_introspection(target, compiler, commands, [], [src], unity_sources) + + build_dir = self.environment.get_build_dir() + if isinstance(src, File): + rel_src = src.rel_to_builddir(self.build_to_src) + if os.path.isabs(rel_src): + # Source files may not be from the source directory if they originate in source-only libraries, + # so we can't assert that the absolute path is anywhere in particular. 
+ if src.is_built: + assert rel_src.startswith(build_dir) + rel_src = rel_src[len(build_dir) + 1:] + elif is_generated: + raise AssertionError(f'BUG: broken generated source file handling for {src!r}') + else: + raise InvalidArguments(f'Invalid source type: {src!r}') + obj_basename = self.object_filename_from_source(target, src) + rel_obj = os.path.join(self.get_target_private_dir(target), obj_basename) + dep_file = compiler.depfile_for_object(rel_obj) + + # Add MSVC debug file generation compile flags: /Fd /FS + commands += self.get_compile_debugfile_args(compiler, target, rel_obj) + + # PCH handling + if self.target_uses_pch(target): + pchlist = target.get_pch(compiler.language) + else: + pchlist = [] + if not pchlist: + pch_dep = [] + elif compiler.id == 'intel': + pch_dep = [] + else: + arr = [] + i = os.path.join(self.get_target_private_dir(target), compiler.get_pch_name(pchlist[0])) + arr.append(i) + pch_dep = arr + + compiler_name = self.compiler_to_rule_name(compiler) + extra_deps = [] + if compiler.get_language() == 'fortran': + # Can't read source file to scan for deps if it's generated later + # at build-time. Skip scanning for deps, and just set the module + # outdir argument instead. + # https://github.com/mesonbuild/meson/issues/1348 + if not is_generated: + abs_src = Path(build_dir) / rel_src + extra_deps += self.get_fortran_deps(compiler, abs_src, target) + if not self.use_dyndeps_for_fortran(): + # Dependency hack. Remove once multiple outputs in Ninja is fixed: + # https://groups.google.com/forum/#!topic/ninja-build/j-2RfBIOd_8 + for modname, srcfile in self.fortran_deps[target.get_basename()].items(): + modfile = os.path.join(self.get_target_private_dir(target), + compiler.module_name_to_filename(modname)) + + if srcfile == src: + crstr = self.get_rule_suffix(target.for_machine) + depelem = NinjaBuildElement(self.all_outputs, + modfile, + 'FORTRAN_DEP_HACK' + crstr, + rel_obj) + self.add_build(depelem) + commands += compiler.get_module_outdir_args(self.get_target_private_dir(target)) + if extra_args is not None: + commands.extend(extra_args) + + element = NinjaBuildElement(self.all_outputs, rel_obj, compiler_name, rel_src) + self.add_header_deps(target, element, header_deps) + for d in extra_deps: + element.add_dep(d) + for d in order_deps: + if isinstance(d, File): + d = d.rel_to_builddir(self.build_to_src) + elif not self.has_dir_part(d): + d = os.path.join(self.get_target_private_dir(target), d) + element.add_orderdep(d) + element.add_dep(pch_dep) + for i in self.get_fortran_orderdeps(target, compiler): + element.add_orderdep(i) + if dep_file: + element.add_item('DEPFILE', dep_file) + element.add_item('ARGS', commands) + + self.add_dependency_scanner_entries_to_element(target, compiler, element, src) + self.add_build(element) + assert isinstance(rel_obj, str) + assert isinstance(rel_src, str) + return (rel_obj, rel_src.replace('\\', '/')) + + def add_dependency_scanner_entries_to_element(self, target, compiler, element, src): + if not self.should_use_dyndeps_for_target(target): + return + if isinstance(target, build.CompileTarget): + return + extension = os.path.splitext(src.fname)[1][1:] + if extension != 'C': + extension = extension.lower() + if not (extension in compilers.lang_suffixes['fortran'] or extension in compilers.lang_suffixes['cpp']): + return + dep_scan_file = self.get_dep_scan_file_for(target) + element.add_item('dyndep', dep_scan_file) + element.add_orderdep(dep_scan_file) + + def get_dep_scan_file_for(self, target): + return 
os.path.join(self.get_target_private_dir(target), 'depscan.dd')
+
+ def add_header_deps(self, target, ninja_element, header_deps):
+ for d in header_deps:
+ if isinstance(d, File):
+ d = d.rel_to_builddir(self.build_to_src)
+ elif not self.has_dir_part(d):
+ d = os.path.join(self.get_target_private_dir(target), d)
+ ninja_element.add_dep(d)
+
+ def has_dir_part(self, fname):
+ # FIXME FIXME: The usage of this is a terrible and unreliable hack
+ if isinstance(fname, File):
+ return fname.subdir != ''
+ return has_path_sep(fname)
+
+ # Fortran is a bit weird (again). When you link against a library, just compiling a source file
+ # requires the mod files that are output when single files are built. To do this right we would need to
+ # scan all inputs and write out explicit deps for each file. That is too slow and too much effort so
+ # instead just have an ordered dependency on the library. This ensures all required mod files are created.
+ # The real deps are then detected via dep file generation from the compiler. This breaks on compilers that
+ # produce incorrect dep files but such is life.
+ def get_fortran_orderdeps(self, target, compiler):
+ if compiler.language != 'fortran':
+ return []
+ return [
+ os.path.join(self.get_target_dir(lt), lt.get_filename())
+ for lt in itertools.chain(target.link_targets, target.link_whole_targets)
+ ]
+
+ def generate_msvc_pch_command(self, target, compiler, pch):
+ header = pch[0]
+ pchname = compiler.get_pch_name(header)
+ dst = os.path.join(self.get_target_private_dir(target), pchname)
+
+ commands = []
+ commands += self.generate_basic_compiler_args(target, compiler)
+
+ if len(pch) == 1:
+ # Auto generate PCH.
+ source = self.create_msvc_pch_implementation(target, compiler.get_language(), pch[0])
+ pch_header_dir = os.path.dirname(os.path.join(self.build_to_src, target.get_source_subdir(), header))
+ commands += compiler.get_include_args(pch_header_dir, False)
+ else:
+ source = os.path.join(self.build_to_src, target.get_source_subdir(), pch[1])
+
+ just_name = os.path.basename(header)
+ (objname, pch_args) = compiler.gen_pch_args(just_name, source, dst)
+ commands += pch_args
+ commands += self._generate_single_compile(target, compiler)
+ commands += self.get_compile_debugfile_args(compiler, target, objname)
+ dep = dst + '.' + compiler.get_depfile_suffix()
+ return commands, dep, dst, [objname], source
+
+ def generate_gcc_pch_command(self, target, compiler, pch):
+ commands = self._generate_single_compile(target, compiler)
+ if pch.split('.')[-1] == 'h' and compiler.language == 'cpp':
+ # Explicitly compile pch headers as C++. If Clang is invoked in C++ mode, it actually warns if
+ # this option is not set, and for gcc it also makes sense to use it.
+ commands += ['-x', 'c++-header']
+ dst = os.path.join(self.get_target_private_dir(target),
+ os.path.basename(pch) + '.' + compiler.get_pch_suffix())
+ dep = dst + '.' + compiler.get_depfile_suffix()
+ return commands, dep, dst, [] # Gcc does not create an object file during pch generation.
+
+ def generate_mwcc_pch_command(self, target, compiler, pch):
+ commands = self._generate_single_compile(target, compiler)
+ dst = os.path.join(self.get_target_private_dir(target),
+ os.path.basename(pch) + '.' + compiler.get_pch_suffix())
+ dep = os.path.splitext(dst)[0] + '.' + compiler.get_depfile_suffix()
+ return commands, dep, dst, [] # mwcc compilers do not create an object file during pch generation.
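+ # Each generate_*_pch_command helper above returns (commands, dep, dst,
+ # objs); MSVC additionally returns the implementation source. For gcc the
+ # resulting compile line boils down to roughly:
+ #
+ #   c++ $ARGS -x c++-header -o foo.h.gch foo.h
+ #
+ # (a sketch only; the real arguments come from the compiler object and
+ # the target options)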
+ + def generate_pch(self, target, header_deps=None): + header_deps = header_deps if header_deps is not None else [] + pch_objects = [] + for lang in ['c', 'cpp']: + pch = target.get_pch(lang) + if not pch: + continue + if not has_path_sep(pch[0]) or not has_path_sep(pch[-1]): + msg = f'Precompiled header of {target.get_basename()!r} must not be in the same ' \ + 'directory as source, please put it in a subdirectory.' + raise InvalidArguments(msg) + compiler: Compiler = target.compilers[lang] + if compiler.get_argument_syntax() == 'msvc': + (commands, dep, dst, objs, src) = self.generate_msvc_pch_command(target, compiler, pch) + extradep = os.path.join(self.build_to_src, target.get_source_subdir(), pch[0]) + elif compiler.id == 'intel': + # Intel generates on target generation + continue + elif 'mwcc' in compiler.id: + src = os.path.join(self.build_to_src, target.get_source_subdir(), pch[0]) + (commands, dep, dst, objs) = self.generate_mwcc_pch_command(target, compiler, pch[0]) + extradep = None + else: + src = os.path.join(self.build_to_src, target.get_source_subdir(), pch[0]) + (commands, dep, dst, objs) = self.generate_gcc_pch_command(target, compiler, pch[0]) + extradep = None + pch_objects += objs + rulename = self.compiler_to_pch_rule_name(compiler) + elem = NinjaBuildElement(self.all_outputs, objs + [dst], rulename, src) + if extradep is not None: + elem.add_dep(extradep) + self.add_header_deps(target, elem, header_deps) + elem.add_item('ARGS', commands) + elem.add_item('DEPFILE', dep) + self.add_build(elem) + return pch_objects + + def get_target_shsym_filename(self, target): + # Always name the .symbols file after the primary build output because it always exists + targetdir = self.get_target_private_dir(target) + return os.path.join(targetdir, target.get_filename() + '.symbols') + + def generate_shsym(self, target): + target_file = self.get_target_filename(target) + symname = self.get_target_shsym_filename(target) + elem = NinjaBuildElement(self.all_outputs, symname, 'SHSYM', target_file) + # The library we will actually link to, which is an import library on Windows (not the DLL) + elem.add_item('IMPLIB', self.get_target_filename_for_linking(target)) + if self.environment.is_cross_build(): + elem.add_item('CROSS', '--cross-host=' + self.environment.machines[target.for_machine].system) + self.add_build(elem) + + def get_import_filename(self, target): + return os.path.join(self.get_target_dir(target), target.import_filename) + + def get_target_type_link_args(self, target, linker): + commands = [] + if isinstance(target, build.Executable): + # Currently only used with the Swift compiler to add '-emit-executable' + commands += linker.get_std_exe_link_args() + # If export_dynamic, add the appropriate linker arguments + if target.export_dynamic: + commands += linker.gen_export_dynamic_link_args(self.environment) + # If implib, and that's significant on this platform (i.e. 
Windows using either GCC or Visual Studio) + if target.import_filename: + commands += linker.gen_import_library_args(self.get_import_filename(target)) + if target.pie: + commands += linker.get_pie_link_args() + elif isinstance(target, build.SharedLibrary): + if isinstance(target, build.SharedModule): + commands += linker.get_std_shared_module_link_args(target.get_options()) + else: + commands += linker.get_std_shared_lib_link_args() + # All shared libraries are PIC + commands += linker.get_pic_args() + if not isinstance(target, build.SharedModule) or target.force_soname: + # Add -Wl,-soname arguments on Linux, -install_name on OS X + commands += linker.get_soname_args( + self.environment, target.prefix, target.name, target.suffix, + target.soversion, target.darwin_versions) + # This is only visited when building for Windows using either GCC or Visual Studio + if target.vs_module_defs and hasattr(linker, 'gen_vs_module_defs_args'): + commands += linker.gen_vs_module_defs_args(target.vs_module_defs.rel_to_builddir(self.build_to_src)) + # This is only visited when building for Windows using either GCC or Visual Studio + if target.import_filename: + commands += linker.gen_import_library_args(self.get_import_filename(target)) + elif isinstance(target, build.StaticLibrary): + commands += linker.get_std_link_args(self.environment, not target.should_install()) + else: + raise RuntimeError('Unknown build target type.') + return commands + + def get_target_type_link_args_post_dependencies(self, target, linker): + commands = [] + if isinstance(target, build.Executable): + # If gui_app is significant on this platform, add the appropriate linker arguments. + # Unfortunately this can't be done in get_target_type_link_args, because some misguided + # libraries (such as SDL2) add -mwindows to their link flags. + m = self.environment.machines[target.for_machine] + + if m.is_windows() or m.is_cygwin(): + if target.gui_app is not None: + commands += linker.get_gui_app_args(target.gui_app) + else: + commands += linker.get_win_subsystem_args(target.win_subsystem) + return commands + + def get_link_whole_args(self, linker, target): + use_custom = False + if linker.id == 'msvc': + # Expand our object lists manually if we are on pre-Visual Studio 2015 Update 2 + # (incidentally, the "linker" here actually refers to cl.exe) + if mesonlib.version_compare(linker.version, '<19.00.23918'): + use_custom = True + + if use_custom: + objects_from_static_libs: T.List[ExtractedObjects] = [] + for dep in target.link_whole_targets: + l = dep.extract_all_objects(False) + objects_from_static_libs += self.determine_ext_objs(l, '') + objects_from_static_libs.extend(self.flatten_object_list(dep)[0]) + + return objects_from_static_libs + else: + target_args = self.build_target_link_arguments(linker, target.link_whole_targets) + return linker.get_link_whole_for(target_args) if target_args else [] + + @lru_cache(maxsize=None) + def guess_library_absolute_path(self, linker, libname, search_dirs, patterns) -> Path: + from ..compilers.c import CCompiler + for d in search_dirs: + for p in patterns: + trial = CCompiler._get_trials_from_pattern(p, d, libname) + if not trial: + continue + trial = CCompiler._get_file_from_list(self.environment, trial) + if not trial: + continue + # Return the first result + return trial + + def guess_external_link_dependencies(self, linker, target, commands, internal): + # Ideally the linker would generate dependency information that could be used. 
+ # But that has 2 problems: + # * currently ld cannot create dependency information in a way that ninja can use: + # https://sourceware.org/bugzilla/show_bug.cgi?id=22843 + # * Meson optimizes libraries from the same build using the symbol extractor. + # Just letting ninja use ld generated dependencies would undo this optimization. + search_dirs = OrderedSet() + libs = OrderedSet() + absolute_libs = [] + + build_dir = self.environment.get_build_dir() + # the following loop sometimes consumes two items from command in one pass + it = iter(linker.native_args_to_unix(commands)) + for item in it: + if item in internal and not item.startswith('-'): + continue + + if item.startswith('-L'): + if len(item) > 2: + path = item[2:] + else: + try: + path = next(it) + except StopIteration: + mlog.warning("Generated linker command has -L argument without following path") + break + if not os.path.isabs(path): + path = os.path.join(build_dir, path) + search_dirs.add(path) + elif item.startswith('-l'): + if len(item) > 2: + lib = item[2:] + else: + try: + lib = next(it) + except StopIteration: + mlog.warning("Generated linker command has '-l' argument without following library name") + break + libs.add(lib) + elif os.path.isabs(item) and self.environment.is_library(item) and os.path.isfile(item): + absolute_libs.append(item) + + guessed_dependencies = [] + # TODO The get_library_naming requirement currently excludes link targets that use d or fortran as their main linker + try: + static_patterns = linker.get_library_naming(self.environment, LibType.STATIC, strict=True) + shared_patterns = linker.get_library_naming(self.environment, LibType.SHARED, strict=True) + search_dirs = tuple(search_dirs) + tuple(linker.get_library_dirs(self.environment)) + for libname in libs: + # be conservative and record most likely shared and static resolution, because we don't know exactly + # which one the linker will prefer + staticlibs = self.guess_library_absolute_path(linker, libname, + search_dirs, static_patterns) + sharedlibs = self.guess_library_absolute_path(linker, libname, + search_dirs, shared_patterns) + if staticlibs: + guessed_dependencies.append(staticlibs.resolve().as_posix()) + if sharedlibs: + guessed_dependencies.append(sharedlibs.resolve().as_posix()) + except (mesonlib.MesonException, AttributeError) as e: + if 'get_library_naming' not in str(e): + raise + + return guessed_dependencies + absolute_libs + + def generate_prelink(self, target, obj_list): + assert isinstance(target, build.StaticLibrary) + prelink_name = os.path.join(self.get_target_private_dir(target), target.name + '-prelink.o') + elem = NinjaBuildElement(self.all_outputs, [prelink_name], 'CUSTOM_COMMAND', obj_list) + + prelinker = target.get_prelinker() + cmd = prelinker.exelist[:] + cmd += prelinker.get_prelink_args(prelink_name, obj_list) + + cmd = self.replace_paths(target, cmd) + elem.add_item('COMMAND', cmd) + elem.add_item('description', f'Prelinking {prelink_name}.') + self.add_build(elem) + return [prelink_name] + + def generate_link(self, target: build.BuildTarget, outname, obj_list, linker: T.Union['Compiler', 'StaticLinker'], extra_args=None, stdlib_args=None): + extra_args = extra_args if extra_args is not None else [] + stdlib_args = stdlib_args if stdlib_args is not None else [] + implicit_outs = [] + if isinstance(target, build.StaticLibrary): + linker_base = 'STATIC' + else: + linker_base = linker.get_language() # Fixme. 
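+ # The rule name assembled below is e.g. "c_LINKER" for the host
+ # machine, "c_LINKER_FOR_BUILD" for the build machine, or
+ # "STATIC_LINKER" when archiving.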
+ if isinstance(target, build.SharedLibrary): + self.generate_shsym(target) + crstr = self.get_rule_suffix(target.for_machine) + linker_rule = linker_base + '_LINKER' + crstr + # Create an empty commands list, and start adding link arguments from + # various sources in the order in which they must override each other + # starting from hard-coded defaults followed by build options and so on. + # + # Once all the linker options have been passed, we will start passing + # libraries and library paths from internal and external sources. + commands = linker.compiler_args() + # First, the trivial ones that are impossible to override. + # + # Add linker args for linking this target derived from 'base' build + # options passed on the command-line, in default_options, etc. + # These have the lowest priority. + if isinstance(target, build.StaticLibrary): + commands += linker.get_base_link_args(target.get_options()) + else: + commands += compilers.get_base_link_args(target.get_options(), + linker, + isinstance(target, build.SharedModule), + self.environment.get_build_dir()) + # Add -nostdlib if needed; can't be overridden + commands += self.get_no_stdlib_link_args(target, linker) + # Add things like /NOLOGO; usually can't be overridden + commands += linker.get_linker_always_args() + # Add buildtype linker args: optimization level, etc. + commands += linker.get_buildtype_linker_args(target.get_option(OptionKey('buildtype'))) + # Add /DEBUG and the pdb filename when using MSVC + if target.get_option(OptionKey('debug')): + commands += self.get_link_debugfile_args(linker, target, outname) + debugfile = self.get_link_debugfile_name(linker, target, outname) + if debugfile is not None: + implicit_outs += [debugfile] + # Add link args specific to this BuildTarget type, such as soname args, + # PIC, import library generation, etc. + commands += self.get_target_type_link_args(target, linker) + # Archives that are copied wholesale in the result. Must be before any + # other link targets so missing symbols from whole archives are found in those. + if not isinstance(target, build.StaticLibrary): + commands += self.get_link_whole_args(linker, target) + + if not isinstance(target, build.StaticLibrary): + # Add link args added using add_project_link_arguments() + commands += self.build.get_project_link_args(linker, target.subproject, target.for_machine) + # Add link args added using add_global_link_arguments() + # These override per-project link arguments + commands += self.build.get_global_link_args(linker, target.for_machine) + # Link args added from the env: LDFLAGS. We want these to override + # all the defaults but not the per-target link args. + commands += self.environment.coredata.get_external_link_args(target.for_machine, linker.get_language()) + + # Now we will add libraries and library paths from various sources + + # Set runtime-paths so we can run executables without needing to set + # LD_LIBRARY_PATH, etc in the environment. Doesn't work on Windows. + if has_path_sep(target.name): + # Target names really should not have slashes in them, but + # unfortunately we did not check for that and some downstream projects + # now have them. Once slashes are forbidden, remove this bit. 
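+ # (e.g. a target named 'sub/foo' computes rpaths against
+ # os.path.join('sub', target_dir) rather than target_dir alone)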
+ target_slashname_workaround_dir = os.path.join( + os.path.dirname(target.name), + self.get_target_dir(target)) + else: + target_slashname_workaround_dir = self.get_target_dir(target) + (rpath_args, target.rpath_dirs_to_remove) = ( + linker.build_rpath_args(self.environment, + self.environment.get_build_dir(), + target_slashname_workaround_dir, + self.determine_rpath_dirs(target), + target.build_rpath, + target.install_rpath)) + commands += rpath_args + + # Add link args to link to all internal libraries (link_with:) and + # internal dependencies needed by this target. + if linker_base == 'STATIC': + # Link arguments of static libraries are not put in the command + # line of the library. They are instead appended to the command + # line where the static library is used. + dependencies = [] + else: + dependencies = target.get_dependencies() + internal = self.build_target_link_arguments(linker, dependencies) + #In AIX since shared libraries are archived the dependencies must + #depend on .a file with the .so and not directly on the .so file. + if self.environment.machines[target.for_machine].is_aix(): + for i, val in enumerate(internal): + internal[i] = linker.get_archive_name(val) + commands += internal + # Only non-static built targets need link args and link dependencies + if not isinstance(target, build.StaticLibrary): + # For 'automagic' deps: Boost and GTest. Also dependency('threads'). + # pkg-config puts the thread flags itself via `Cflags:` + + commands += linker.get_target_link_args(target) + # External deps must be last because target link libraries may depend on them. + for dep in target.get_external_deps(): + # Extend without reordering or de-dup to preserve `-L -l` sets + # https://github.com/mesonbuild/meson/issues/1718 + commands.extend_preserving_lflags(linker.get_dependency_link_args(dep)) + for d in target.get_dependencies(): + if isinstance(d, build.StaticLibrary): + for dep in d.get_external_deps(): + commands.extend_preserving_lflags(linker.get_dependency_link_args(dep)) + + # Add link args specific to this BuildTarget type that must not be overridden by dependencies + commands += self.get_target_type_link_args_post_dependencies(target, linker) + + # Add link args for c_* or cpp_* build options. Currently this only + # adds c_winlibs and cpp_winlibs when building for Windows. This needs + # to be after all internal and external libraries so that unresolved + # symbols from those can be found here. This is needed when the + # *_winlibs that we want to link to are static mingw64 libraries. + if isinstance(linker, Compiler): + # The static linker doesn't know what language it is building, so we + # don't know what option. Fortunately, it doesn't care to see the + # language-specific options either. + # + # We shouldn't check whether we are making a static library, because + # in the LTO case we do use a real compiler here. + commands += linker.get_option_link_args(target.get_options()) + + dep_targets = [] + dep_targets.extend(self.guess_external_link_dependencies(linker, target, commands, internal)) + + # Add libraries generated by custom targets + custom_target_libraries = self.get_custom_target_provided_libraries(target) + commands += extra_args + commands += custom_target_libraries + commands += stdlib_args # Standard library arguments go last, because they never depend on anything. 
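+ # By now LINK_ARGS is ordered roughly as: base/buildtype args,
+ # target-type args (PIC/soname/implib), project and global link args,
+ # LDFLAGS, rpath args, internal link targets, external deps,
+ # extra args, custom-target libraries and finally stdlib args.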
+ dep_targets.extend([self.get_dependency_filename(t) for t in dependencies]) + dep_targets.extend([self.get_dependency_filename(t) + for t in target.link_depends]) + elem = NinjaBuildElement(self.all_outputs, outname, linker_rule, obj_list, implicit_outs=implicit_outs) + elem.add_dep(dep_targets + custom_target_libraries) + elem.add_item('LINK_ARGS', commands) + self.create_target_linker_introspection(target, linker, commands) + return elem + + def get_dependency_filename(self, t): + if isinstance(t, build.SharedLibrary): + return self.get_target_shsym_filename(t) + elif isinstance(t, mesonlib.File): + if t.is_built: + return t.relative_name() + else: + return t.absolute_path(self.environment.get_source_dir(), + self.environment.get_build_dir()) + return self.get_target_filename(t) + + def generate_shlib_aliases(self, target, outdir): + for alias, to, tag in target.get_aliases(): + aliasfile = os.path.join(outdir, alias) + abs_aliasfile = os.path.join(self.environment.get_build_dir(), outdir, alias) + try: + os.remove(abs_aliasfile) + except Exception: + pass + try: + os.symlink(to, abs_aliasfile) + except NotImplementedError: + mlog.debug("Library versioning disabled because symlinks are not supported.") + except OSError: + mlog.debug("Library versioning disabled because we do not have symlink creation privileges.") + else: + self.implicit_meson_outs.append(aliasfile) + + def generate_custom_target_clean(self, trees: T.List[str]) -> str: + e = self.create_phony_target(self.all_outputs, 'clean-ctlist', 'CUSTOM_COMMAND', 'PHONY') + d = CleanTrees(self.environment.get_build_dir(), trees) + d_file = os.path.join(self.environment.get_scratch_dir(), 'cleantrees.dat') + e.add_item('COMMAND', self.environment.get_build_command() + ['--internal', 'cleantrees', d_file]) + e.add_item('description', 'Cleaning custom target directories') + self.add_build(e) + # Write out the data file passed to the script + with open(d_file, 'wb') as ofile: + pickle.dump(d, ofile) + return 'clean-ctlist' + + def generate_gcov_clean(self): + gcno_elem = self.create_phony_target(self.all_outputs, 'clean-gcno', 'CUSTOM_COMMAND', 'PHONY') + gcno_elem.add_item('COMMAND', mesonlib.get_meson_command() + ['--internal', 'delwithsuffix', '.', 'gcno']) + gcno_elem.add_item('description', 'Deleting gcno files') + self.add_build(gcno_elem) + + gcda_elem = self.create_phony_target(self.all_outputs, 'clean-gcda', 'CUSTOM_COMMAND', 'PHONY') + gcda_elem.add_item('COMMAND', mesonlib.get_meson_command() + ['--internal', 'delwithsuffix', '.', 'gcda']) + gcda_elem.add_item('description', 'Deleting gcda files') + self.add_build(gcda_elem) + + def get_user_option_args(self): + cmds = [] + for (k, v) in self.environment.coredata.options.items(): + if k.is_project(): + cmds.append('-D' + str(k) + '=' + (v.value if isinstance(v.value, str) else str(v.value).lower())) + # The order of these arguments must be the same between runs of Meson + # to ensure reproducible output. The order we pass them shouldn't + # affect behavior in any other way. 
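+ # e.g. ['-Dfoo=true', '-Dbar=2'] and ['-Dbar=2', '-Dfoo=true'] both
+ # serialize as ['-Dbar=2', '-Dfoo=true']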
+ return sorted(cmds) + + def generate_dist(self): + elem = self.create_phony_target(self.all_outputs, 'dist', 'CUSTOM_COMMAND', 'PHONY') + elem.add_item('DESC', 'Creating source packages') + elem.add_item('COMMAND', self.environment.get_build_command() + ['dist']) + elem.add_item('pool', 'console') + self.add_build(elem) + + def generate_scanbuild(self): + if not environment.detect_scanbuild(): + return + if 'scan-build' in self.all_outputs: + return + cmd = self.environment.get_build_command() + \ + ['--internal', 'scanbuild', self.environment.source_dir, self.environment.build_dir] + \ + self.environment.get_build_command() + self.get_user_option_args() + elem = self.create_phony_target(self.all_outputs, 'scan-build', 'CUSTOM_COMMAND', 'PHONY') + elem.add_item('COMMAND', cmd) + elem.add_item('pool', 'console') + self.add_build(elem) + + def generate_clangtool(self, name, extra_arg=None): + target_name = 'clang-' + name + extra_args = [] + if extra_arg: + target_name += f'-{extra_arg}' + extra_args.append(f'--{extra_arg}') + if not os.path.exists(os.path.join(self.environment.source_dir, '.clang-' + name)) and \ + not os.path.exists(os.path.join(self.environment.source_dir, '_clang-' + name)): + return + if target_name in self.all_outputs: + return + cmd = self.environment.get_build_command() + \ + ['--internal', 'clang' + name, self.environment.source_dir, self.environment.build_dir] + \ + extra_args + elem = self.create_phony_target(self.all_outputs, target_name, 'CUSTOM_COMMAND', 'PHONY') + elem.add_item('COMMAND', cmd) + elem.add_item('pool', 'console') + self.add_build(elem) + + def generate_clangformat(self): + if not environment.detect_clangformat(): + return + self.generate_clangtool('format') + self.generate_clangtool('format', 'check') + + def generate_clangtidy(self): + import shutil + if not shutil.which('clang-tidy'): + return + self.generate_clangtool('tidy') + + def generate_tags(self, tool, target_name): + import shutil + if not shutil.which(tool): + return + if target_name in self.all_outputs: + return + cmd = self.environment.get_build_command() + \ + ['--internal', 'tags', tool, self.environment.source_dir] + elem = self.create_phony_target(self.all_outputs, target_name, 'CUSTOM_COMMAND', 'PHONY') + elem.add_item('COMMAND', cmd) + elem.add_item('pool', 'console') + self.add_build(elem) + + # For things like scan-build and other helper tools we might have. + def generate_utils(self): + self.generate_scanbuild() + self.generate_clangformat() + self.generate_clangtidy() + self.generate_tags('etags', 'TAGS') + self.generate_tags('ctags', 'ctags') + self.generate_tags('cscope', 'cscope') + cmd = self.environment.get_build_command() + ['--internal', 'uninstall'] + elem = self.create_phony_target(self.all_outputs, 'uninstall', 'CUSTOM_COMMAND', 'PHONY') + elem.add_item('COMMAND', cmd) + elem.add_item('pool', 'console') + self.add_build(elem) + + def generate_ending(self): + for targ, deps in [ + ('all', self.get_build_by_default_targets()), + ('meson-test-prereq', self.get_testlike_targets()), + ('meson-benchmark-prereq', self.get_testlike_targets(True))]: + targetlist = [] + # These must also be built by default. + # XXX: Sometime in the future these should be built only before running tests. + if targ == 'all': + targetlist.extend(['meson-test-prereq', 'meson-benchmark-prereq']) + for t in deps.values(): + # Add the first output of each target to the 'all' target so that + # they are all built + #Add archive file if shared library in AIX for build all. 
+ if isinstance(t, build.SharedLibrary): + if self.environment.machines[t.for_machine].is_aix(): + linker, stdlib_args = self.determine_linker_and_stdlib_args(t) + t.get_outputs()[0] = linker.get_archive_name(t.get_outputs()[0]) + targetlist.append(os.path.join(self.get_target_dir(t), t.get_outputs()[0])) + + elem = NinjaBuildElement(self.all_outputs, targ, 'phony', targetlist) + self.add_build(elem) + + elem = self.create_phony_target(self.all_outputs, 'clean', 'CUSTOM_COMMAND', 'PHONY') + elem.add_item('COMMAND', self.ninja_command + ['-t', 'clean']) + elem.add_item('description', 'Cleaning') + + # If we have custom targets in this project, add all their outputs to + # the list that is passed to the `cleantrees.py` script. The script + # will manually delete all custom_target outputs that are directories + # instead of files. This is needed because on platforms other than + # Windows, Ninja only deletes directories while cleaning if they are + # empty. https://github.com/mesonbuild/meson/issues/1220 + ctlist = [] + for t in self.build.get_targets().values(): + if isinstance(t, build.CustomTarget): + # Create a list of all custom target outputs + for o in t.get_outputs(): + ctlist.append(os.path.join(self.get_target_dir(t), o)) + if ctlist: + elem.add_dep(self.generate_custom_target_clean(ctlist)) + + if OptionKey('b_coverage') in self.environment.coredata.options and \ + self.environment.coredata.options[OptionKey('b_coverage')].value: + self.generate_gcov_clean() + elem.add_dep('clean-gcda') + elem.add_dep('clean-gcno') + self.add_build(elem) + + deps = self.get_regen_filelist() + elem = NinjaBuildElement(self.all_outputs, 'build.ninja', 'REGENERATE_BUILD', deps) + elem.add_item('pool', 'console') + self.add_build(elem) + + # If these files used to be explicitly created, they need to appear on the build graph somehow, + # otherwise cleandead deletes them. See https://github.com/ninja-build/ninja/issues/2299 + if self.implicit_meson_outs: + elem = NinjaBuildElement(self.all_outputs, 'meson-implicit-outs', 'phony', self.implicit_meson_outs) + self.add_build(elem) + + elem = NinjaBuildElement(self.all_outputs, 'reconfigure', 'REGENERATE_BUILD', 'PHONY') + elem.add_item('pool', 'console') + self.add_build(elem) + + elem = NinjaBuildElement(self.all_outputs, deps, 'phony', '') + self.add_build(elem) + + def get_introspection_data(self, target_id: str, target: build.Target) -> T.List[T.Dict[str, T.Union[bool, str, T.List[T.Union[str, T.Dict[str, T.Union[str, T.List[str], bool]]]]]]]: + data = self.introspection_data.get(target_id) + if not data: + return super().get_introspection_data(target_id, target) + + return list(data.values()) + + +def _scan_fortran_file_deps(src: Path, srcdir: Path, dirname: Path, tdeps, compiler) -> T.List[str]: + """ + scan a Fortran file for dependencies. 
Needs to be distinct from target
+ to allow for recursion induced by `include` statements.
+
+ It makes a number of assumptions, including
+
+ * `use`, `module`, `submodule` name is not on a continuation line
+
+ Regex
+ -----
+
+ * `incre` works for `#include "foo.f90"` and `include "foo.f90"`
+ * `usere` works for legacy and Fortran 2003 `use` statements
+ * `submodre` is for Fortran >= 2008 `submodule`
+ """
+
+ incre = re.compile(FORTRAN_INCLUDE_PAT, re.IGNORECASE)
+ usere = re.compile(FORTRAN_USE_PAT, re.IGNORECASE)
+ submodre = re.compile(FORTRAN_SUBMOD_PAT, re.IGNORECASE)
+
+ mod_files = []
+ src = Path(src)
+ with src.open(encoding='ascii', errors='ignore') as f:
+ for line in f:
+ # included files
+ incmatch = incre.match(line)
+ if incmatch is not None:
+ incfile = src.parent / incmatch.group(1)
+ # NOTE: src.parent is most general, in particular for CMake subproject with Fortran file
+ # having an `include 'foo.f'` statement.
+ if incfile.suffix.lower()[1:] in compiler.file_suffixes:
+ mod_files.extend(_scan_fortran_file_deps(incfile, srcdir, dirname, tdeps, compiler))
+ # modules
+ usematch = usere.match(line)
+ if usematch is not None:
+ usename = usematch.group(1).lower()
+ if usename == 'intrinsic': # this keeps the regex simpler
+ continue
+ if usename not in tdeps:
+ # The module is not provided by any source file. This
+ # is due to:
+ # a) missing file/typo/etc
+ # b) using a module provided by the compiler, such as
+ # OpenMP
+ # There's no easy way to tell which is which (that I
+ # know of) so just ignore this and go on. Ideally we
+ # would print a warning message to the user but this is
+ # a common occurrence, which would lead to lots of
+ # distracting noise.
+ continue
+ srcfile = srcdir / tdeps[usename].fname # type: Path
+ if not srcfile.is_file():
+ if srcfile.name != src.name: # generated source file
+ pass
+ else: # subproject
+ continue
+ elif srcfile.samefile(src): # self-reference
+ continue
+
+ mod_name = compiler.module_name_to_filename(usename)
+ mod_files.append(str(dirname / mod_name))
+ else: # submodules
+ submodmatch = submodre.match(line)
+ if submodmatch is not None:
+ parents = submodmatch.group(1).lower().split(':')
+ assert len(parents) in {1, 2}, (
+ 'submodule ancestry must be specified as'
+ f' ancestor:parent but Meson found {parents}')
+
+ ancestor_child = '_'.join(parents)
+ if ancestor_child not in tdeps:
+ raise MesonException("submodule {} relies on ancestor module {} that was not found.".format(submodmatch.group(2).lower(), ancestor_child.split('_', maxsplit=1)[0]))
+ submodsrcfile = srcdir / tdeps[ancestor_child].fname # type: Path
+ if not submodsrcfile.is_file():
+ if submodsrcfile.name != src.name: # generated source file
+ pass
+ else: # subproject
+ continue
+ elif submodsrcfile.samefile(src): # self-reference
+ continue
+ mod_name = compiler.module_name_to_filename(ancestor_child)
+ mod_files.append(str(dirname / mod_name))
+ return mod_files
diff --git a/vendored-meson/meson/mesonbuild/backend/nonebackend.py b/vendored-meson/meson/mesonbuild/backend/nonebackend.py
new file mode 100644
index 000000000000..35ec9582e593
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/backend/nonebackend.py
@@ -0,0 +1,39 @@
+# Copyright 2022 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import annotations + +import typing as T + +from .backends import Backend +from .. import mlog +from ..mesonlib import MesonBugException + + +class NoneBackend(Backend): + + name = 'none' + + def generate(self, capture: bool = False, vslite_ctx: dict = None) -> T.Optional[dict]: + # Check for (currently) unexpected capture arg use cases - + if capture: + raise MesonBugException('We do not expect the none backend to generate with \'capture = True\'') + if vslite_ctx: + raise MesonBugException('We do not expect the none backend to be given a valid \'vslite_ctx\'') + + if self.build.get_targets(): + raise MesonBugException('None backend cannot generate target rules, but should have failed earlier.') + mlog.log('Generating simple install-only backend') + self.serialize_tests() + self.create_install_data_files() diff --git a/vendored-meson/meson/mesonbuild/backend/vs2010backend.py b/vendored-meson/meson/mesonbuild/backend/vs2010backend.py new file mode 100644 index 000000000000..340c1a5478bf --- /dev/null +++ b/vendored-meson/meson/mesonbuild/backend/vs2010backend.py @@ -0,0 +1,2118 @@ +# Copyright 2014-2016 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import annotations +import copy +import itertools +import os +import xml.dom.minidom +import xml.etree.ElementTree as ET +import uuid +import typing as T +from pathlib import Path, PurePath +import re +from collections import Counter + +from . import backends +from .. import build +from .. import mlog +from .. import compilers +from .. import mesonlib +from ..mesonlib import ( + File, MesonBugException, MesonException, replace_if_different, OptionKey, version_compare, MachineChoice +) +from ..environment import Environment, build_filename +from .. 
import coredata + +if T.TYPE_CHECKING: + from ..arglist import CompilerArgs + from ..interpreter import Interpreter + + Project = T.Tuple[str, Path, str, MachineChoice] + +def autodetect_vs_version(build: T.Optional[build.Build], interpreter: T.Optional[Interpreter]) -> backends.Backend: + vs_version = os.getenv('VisualStudioVersion', None) + vs_install_dir = os.getenv('VSINSTALLDIR', None) + if not vs_install_dir: + raise MesonException('Could not detect Visual Studio: Environment variable VSINSTALLDIR is not set!\n' + 'Are you running meson from the Visual Studio Developer Command Prompt?') + # VisualStudioVersion is set since Visual Studio 11.0, but sometimes + # vcvarsall.bat doesn't set it, so also use VSINSTALLDIR + if vs_version == '11.0' or 'Visual Studio 11' in vs_install_dir: + from mesonbuild.backend.vs2012backend import Vs2012Backend + return Vs2012Backend(build, interpreter) + if vs_version == '12.0' or 'Visual Studio 12' in vs_install_dir: + from mesonbuild.backend.vs2013backend import Vs2013Backend + return Vs2013Backend(build, interpreter) + if vs_version == '14.0' or 'Visual Studio 14' in vs_install_dir: + from mesonbuild.backend.vs2015backend import Vs2015Backend + return Vs2015Backend(build, interpreter) + if vs_version == '15.0' or 'Visual Studio 17' in vs_install_dir or \ + 'Visual Studio\\2017' in vs_install_dir: + from mesonbuild.backend.vs2017backend import Vs2017Backend + return Vs2017Backend(build, interpreter) + if vs_version == '16.0' or 'Visual Studio 19' in vs_install_dir or \ + 'Visual Studio\\2019' in vs_install_dir: + from mesonbuild.backend.vs2019backend import Vs2019Backend + return Vs2019Backend(build, interpreter) + if vs_version == '17.0' or 'Visual Studio 22' in vs_install_dir or \ + 'Visual Studio\\2022' in vs_install_dir: + from mesonbuild.backend.vs2022backend import Vs2022Backend + return Vs2022Backend(build, interpreter) + if 'Visual Studio 10.0' in vs_install_dir: + return Vs2010Backend(build, interpreter) + raise MesonException('Could not detect Visual Studio using VisualStudioVersion: {!r} or VSINSTALLDIR: {!r}!\n' + 'Please specify the exact backend to use.'.format(vs_version, vs_install_dir)) + + +def split_o_flags_args(args: T.List[str]) -> T.List[str]: + """ + Splits any /O args and returns them. Does not take care of flags overriding + previous ones. Skips non-O flag arguments. + + ['/Ox', '/Ob1'] returns ['/Ox', '/Ob1'] + ['/Oxj', '/MP'] returns ['/Ox', '/Oj'] + """ + o_flags = [] + for arg in args: + if not arg.startswith('/O'): + continue + flags = list(arg[2:]) + # Assume that this one can't be clumped with the others since it takes + # an argument itself + if 'b' in flags: + o_flags.append(arg) + else: + o_flags += ['/O' + f for f in flags] + return o_flags + +def generate_guid_from_path(path, path_type) -> str: + return str(uuid.uuid5(uuid.NAMESPACE_URL, 'meson-vs-' + path_type + ':' + str(path))).upper() + +def detect_microsoft_gdk(platform: str) -> bool: + return re.match(r'Gaming\.(Desktop|Xbox.XboxOne|Xbox.Scarlett)\.x64', platform, re.IGNORECASE) + +def filtered_src_langs_generator(sources: T.List[str]): + for src in sources: + ext = src.split('.')[-1] + if compilers.compilers.is_source_suffix(ext): + yield compilers.compilers.SUFFIX_TO_LANG[ext] + +# Returns the source language (i.e. a key from 'lang_suffixes') of the most frequent source language in the given +# list of sources. 
+# We choose the most frequent language as 'primary' because it means the most sources in a target/project can +# simply refer to the project's shared intellisense define and include fields, rather than have to fill out their +# own duplicate full set of defines/includes/opts intellisense fields. All of which helps keep the vcxproj file +# size down. +def get_primary_source_lang(target_sources: T.List[File], custom_sources: T.List[str]) -> T.Optional[str]: + lang_counts = Counter([compilers.compilers.SUFFIX_TO_LANG[src.suffix] for src in target_sources if compilers.compilers.is_source_suffix(src.suffix)]) + lang_counts += Counter(filtered_src_langs_generator(custom_sources)) + most_common_lang_list = lang_counts.most_common(1) + # It may be possible that we have a target with no actual src files of interest (e.g. a generator target), + # leaving us with an empty list, which we should handle - + return most_common_lang_list[0][0] if most_common_lang_list else None + +# Returns a dictionary (by [src type][build type]) that contains a tuple of - +# (pre-processor defines, include paths, additional compiler options) +# fields to use to fill in the respective intellisense fields of sources that can't simply +# reference and re-use the shared 'primary' language intellisense fields of the vcxproj. +def get_non_primary_lang_intellisense_fields(vslite_ctx: dict, + target_id: str, + primary_src_lang: str) -> T.Dict[str, T.Dict[str, T.Tuple[str, str, str]]]: + defs_paths_opts_per_lang_and_buildtype = {} + for buildtype in coredata.get_genvs_default_buildtype_list(): + captured_build_args = vslite_ctx[buildtype][target_id] # Results in a 'Src types to compile args' dict + non_primary_build_args_per_src_lang = [(lang, build_args) for lang, build_args in captured_build_args.items() if lang != primary_src_lang] # Only need to individually populate intellisense fields for sources of non-primary types. 
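+        # E.g. (illustrative shape only; the actual keys depend on the target's sources and buildtypes) -
+        #   { 'c': { 'debug':          ('<defines>', '<include paths>', '<other options>'),
+        #            'debugoptimized': ('<defines>', '<include paths>', '<other options>'),
+        #            'release':        ('<defines>', '<include paths>', '<other options>') } }
+        # i.e. [src type][build type] -> (defs, paths, opts), later consumed by
+        # add_project_nmake_defs_incs_and_opts().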
+ for src_lang, args_list in non_primary_build_args_per_src_lang: + if src_lang not in defs_paths_opts_per_lang_and_buildtype: + defs_paths_opts_per_lang_and_buildtype[src_lang] = {} + defs = Vs2010Backend.extract_nmake_preprocessor_defs(args_list) + paths = Vs2010Backend.extract_nmake_include_paths(args_list) + opts = Vs2010Backend.extract_intellisense_additional_compiler_options(args_list) + defs_paths_opts_per_lang_and_buildtype[src_lang][buildtype] = (defs, paths, opts) + return defs_paths_opts_per_lang_and_buildtype + +class Vs2010Backend(backends.Backend): + + name = 'vs2010' + + def __init__(self, build: T.Optional[build.Build], interpreter: T.Optional[Interpreter], gen_lite: bool = False): + super().__init__(build, interpreter) + self.project_file_version = '10.0.30319.1' + self.sln_file_version = '11.00' + self.sln_version_comment = '2010' + self.platform_toolset = None + self.vs_version = '2010' + self.windows_target_platform_version = None + self.subdirs = {} + self.handled_target_deps = {} + self.gen_lite = gen_lite # Synonymous with generating the simpler makefile-style multi-config projects that invoke 'meson compile' builds, avoiding native MSBuild complications + + def get_target_private_dir(self, target): + return os.path.join(self.get_target_dir(target), target.get_id()) + + def generate_genlist_for_target(self, genlist: T.Union[build.GeneratedList, build.CustomTarget, build.CustomTargetIndex], target: build.BuildTarget, parent_node: ET.Element, generator_output_files: T.List[str], custom_target_include_dirs: T.List[str], custom_target_output_files: T.List[str]) -> None: + if isinstance(genlist, build.GeneratedList): + for x in genlist.depends: + self.generate_genlist_for_target(x, target, parent_node, [], [], []) + target_private_dir = self.relpath(self.get_target_private_dir(target), self.get_target_dir(target)) + down = self.target_to_build_root(target) + if isinstance(genlist, (build.CustomTarget, build.CustomTargetIndex)): + for i in genlist.get_outputs(): + # Path to the generated source from the current vcxproj dir via the build root + ipath = os.path.join(down, self.get_target_dir(genlist), i) + custom_target_output_files.append(ipath) + idir = self.relpath(self.get_target_dir(genlist), self.get_target_dir(target)) + if idir not in custom_target_include_dirs: + custom_target_include_dirs.append(idir) + else: + generator = genlist.get_generator() + exe = generator.get_exe() + infilelist = genlist.get_inputs() + outfilelist = genlist.get_outputs() + source_dir = os.path.join(down, self.build_to_src, genlist.subdir) + idgroup = ET.SubElement(parent_node, 'ItemGroup') + samelen = len(infilelist) == len(outfilelist) + for i, curfile in enumerate(infilelist): + if samelen: + sole_output = os.path.join(target_private_dir, outfilelist[i]) + else: + sole_output = '' + infilename = os.path.join(down, curfile.rel_to_builddir(self.build_to_src, target_private_dir)) + deps = self.get_custom_target_depend_files(genlist, True) + base_args = generator.get_arglist(infilename) + outfiles_rel = genlist.get_outputs_for(curfile) + outfiles = [os.path.join(target_private_dir, of) for of in outfiles_rel] + generator_output_files += outfiles + args = [x.replace("@INPUT@", infilename).replace('@OUTPUT@', sole_output) + for x in base_args] + args = self.replace_outputs(args, target_private_dir, outfiles_rel) + args = [x.replace("@SOURCE_DIR@", self.environment.get_source_dir()) + .replace("@BUILD_DIR@", target_private_dir) + for x in args] + args = [x.replace("@CURRENT_SOURCE_DIR@", 
source_dir) for x in args] + args = [x.replace("@SOURCE_ROOT@", self.environment.get_source_dir()) + .replace("@BUILD_ROOT@", self.environment.get_build_dir()) + for x in args] + args = [x.replace('\\', '/') for x in args] + # Always use a wrapper because MSBuild eats random characters when + # there are many arguments. + tdir_abs = os.path.join(self.environment.get_build_dir(), self.get_target_dir(target)) + cmd, _ = self.as_meson_exe_cmdline( + exe, + self.replace_extra_args(args, genlist), + workdir=tdir_abs, + capture=outfiles[0] if generator.capture else None, + force_serialize=True + ) + deps = cmd[-1:] + deps + abs_pdir = os.path.join(self.environment.get_build_dir(), self.get_target_dir(target)) + os.makedirs(abs_pdir, exist_ok=True) + cbs = ET.SubElement(idgroup, 'CustomBuild', Include=infilename) + ET.SubElement(cbs, 'Command').text = ' '.join(self.quote_arguments(cmd)) + ET.SubElement(cbs, 'Outputs').text = ';'.join(outfiles) + ET.SubElement(cbs, 'AdditionalInputs').text = ';'.join(deps) + + def generate_custom_generator_commands(self, target, parent_node): + generator_output_files = [] + custom_target_include_dirs = [] + custom_target_output_files = [] + for genlist in target.get_generated_sources(): + self.generate_genlist_for_target(genlist, target, parent_node, generator_output_files, custom_target_include_dirs, custom_target_output_files) + return generator_output_files, custom_target_output_files, custom_target_include_dirs + + def generate(self, + capture: bool = False, + vslite_ctx: dict = None) -> T.Optional[dict]: + # Check for (currently) unexpected capture arg use cases - + if capture: + raise MesonBugException('We do not expect any vs backend to generate with \'capture = True\'') + target_machine = self.interpreter.builtin['target_machine'].cpu_family_method(None, None) + if target_machine in {'64', 'x86_64'}: + # amd64 or x86_64 + target_system = self.interpreter.builtin['target_machine'].system_method(None, None) + if detect_microsoft_gdk(target_system): + self.platform = target_system + else: + self.platform = 'x64' + elif target_machine == 'x86': + # x86 + self.platform = 'Win32' + elif target_machine in {'aarch64', 'arm64'}: + target_cpu = self.interpreter.builtin['target_machine'].cpu_method(None, None) + if target_cpu == 'arm64ec': + self.platform = 'arm64ec' + else: + self.platform = 'arm64' + elif 'arm' in target_machine.lower(): + self.platform = 'ARM' + else: + raise MesonException('Unsupported Visual Studio platform: ' + target_machine) + + build_machine = self.interpreter.builtin['build_machine'].cpu_family_method(None, None) + if build_machine in {'64', 'x86_64'}: + # amd64 or x86_64 + self.build_platform = 'x64' + elif build_machine == 'x86': + # x86 + self.build_platform = 'Win32' + elif build_machine in {'aarch64', 'arm64'}: + target_cpu = self.interpreter.builtin['build_machine'].cpu_method(None, None) + if target_cpu == 'arm64ec': + self.build_platform = 'arm64ec' + else: + self.build_platform = 'arm64' + elif 'arm' in build_machine.lower(): + self.build_platform = 'ARM' + else: + raise MesonException('Unsupported Visual Studio platform: ' + build_machine) + + self.buildtype = self.environment.coredata.get_option(OptionKey('buildtype')) + self.optimization = self.environment.coredata.get_option(OptionKey('optimization')) + self.debug = self.environment.coredata.get_option(OptionKey('debug')) + try: + self.sanitize = self.environment.coredata.get_option(OptionKey('b_sanitize')) + except MesonException: + self.sanitize = 'none' + sln_filename = 
os.path.join(self.environment.get_build_dir(), self.build.project_name + '.sln')
+        projlist = self.generate_projects(vslite_ctx)
+        self.gen_testproj()
+        self.gen_installproj()
+        self.gen_regenproj()
+        self.generate_solution(sln_filename, projlist)
+        self.generate_regen_info()
+        Vs2010Backend.touch_regen_timestamp(self.environment.get_build_dir())
+
+    @staticmethod
+    def get_regen_stampfile(build_dir: str) -> str:
+        return os.path.join(os.path.join(build_dir, Environment.private_dir), 'regen.stamp')
+
+    @staticmethod
+    def touch_regen_timestamp(build_dir: str) -> None:
+        with open(Vs2010Backend.get_regen_stampfile(build_dir), 'w', encoding='utf-8'):
+            pass
+
+    def get_vcvars_command(self):
+        has_arch_values = 'VSCMD_ARG_TGT_ARCH' in os.environ and 'VSCMD_ARG_HOST_ARCH' in os.environ
+
+        # Use vcvarsall.bat if we found it.
+        if 'VCINSTALLDIR' in os.environ:
+            vs_version = os.environ['VisualStudioVersion'] \
+                if 'VisualStudioVersion' in os.environ else None
+            relative_path = 'Auxiliary\\Build\\' if vs_version is not None and vs_version >= '15.0' else ''
+            script_path = os.environ['VCINSTALLDIR'] + relative_path + 'vcvarsall.bat'
+            if os.path.exists(script_path):
+                if has_arch_values:
+                    target_arch = os.environ['VSCMD_ARG_TGT_ARCH']
+                    host_arch = os.environ['VSCMD_ARG_HOST_ARCH']
+                else:
+                    target_arch = os.environ.get('Platform', 'x86')
+                    host_arch = target_arch
+                arch = host_arch + '_' + target_arch if host_arch != target_arch else target_arch
+                return f'"{script_path}" {arch}'
+
+        # Otherwise try the VS2017 Developer Command Prompt.
+        if 'VS150COMNTOOLS' in os.environ and has_arch_values:
+            script_path = os.environ['VS150COMNTOOLS'] + 'VsDevCmd.bat'
+            if os.path.exists(script_path):
+                return '"%s" -arch=%s -host_arch=%s' % \
+                    (script_path, os.environ['VSCMD_ARG_TGT_ARCH'], os.environ['VSCMD_ARG_HOST_ARCH'])
+        return ''
+
+    def get_obj_target_deps(self, obj_list):
+        result = {}
+        for o in obj_list:
+            if isinstance(o, build.ExtractedObjects):
+                result[o.target.get_id()] = o.target
+        return result.items()
+
+    def get_target_deps(self, t: T.Dict[T.Any, build.Target], recursive=False):
+        all_deps: T.Dict[str, build.Target] = {}
+        for target in t.values():
+            if isinstance(target, build.CustomTarget):
+                for d in target.get_target_dependencies():
+                    # FIXME: this isn't strictly correct, as the target doesn't
+                    # get dependencies on non-targets, such as Files
+                    if isinstance(d, build.Target):
+                        all_deps[d.get_id()] = d
+            elif isinstance(target, build.RunTarget):
+                for d in target.get_dependencies():
+                    all_deps[d.get_id()] = d
+            elif isinstance(target, build.BuildTarget):
+                for ldep in target.link_targets:
+                    if isinstance(ldep, build.CustomTargetIndex):
+                        all_deps[ldep.get_id()] = ldep.target
+                    else:
+                        all_deps[ldep.get_id()] = ldep
+                for ldep in target.link_whole_targets:
+                    if isinstance(ldep, build.CustomTargetIndex):
+                        all_deps[ldep.get_id()] = ldep.target
+                    else:
+                        all_deps[ldep.get_id()] = ldep
+
+                for ldep in target.link_depends:
+                    if isinstance(ldep, build.CustomTargetIndex):
+                        all_deps[ldep.get_id()] = ldep.target
+                    elif isinstance(ldep, File):
+                        # Already built, no target references needed
+                        pass
+                    else:
+                        all_deps[ldep.get_id()] = ldep
+
+                for obj_id, objdep in self.get_obj_target_deps(target.objects):
+                    all_deps[obj_id] = objdep
+            else:
+                raise MesonException(f'Unknown target type for target {target}')
+
+            for gendep in target.get_generated_sources():
+                if isinstance(gendep, build.CustomTarget):
+                    all_deps[gendep.get_id()] = gendep
+                elif isinstance(gendep, build.CustomTargetIndex):
all_deps[gendep.target.get_id()] = gendep.target + else: + generator = gendep.get_generator() + gen_exe = generator.get_exe() + if isinstance(gen_exe, build.Executable): + all_deps[gen_exe.get_id()] = gen_exe + for d in itertools.chain(generator.depends, gendep.depends): + if isinstance(d, build.CustomTargetIndex): + all_deps[d.get_id()] = d.target + elif isinstance(d, build.Target): + all_deps[d.get_id()] = d + # FIXME: we don't handle other kinds of deps correctly here, such + # as GeneratedLists, StructuredSources, and generated File. + + if not t or not recursive: + return all_deps + ret = self.get_target_deps(all_deps, recursive) + ret.update(all_deps) + return ret + + def generate_solution_dirs(self, ofile: str, parents: T.Sequence[Path]) -> None: + prj_templ = 'Project("{%s}") = "%s", "%s", "{%s}"\n' + iterpaths = reversed(parents) + # Skip first path + next(iterpaths) + for path in iterpaths: + if path not in self.subdirs: + basename = path.name + identifier = generate_guid_from_path(path, 'subdir') + # top-level directories have None as their parent_dir + parent_dir = path.parent + parent_identifier = self.subdirs[parent_dir][0] \ + if parent_dir != PurePath('.') else None + self.subdirs[path] = (identifier, parent_identifier) + prj_line = prj_templ % ( + self.environment.coredata.lang_guids['directory'], + basename, basename, self.subdirs[path][0]) + ofile.write(prj_line) + ofile.write('EndProject\n') + + def generate_solution(self, sln_filename: str, projlist: T.List[Project]) -> None: + default_projlist = self.get_build_by_default_targets() + default_projlist.update(self.get_testlike_targets()) + sln_filename_tmp = sln_filename + '~' + # Note using the utf-8 BOM requires the blank line, otherwise Visual Studio Version Selector fails. + # Without the BOM, VSVS fails if there is a blank line. + with open(sln_filename_tmp, 'w', encoding='utf-8-sig') as ofile: + ofile.write('\nMicrosoft Visual Studio Solution File, Format Version %s\n' % self.sln_file_version) + ofile.write('# Visual Studio %s\n' % self.sln_version_comment) + prj_templ = 'Project("{%s}") = "%s", "%s", "{%s}"\n' + for prj in projlist: + if self.environment.coredata.get_option(OptionKey('layout')) == 'mirror': + self.generate_solution_dirs(ofile, prj[1].parents) + target = self.build.targets[prj[0]] + lang = 'default' + if hasattr(target, 'compilers') and target.compilers: + for lang_out in target.compilers.keys(): + lang = lang_out + break + prj_line = prj_templ % ( + self.environment.coredata.lang_guids[lang], + prj[0], prj[1], prj[2]) + ofile.write(prj_line) + target_dict = {target.get_id(): target} + # Get recursive deps + recursive_deps = self.get_target_deps( + target_dict, recursive=True) + ofile.write('EndProject\n') + for dep, target in recursive_deps.items(): + if prj[0] in default_projlist: + default_projlist[dep] = target + + test_line = prj_templ % (self.environment.coredata.lang_guids['default'], + 'RUN_TESTS', 'RUN_TESTS.vcxproj', + self.environment.coredata.test_guid) + ofile.write(test_line) + ofile.write('EndProject\n') + if self.gen_lite: # REGEN is replaced by the lighter-weight RECONFIGURE utility, for now. 
See comment in 'gen_regenproj' + regen_proj_name = 'RECONFIGURE' + regen_proj_fname = 'RECONFIGURE.vcxproj' + else: + regen_proj_name = 'REGEN' + regen_proj_fname = 'REGEN.vcxproj' + regen_line = prj_templ % (self.environment.coredata.lang_guids['default'], + regen_proj_name, regen_proj_fname, + self.environment.coredata.regen_guid) + ofile.write(regen_line) + ofile.write('EndProject\n') + install_line = prj_templ % (self.environment.coredata.lang_guids['default'], + 'RUN_INSTALL', 'RUN_INSTALL.vcxproj', + self.environment.coredata.install_guid) + ofile.write(install_line) + ofile.write('EndProject\n') + ofile.write('Global\n') + ofile.write('\tGlobalSection(SolutionConfigurationPlatforms) = ' + 'preSolution\n') + multi_config_buildtype_list = coredata.get_genvs_default_buildtype_list() if self.gen_lite else [self.buildtype] + for buildtype in multi_config_buildtype_list: + ofile.write('\t\t%s|%s = %s|%s\n' % + (buildtype, self.platform, buildtype, + self.platform)) + ofile.write('\tEndGlobalSection\n') + ofile.write('\tGlobalSection(ProjectConfigurationPlatforms) = ' + 'postSolution\n') + # REGEN project (multi-)configurations + for buildtype in multi_config_buildtype_list: + ofile.write('\t\t{%s}.%s|%s.ActiveCfg = %s|%s\n' % + (self.environment.coredata.regen_guid, buildtype, + self.platform, buildtype, self.platform)) + if not self.gen_lite: # With a 'genvslite'-generated solution, the regen (i.e. reconfigure) utility is only intended to run when the user explicitly builds this proj. + ofile.write('\t\t{%s}.%s|%s.Build.0 = %s|%s\n' % + (self.environment.coredata.regen_guid, buildtype, + self.platform, buildtype, self.platform)) + # Create the solution configuration + for p in projlist: + if p[3] is MachineChoice.BUILD: + config_platform = self.build_platform + else: + config_platform = self.platform + # Add to the list of projects in this solution + for buildtype in multi_config_buildtype_list: + ofile.write('\t\t{%s}.%s|%s.ActiveCfg = %s|%s\n' % + (p[2], buildtype, self.platform, + buildtype, config_platform)) + # If we're building the solution with Visual Studio's build system, enable building of buildable + # projects. However, if we're building with meson (via --genvslite), then, since each project's + # 'build' action just ends up doing the same 'meson compile ...' we don't want the 'solution build' + # repeatedly going off and doing the same 'meson compile ...' multiple times over, so we just + # leave it up to the user to select or build just one project. + # FIXME: Would be slightly nicer if we could enable building of just one top level target/project, + # but not sure how to identify that. 
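+                    # E.g. (illustrative; GUIDs elided) a buildable project in a regular solution gets both -
+                    #   {<project-guid>}.debug|x64.ActiveCfg = debug|x64
+                    #   {<project-guid>}.debug|x64.Build.0 = debug|x64
+                    # whereas a --genvslite solution gets only the ActiveCfg line, leaving the user to build
+                    # individual projects (each of which just runs 'meson compile') rather than the whole solution.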
+ if not self.gen_lite and \ + p[0] in default_projlist and \ + not isinstance(self.build.targets[p[0]], build.RunTarget): + ofile.write('\t\t{%s}.%s|%s.Build.0 = %s|%s\n' % + (p[2], buildtype, self.platform, + buildtype, config_platform)) + # RUN_TESTS and RUN_INSTALL project (multi-)configurations + for buildtype in multi_config_buildtype_list: + ofile.write('\t\t{%s}.%s|%s.ActiveCfg = %s|%s\n' % + (self.environment.coredata.test_guid, buildtype, + self.platform, buildtype, self.platform)) + ofile.write('\t\t{%s}.%s|%s.ActiveCfg = %s|%s\n' % + (self.environment.coredata.install_guid, buildtype, + self.platform, buildtype, self.platform)) + ofile.write('\tEndGlobalSection\n') + ofile.write('\tGlobalSection(SolutionProperties) = preSolution\n') + ofile.write('\t\tHideSolutionNode = FALSE\n') + ofile.write('\tEndGlobalSection\n') + if self.subdirs: + ofile.write('\tGlobalSection(NestedProjects) = ' + 'preSolution\n') + for p in projlist: + if p[1].parent != PurePath('.'): + ofile.write("\t\t{{{}}} = {{{}}}\n".format(p[2], self.subdirs[p[1].parent][0])) + for subdir in self.subdirs.values(): + if subdir[1]: + ofile.write("\t\t{{{}}} = {{{}}}\n".format(subdir[0], subdir[1])) + ofile.write('\tEndGlobalSection\n') + ofile.write('EndGlobal\n') + replace_if_different(sln_filename, sln_filename_tmp) + + def generate_projects(self, vslite_ctx: dict = None) -> T.List[Project]: + startup_project = self.environment.coredata.options[OptionKey('backend_startup_project')].value + projlist: T.List[Project] = [] + startup_idx = 0 + for (i, (name, target)) in enumerate(self.build.targets.items()): + if startup_project and startup_project == target.get_basename(): + startup_idx = i + outdir = Path( + self.environment.get_build_dir(), + self.get_target_dir(target) + ) + outdir.mkdir(exist_ok=True, parents=True) + fname = name + '.vcxproj' + target_dir = PurePath(self.get_target_dir(target)) + relname = target_dir / fname + projfile_path = outdir / fname + proj_uuid = self.environment.coredata.target_guids[name] + generated = self.gen_vcxproj(target, str(projfile_path), proj_uuid, vslite_ctx) + if generated: + projlist.append((name, relname, proj_uuid, target.for_machine)) + + # Put the startup project first in the project list + if startup_idx: + projlist.insert(0, projlist.pop(startup_idx)) + + return projlist + + def split_sources(self, srclist): + sources = [] + headers = [] + objects = [] + languages = [] + for i in srclist: + if self.environment.is_header(i): + headers.append(i) + elif self.environment.is_object(i): + objects.append(i) + elif self.environment.is_source(i): + sources.append(i) + lang = self.lang_from_source_file(i) + if lang not in languages: + languages.append(lang) + elif self.environment.is_library(i): + pass + else: + # Everything that is not an object or source file is considered a header. 
+                headers.append(i)
+        return sources, headers, objects, languages
+
+    def target_to_build_root(self, target):
+        if self.get_target_dir(target) == '':
+            return ''
+
+        directories = os.path.normpath(self.get_target_dir(target)).split(os.sep)
+        return os.sep.join(['..'] * len(directories))
+
+    def quote_arguments(self, arr):
+        return ['"%s"' % i for i in arr]
+
+    def add_project_reference(self, root: ET.Element, include: str, projid: str, link_outputs: bool = False) -> None:
+        ig = ET.SubElement(root, 'ItemGroup')
+        pref = ET.SubElement(ig, 'ProjectReference', Include=include)
+        ET.SubElement(pref, 'Project').text = '{%s}' % projid
+        if not link_outputs:
+            # Do not link in generated .lib files from dependencies automatically.
+            # We only use the dependencies for ordering and link in the generated
+            # objects and .lib files manually.
+            ET.SubElement(pref, 'LinkLibraryDependencies').text = 'false'
+
+    def add_target_deps(self, root: ET.Element, target):
+        target_dict = {target.get_id(): target}
+        for dep in self.get_target_deps(target_dict).values():
+            if dep.get_id() in self.handled_target_deps[target.get_id()]:
+                # This dependency was already handled manually.
+                continue
+            relpath = self.get_target_dir_relative_to(dep, target)
+            vcxproj = os.path.join(relpath, dep.get_id() + '.vcxproj')
+            tid = self.environment.coredata.target_guids[dep.get_id()]
+            self.add_project_reference(root, vcxproj, tid)
+
+    def create_basic_project_filters(self) -> ET.Element:
+        root = ET.Element('Project', {'ToolsVersion': '4.0',
+                                      'xmlns': 'http://schemas.microsoft.com/developer/msbuild/2003'})
+        return root
+
+    def create_basic_project(self, target_name, *,
+                             temp_dir,
+                             guid,
+                             conftype='Utility',
+                             target_ext=None,
+                             target_platform=None) -> T.Tuple[ET.Element, ET.Element]:
+        root = ET.Element('Project', {'DefaultTargets': "Build",
+                                      'ToolsVersion': '4.0',
+                                      'xmlns': 'http://schemas.microsoft.com/developer/msbuild/2003'})
+
+        confitems = ET.SubElement(root, 'ItemGroup', {'Label': 'ProjectConfigurations'})
+        if not target_platform:
+            target_platform = self.platform
+
+        multi_config_buildtype_list = coredata.get_genvs_default_buildtype_list() if self.gen_lite else [self.buildtype]
+        for buildtype in multi_config_buildtype_list:
+            prjconf = ET.SubElement(confitems, 'ProjectConfiguration',
+                                    {'Include': buildtype + '|' + target_platform})
+            ET.SubElement(prjconf, 'Configuration').text = buildtype
+            ET.SubElement(prjconf, 'Platform').text = target_platform
+
+        # Globals
+        globalgroup = ET.SubElement(root, 'PropertyGroup', Label='Globals')
+        guidelem = ET.SubElement(globalgroup, 'ProjectGuid')
+        guidelem.text = '{%s}' % guid
+        kw = ET.SubElement(globalgroup, 'Keyword')
+        kw.text = self.platform + 'Proj'
+
+        ET.SubElement(root, 'Import', Project=r'$(VCTargetsPath)\Microsoft.Cpp.Default.props')
+
+        # Configuration
+        type_config = ET.SubElement(root, 'PropertyGroup', Label='Configuration')
+        ET.SubElement(type_config, 'ConfigurationType').text = conftype
+        if self.platform_toolset:
+            ET.SubElement(type_config, 'PlatformToolset').text = self.platform_toolset
+
+        # This must come AFTER the 'PlatformToolset' element; importing before the 'PlatformToolset' elt
+        # gets set leads to msbuild failures reporting -
+        #   "The build tools for v142 (Platform Toolset = 'v142') cannot be found. ... please install v142 build tools."
+        # This is extremely unhelpful and misleading since the v14x build tools ARE installed.
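+        # E.g. (illustrative sketch; element contents vary per target) the intended ordering is -
+        #   <PropertyGroup Label="Configuration">
+        #     <ConfigurationType>Utility</ConfigurationType>
+        #     <PlatformToolset>v143</PlatformToolset>
+        #   </PropertyGroup>
+        #   <Import Project="$(VCTargetsPath)\Microsoft.Cpp.props" />
+        # i.e. the toolset is already set by the time the props import reads it.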
+ ET.SubElement(root, 'Import', Project=r'$(VCTargetsPath)\Microsoft.Cpp.props') + + if not self.gen_lite: # Plenty of elements aren't necessary for 'makefile'-style project that just redirects to meson builds + # XXX Wasn't here before for anything but gen_vcxproj , but seems fine? + ns = ET.SubElement(globalgroup, 'RootNamespace') + ns.text = target_name + + p = ET.SubElement(globalgroup, 'Platform') + p.text = target_platform + pname = ET.SubElement(globalgroup, 'ProjectName') + pname.text = target_name + if self.windows_target_platform_version: + ET.SubElement(globalgroup, 'WindowsTargetPlatformVersion').text = self.windows_target_platform_version + ET.SubElement(globalgroup, 'UseMultiToolTask').text = 'true' + + ET.SubElement(type_config, 'CharacterSet').text = 'MultiByte' + # Fixme: wasn't here before for gen_vcxproj() + ET.SubElement(type_config, 'UseOfMfc').text = 'false' + + # Project information + direlem = ET.SubElement(root, 'PropertyGroup') + fver = ET.SubElement(direlem, '_ProjectFileVersion') + fver.text = self.project_file_version + outdir = ET.SubElement(direlem, 'OutDir') + outdir.text = '.\\' + intdir = ET.SubElement(direlem, 'IntDir') + intdir.text = temp_dir + '\\' + + tname = ET.SubElement(direlem, 'TargetName') + tname.text = target_name + + if target_ext: + ET.SubElement(direlem, 'TargetExt').text = target_ext + + return (root, type_config) + + def gen_run_target_vcxproj(self, target: build.RunTarget, ofname: str, guid: str) -> None: + (root, type_config) = self.create_basic_project(target.name, + temp_dir=target.get_id(), + guid=guid) + depend_files = self.get_custom_target_depend_files(target) + + if not target.command: + # This is an alias target and thus doesn't run any command. It's + # enough to emit the references to the other projects for them to + # be built/run/..., if necessary. + assert isinstance(target, build.AliasTarget) + assert len(depend_files) == 0 + else: + assert not isinstance(target, build.AliasTarget) + + target_env = self.get_run_target_env(target) + _, _, cmd_raw = self.eval_custom_target_command(target) + wrapper_cmd, _ = self.as_meson_exe_cmdline(target.command[0], cmd_raw[1:], + force_serialize=True, env=target_env, + verbose=True) + self.add_custom_build(root, 'run_target', ' '.join(self.quote_arguments(wrapper_cmd)), + deps=depend_files) + + # The import is needed even for alias targets, otherwise the build + # target isn't defined + ET.SubElement(root, 'Import', Project=r'$(VCTargetsPath)\Microsoft.Cpp.targets') + self.add_regen_dependency(root) + self.add_target_deps(root, target) + self._prettyprint_vcxproj_xml(ET.ElementTree(root), ofname) + + def gen_custom_target_vcxproj(self, target: build.CustomTarget, ofname: str, guid: str) -> None: + if target.for_machine is MachineChoice.BUILD: + platform = self.build_platform + else: + platform = self.platform + (root, type_config) = self.create_basic_project(target.name, + temp_dir=target.get_id(), + guid=guid, + target_platform=platform) + # We need to always use absolute paths because our invocation is always + # from the target dir, not the build root. + target.absolute_paths = True + (srcs, ofilenames, cmd) = self.eval_custom_target_command(target, True) + depend_files = self.get_custom_target_depend_files(target, True) + # Always use a wrapper because MSBuild eats random characters when + # there are many arguments. 
+ tdir_abs = os.path.join(self.environment.get_build_dir(), self.get_target_dir(target)) + extra_bdeps = target.get_transitive_build_target_deps() + wrapper_cmd, _ = self.as_meson_exe_cmdline(target.command[0], cmd[1:], + # All targets run from the target dir + workdir=tdir_abs, + extra_bdeps=extra_bdeps, + capture=ofilenames[0] if target.capture else None, + feed=srcs[0] if target.feed else None, + force_serialize=True, + env=target.env, + verbose=target.console) + if target.build_always_stale: + # Use a nonexistent file to always consider the target out-of-date. + ofilenames += [self.nonexistent_file(os.path.join(self.environment.get_scratch_dir(), + 'outofdate.file'))] + self.add_custom_build(root, 'custom_target', ' '.join(self.quote_arguments(wrapper_cmd)), + deps=wrapper_cmd[-1:] + srcs + depend_files, outputs=ofilenames, + verify_files=not target.build_always_stale) + ET.SubElement(root, 'Import', Project=r'$(VCTargetsPath)\Microsoft.Cpp.targets') + self.generate_custom_generator_commands(target, root) + self.add_regen_dependency(root) + self.add_target_deps(root, target) + self._prettyprint_vcxproj_xml(ET.ElementTree(root), ofname) + + def gen_compile_target_vcxproj(self, target: build.CompileTarget, ofname: str, guid: str) -> None: + if target.for_machine is MachineChoice.BUILD: + platform = self.build_platform + else: + platform = self.platform + (root, type_config) = self.create_basic_project(target.name, + temp_dir=target.get_id(), + guid=guid, + target_platform=platform) + ET.SubElement(root, 'Import', Project=r'$(VCTargetsPath)\Microsoft.Cpp.targets') + target.generated = [self.compile_target_to_generator(target)] + target.sources = [] + self.generate_custom_generator_commands(target, root) + self.add_regen_dependency(root) + self.add_target_deps(root, target) + self._prettyprint_vcxproj_xml(ET.ElementTree(root), ofname) + + @classmethod + def lang_from_source_file(cls, src): + ext = src.split('.')[-1] + if ext in compilers.c_suffixes: + return 'c' + if ext in compilers.cpp_suffixes: + return 'cpp' + raise MesonException(f'Could not guess language from source file {src}.') + + def add_pch(self, pch_sources, lang, inc_cl): + if lang in pch_sources: + self.use_pch(pch_sources, lang, inc_cl) + + def create_pch(self, pch_sources, lang, inc_cl): + pch = ET.SubElement(inc_cl, 'PrecompiledHeader') + pch.text = 'Create' + self.add_pch_files(pch_sources, lang, inc_cl) + + def use_pch(self, pch_sources, lang, inc_cl): + pch = ET.SubElement(inc_cl, 'PrecompiledHeader') + pch.text = 'Use' + header = self.add_pch_files(pch_sources, lang, inc_cl) + pch_include = ET.SubElement(inc_cl, 'ForcedIncludeFiles') + pch_include.text = header + ';%(ForcedIncludeFiles)' + + def add_pch_files(self, pch_sources, lang, inc_cl): + header = os.path.basename(pch_sources[lang][0]) + pch_file = ET.SubElement(inc_cl, 'PrecompiledHeaderFile') + # When USING PCHs, MSVC will not do the regular include + # directory lookup, but simply use a string match to find the + # PCH to use. That means the #include directive must match the + # pch_file.text used during PCH CREATION verbatim. + # When CREATING a PCH, MSVC will do the include directory + # lookup to find the actual PCH header to use. Thus, the PCH + # header must either be in the include_directories of the target + # or be in the same directory as the PCH implementation. 
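+        # E.g. (illustrative, for lang 'cpp' and a header named 'foo_pch.h') the elements set up
+        # here and below come out as -
+        #   <PrecompiledHeaderFile>foo_pch.h</PrecompiledHeaderFile>
+        #   <PrecompiledHeaderOutputFile>$(IntDir)$(TargetName)-cpp.pch</PrecompiledHeaderOutputFile>
+        #   <ProgramDataBaseFileName>$(IntDir)$(TargetName)-cpp.pdb</ProgramDataBaseFileName>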
+        pch_file.text = header
+        pch_out = ET.SubElement(inc_cl, 'PrecompiledHeaderOutputFile')
+        pch_out.text = f'$(IntDir)$(TargetName)-{lang}.pch'
+
+        # Need to set the name for the pdb, as cl otherwise gives it a static
+        # name. Which leads to problems when there is more than one pch
+        # (e.g. for different languages).
+        pch_pdb = ET.SubElement(inc_cl, 'ProgramDataBaseFileName')
+        pch_pdb.text = f'$(IntDir)$(TargetName)-{lang}.pdb'
+
+        return header
+
+    def is_argument_with_msbuild_xml_entry(self, entry):
+        # Remove arguments that have a top level XML entry so
+        # they are not used twice.
+        # FIXME add args as needed.
+        if entry[1:].startswith('fsanitize'):
+            return True
+        return entry[1:].startswith('M')
+
+    def add_additional_options(self, lang, parent_node, file_args):
+        args = []
+        for arg in file_args[lang].to_native():
+            if self.is_argument_with_msbuild_xml_entry(arg):
+                continue
+            if arg == '%(AdditionalOptions)':
+                args.append(arg)
+            else:
+                args.append(self.escape_additional_option(arg))
+        ET.SubElement(parent_node, "AdditionalOptions").text = ' '.join(args)
+
+    # Set up each project's source file ('ClCompile') element with appropriate preprocessor, include dir, and compile option values for correct intellisense.
+    def add_project_nmake_defs_incs_and_opts(self, parent_node, src: str, defs_paths_opts_per_lang_and_buildtype: dict, platform: str):
+        # For compactness, sources whose type matches the primary src type (i.e. most frequent in the set of source types used in the target/project,
+        # according to the 'captured_build_args' map), can simply reference the preprocessor definitions, include dirs, and compile option NMake fields of
+        # the project itself.
+        # However, if a src is of a non-primary type, it could have totally different defs/dirs/options so we're going to have to fill in the full, verbose
+        # set of values for these fields, which needs to be fully expanded per build type / configuration.
+        #
+        # FIXME: Suppose a project contains .cpp and .c src files with different compile defs/dirs/options, while also having .h files, some of which
+        # are included by .cpp sources and others included by .c sources: How do we know whether the .h source should be using the .cpp or .c src
+        # defs/dirs/options? Might it also be possible for a .h header to be shared between .cpp and .c sources? If so, I don't see how we can
+        # correctly configure these intellisense fields.
+        # For now, all sources/headers that fail to find their extension's language in the '...nmake_defs_paths_opts...' map will just adopt the project
+        # defs/dirs/opts that are set for the nominal 'primary' src type.
+        ext = src.split('.')[-1]
+        lang = compilers.compilers.SUFFIX_TO_LANG.get(ext, None)
+        if lang in defs_paths_opts_per_lang_and_buildtype.keys():
+            # This is a non-primary src type for which we can't simply reference the project's nmake fields;
+            # we must laboriously fill in the fields for all buildtypes.
+            for buildtype in coredata.get_genvs_default_buildtype_list():
+                (defs, paths, opts) = defs_paths_opts_per_lang_and_buildtype[lang][buildtype]
+                condition = f'\'$(Configuration)|$(Platform)\'==\'{buildtype}|{platform}\''
+                ET.SubElement(parent_node, 'PreprocessorDefinitions', Condition=condition).text = defs
+                ET.SubElement(parent_node, 'AdditionalIncludeDirectories', Condition=condition).text = paths
+                ET.SubElement(parent_node, 'AdditionalOptions', Condition=condition).text = opts
+        else:  # Can't find bespoke nmake defs/dirs/opts fields for this extension, so just reference the project's fields
+            ET.SubElement(parent_node, 'PreprocessorDefinitions').text = '$(NMakePreprocessorDefinitions)'
+            ET.SubElement(parent_node, 'AdditionalIncludeDirectories').text = '$(NMakeIncludeSearchPath)'
+            ET.SubElement(parent_node, 'AdditionalOptions').text = '$(AdditionalOptions)'
+
+    def add_preprocessor_defines(self, lang, parent_node, file_defines):
+        defines = []
+        for define in file_defines[lang]:
+            if define == '%(PreprocessorDefinitions)':
+                defines.append(define)
+            else:
+                defines.append(self.escape_preprocessor_define(define))
+        ET.SubElement(parent_node, "PreprocessorDefinitions").text = ';'.join(defines)
+
+    def add_include_dirs(self, lang, parent_node, file_inc_dirs):
+        dirs = file_inc_dirs[lang]
+        ET.SubElement(parent_node, "AdditionalIncludeDirectories").text = ';'.join(dirs)
+
+    @staticmethod
+    def has_objects(objects, additional_objects, generated_objects):
+        # Ignore generated objects, those are automatically used by MSBuild because they are part of
+        # the CustomBuild Outputs.
+        return len(objects) + len(additional_objects) > 0
+
+    @staticmethod
+    def add_generated_objects(node, generated_objects):
+        # Do not add generated objects to project file. Those are automatically used by MSBuild, because
+        # they are part of the CustomBuild Outputs.
+        return
+
+    @staticmethod
+    def escape_preprocessor_define(define: str) -> str:
+        # See: https://msdn.microsoft.com/en-us/library/bb383819.aspx
+        table = str.maketrans({'%': '%25', '$': '%24', '@': '%40',
+                               "'": '%27', ';': '%3B', '?': '%3F', '*': '%2A',
+                               # We need to escape backslash because it'll be un-escaped by
+                               # Windows during process creation when it parses the arguments
+                               # Basically, this converts `\` to `\\`.
+                               '\\': '\\\\'})
+        return define.translate(table)
+
+    @staticmethod
+    def escape_additional_option(option: str) -> str:
+        # See: https://msdn.microsoft.com/en-us/library/bb383819.aspx
+        table = str.maketrans({'%': '%25', '$': '%24', '@': '%40',
+                               "'": '%27', ';': '%3B', '?': '%3F', '*': '%2A', ' ': '%20'})
+        option = option.translate(table)
+        # Since we're surrounding the option with ", if it ends in \ that will
+        # escape the " when the process arguments are parsed and the starting
+        # " will not terminate. So we escape it if that's the case. I'm not
+        # kidding, this is how escaping works for process args on Windows.
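+        # E.g. (illustrative values) -
+        #   '/W2'                  becomes  "/W2"
+        #   'C:\dir with spaces\'  becomes  "C:\dir%20with%20spaces\\"
+        # i.e. spaces are percent-encoded above, and a trailing backslash is doubled below so it
+        # can't escape the closing quote.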
+ if option.endswith('\\'): + option += '\\' + return f'"{option}"' + + @staticmethod + def add_filter_info(list_filters_path, filter_group, sub_element, file_path, forced_filter_name=None, down=''): + filter_inc_cl = ET.SubElement(filter_group, sub_element, Include=file_path) + + # Force the subdir + if forced_filter_name: + filter_path = forced_filter_name + else: + # Create a subdir following the placement if on the same drive + filter_path = Path(file_path).resolve().parent + if Path(file_path).drive == Path(down).drive: + filter_path = Path(os.path.relpath(str(filter_path), down)).as_posix().replace('../', '').replace('..', '') + else: + return # No filter needed + + # Needed to have non posix path + filter_path = filter_path.replace('/', '\\') + + if filter_path and filter_path != '.': + # Remove ending backslash + filter_path = filter_path.rstrip('\\') + # Create a hierarchical level of directories + list_path = filter_path.split('\\') + new_filter_path = '' + for path in list_path: + if new_filter_path: + new_filter_path = new_filter_path + '\\' + path + else: + new_filter_path = path + list_filters_path.add(new_filter_path) + # Create a new filter node for the current file added + ET.SubElement(filter_inc_cl, 'Filter').text = filter_path + + @staticmethod + def split_link_args(args): + """ + Split a list of link arguments into three lists: + * library search paths + * library filenames (or paths) + * other link arguments + """ + lpaths = [] + libs = [] + other = [] + for arg in args: + if arg.startswith('/LIBPATH:'): + lpath = arg[9:] + # De-dup library search paths by removing older entries when + # a new one is found. This is necessary because unlike other + # search paths such as the include path, the library is + # searched for in the newest (right-most) search path first. + if lpath in lpaths: + lpaths.remove(lpath) + lpaths.append(lpath) + elif arg.startswith(('/', '-')): + other.append(arg) + # It's ok if we miss libraries with non-standard extensions here. + # They will go into the general link arguments. + elif arg.endswith('.lib') or arg.endswith('.a'): + # De-dup + if arg not in libs: + libs.append(arg) + else: + other.append(arg) + return lpaths, libs, other + + def _get_cl_compiler(self, target): + for lang, c in target.compilers.items(): + if lang in {'c', 'cpp'}: + return c + # No source files, only objects, but we still need a compiler, so + # return a found compiler + if len(target.objects) > 0: + for lang, c in self.environment.coredata.compilers[target.for_machine].items(): + if lang in {'c', 'cpp'}: + return c + raise MesonException('Could not find a C or C++ compiler. 
MSVC can only build C/C++ projects.') + + def _prettyprint_vcxproj_xml(self, tree: ET.ElementTree, ofname: str) -> None: + ofname_tmp = ofname + '~' + tree.write(ofname_tmp, encoding='utf-8', xml_declaration=True) + + # ElementTree cannot do pretty-printing, so do it manually + doc = xml.dom.minidom.parse(ofname_tmp) + with open(ofname_tmp, 'w', encoding='utf-8') as of: + of.write(doc.toprettyxml()) + replace_if_different(ofname, ofname_tmp) + + # Returns: (target_args,file_args), (target_defines,file_defines), (target_inc_dirs,file_inc_dirs) + def get_args_defines_and_inc_dirs(self, target, compiler, generated_files_include_dirs, proj_to_src_root, proj_to_src_dir, build_args): + # Arguments, include dirs, defines for all files in the current target + target_args = [] + target_defines = [] + target_inc_dirs = [] + # Arguments, include dirs, defines passed to individual files in + # a target; perhaps because the args are language-specific + # + # file_args is also later split out into defines and include_dirs in + # case someone passed those in there + file_args: T.Dict[str, CompilerArgs] = {l: c.compiler_args() for l, c in target.compilers.items()} + file_defines = {l: [] for l in target.compilers} + file_inc_dirs = {l: [] for l in target.compilers} + # The order in which these compile args are added must match + # generate_single_compile() and generate_basic_compiler_args() + for l, comp in target.compilers.items(): + if l in file_args: + file_args[l] += compilers.get_base_compile_args( + target.get_options(), comp) + file_args[l] += comp.get_option_compile_args( + target.get_options()) + + # Add compile args added using add_project_arguments() + for l, args in self.build.projects_args[target.for_machine].get(target.subproject, {}).items(): + if l in file_args: + file_args[l] += args + # Add compile args added using add_global_arguments() + # These override per-project arguments + for l, args in self.build.global_args[target.for_machine].items(): + if l in file_args: + file_args[l] += args + # Compile args added from the env or cross file: CFLAGS/CXXFLAGS, etc. We want these + # to override all the defaults, but not the per-target compile args. + for l in file_args.keys(): + file_args[l] += target.get_option(OptionKey('args', machine=target.for_machine, lang=l)) + for args in file_args.values(): + # This is where Visual Studio will insert target_args, target_defines, + # etc, which are added later from external deps (see below). + args += ['%(AdditionalOptions)', '%(PreprocessorDefinitions)', '%(AdditionalIncludeDirectories)'] + # Add custom target dirs as includes automatically, but before + # target-specific include dirs. See _generate_single_compile() in + # the ninja backend for caveats. + args += ['-I' + arg for arg in generated_files_include_dirs] + # Add include dirs from the `include_directories:` kwarg on the target + # and from `include_directories:` of internal deps of the target. + # + # Target include dirs should override internal deps include dirs. + # This is handled in BuildTarget.process_kwargs() + # + # Include dirs from internal deps should override include dirs from + # external deps and must maintain the order in which they are + # specified. Hence, we must reverse so that the order is preserved. + # + # These are per-target, but we still add them as per-file because we + # need them to be looked in first. 
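+        # E.g. (illustrative) a target under 'sub/dir' with include_directories('inc') gains, per language -
+        #   '-I' + <proj_to_src_root>/sub/dir/inc   (the include dir in the source tree)
+        #   '-Iinc'                                 (the build-tree equivalent, relative to the vcxproj dir)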
+ for d in reversed(target.get_include_dirs()): + # reversed is used to keep order of includes + for i in reversed(d.get_incdirs()): + curdir = os.path.join(d.get_curdir(), i) + try: + # Add source subdir first so that the build subdir overrides it + args.append('-I' + os.path.join(proj_to_src_root, curdir)) # src dir + args.append('-I' + self.relpath(curdir, target.subdir)) # build dir + except ValueError: + # Include is on different drive + args.append('-I' + os.path.normpath(curdir)) + for i in d.get_extra_build_dirs(): + curdir = os.path.join(d.get_curdir(), i) + args.append('-I' + self.relpath(curdir, target.subdir)) # build dir + # Add per-target compile args, f.ex, `c_args : ['/DFOO']`. We set these + # near the end since these are supposed to override everything else. + for l, args in target.extra_args.items(): + if l in file_args: + file_args[l] += args + # The highest priority includes. In order of directory search: + # target private dir, target build dir, target source dir + for args in file_args.values(): + t_inc_dirs = [self.relpath(self.get_target_private_dir(target), + self.get_target_dir(target))] + if target.implicit_include_directories: + t_inc_dirs += ['.', proj_to_src_dir] + args += ['-I' + arg for arg in t_inc_dirs] + + # Split preprocessor defines and include directories out of the list of + # all extra arguments. The rest go into %(AdditionalOptions). + for l, args in file_args.items(): + for arg in args[:]: + if arg.startswith(('-D', '/D')) or arg == '%(PreprocessorDefinitions)': + file_args[l].remove(arg) + # Don't escape the marker + if arg == '%(PreprocessorDefinitions)': + define = arg + else: + define = arg[2:] + # De-dup + if define not in file_defines[l]: + file_defines[l].append(define) + elif arg.startswith(('-I', '/I')) or arg == '%(AdditionalIncludeDirectories)': + file_args[l].remove(arg) + # Don't escape the marker + if arg == '%(AdditionalIncludeDirectories)': + inc_dir = arg + else: + inc_dir = arg[2:] + # De-dup + if inc_dir not in file_inc_dirs[l]: + file_inc_dirs[l].append(inc_dir) + # Add include dirs to target as well so that "Go to Document" works in headers + if inc_dir not in target_inc_dirs: + target_inc_dirs.append(inc_dir) + + # Split compile args needed to find external dependencies + # Link args are added while generating the link command + for d in reversed(target.get_external_deps()): + # Cflags required by external deps might have UNIX-specific flags, + # so filter them out if needed + if d.name != 'openmp': + d_compile_args = compiler.unix_args_to_native(d.get_compile_args()) + for arg in d_compile_args: + if arg.startswith(('-D', '/D')): + define = arg[2:] + # De-dup + if define in target_defines: + target_defines.remove(define) + target_defines.append(define) + elif arg.startswith(('-I', '/I')): + inc_dir = arg[2:] + # De-dup + if inc_dir not in target_inc_dirs: + target_inc_dirs.append(inc_dir) + else: + target_args.append(arg) + + if '/Gw' in build_args: + target_args.append('/Gw') + + return (target_args, file_args), (target_defines, file_defines), (target_inc_dirs, file_inc_dirs) + + @staticmethod + def get_build_args(compiler, buildtype: str, optimization_level: str, debug: bool, sanitize: str) -> T.List[str]: + build_args = compiler.get_buildtype_args(buildtype) + build_args += compiler.get_optimization_args(optimization_level) + build_args += compiler.get_debug_args(debug) + build_args += compiler.sanitizer_compile_args(sanitize) + + return build_args + + #Convert a list of compile arguments from - + # [ 
'-I..\\some\\dir\\include', '-I../../some/other/dir', '/MDd', '/W2', '/std:c++17', '/Od', '/Zi', '-DSOME_DEF=1', '-DANOTHER_DEF=someval', ...]
+    #to -
+    #  'SOME_DEF=1;ANOTHER_DEF=someval;'
+    #which is the format required by the visual studio project's NMakePreprocessorDefinitions field.
+    @staticmethod
+    def extract_nmake_preprocessor_defs(captured_build_args: list[str]) -> str:
+        defs = ''
+        for arg in captured_build_args:
+            if arg.startswith(('-D', '/D')):
+                defs += arg[2:] + ';'
+        return defs
+
+    #Convert a list of compile arguments from -
+    #  [ '-I..\\some\\dir\\include', '-I../../some/other/dir', '/MDd', '/W2', '/std:c++17', '/Od', '/Zi', '-DSOME_DEF=1', '-DANOTHER_DEF=someval', ...]
+    #to -
+    #  '..\\some\\dir\\include;../../some/other/dir;'
+    #which is the format required by the visual studio project's NMakeIncludeSearchPath field.
+    @staticmethod
+    def extract_nmake_include_paths(captured_build_args: list[str]) -> str:
+        paths = ''
+        for arg in captured_build_args:
+            if arg.startswith(('-I', '/I')):
+                paths += arg[2:] + ';'
+        return paths
+
+    #Convert a list of compile arguments from -
+    #  [ '-I..\\some\\dir\\include', '-I../../some/other/dir', '/MDd', '/W2', '/std:c++17', '/Od', '/Zi', '-DSOME_DEF=1', '-DANOTHER_DEF=someval', ...]
+    #to -
+    #  '/MDd;/W2;/std:c++17;/Od;/Zi;'
+    #which is the format required by the visual studio project's 'AdditionalOptions' intellisense field.
+    @staticmethod
+    def extract_intellisense_additional_compiler_options(captured_build_args: list[str]) -> str:
+        additional_opts = ''
+        for arg in captured_build_args:
+            if (not arg.startswith(('-D', '/D', '-I', '/I'))) and arg.startswith(('-', '/')):
+                additional_opts += arg + ';'
+        return additional_opts
+
+    @staticmethod
+    def get_nmake_base_meson_command_and_exe_search_paths() -> T.Tuple[str, str]:
+        meson_cmd_list = mesonlib.get_meson_command()
+        assert (len(meson_cmd_list) == 1) or (len(meson_cmd_list) == 2)
+        # We expect get_meson_command() to either be of the form -
+        #   1:  ['path/to/meson.exe']
+        # or -
+        #   2:  ['path/to/python.exe', 'and/path/to/meson.py']
+        # so we'd like to ensure our makefile-style project invokes the same meson executable or python src as this instance.
+        exe_search_paths = os.path.dirname(meson_cmd_list[0])
+        nmake_base_meson_command = os.path.basename(meson_cmd_list[0])
+        if len(meson_cmd_list) != 1:
+            # We expect to be dealing with case '2', shown above.
+            # With Windows, it's also possible that we get a path to the second element of meson_cmd_list that contains spaces
+            # (e.g. 'and/path to/meson.py'). So, because this will end up directly in the makefile/NMake command lines, we'd
+            # better always enclose it in quotes. Only strictly necessary for paths with spaces but no harm for paths without -
+            nmake_base_meson_command += ' \"' + meson_cmd_list[1] + '\"'
+            exe_search_paths += ';' + os.path.dirname(meson_cmd_list[1])
+
+        # Additionally, in some cases, we appear to have to add 'C:\Windows\system32;C:\Windows' to the 'Path' environment (via the
+        # ExecutablePath element), without which, the 'meson compile ...' (NMakeBuildCommandLine) command can fail (failure to find
+        # stdio.h and similar), so something is quietly switching some critical build behaviour based on the presence of these in
+        # the 'Path'.
+        # Not sure if this ultimately comes down to some 'find and guess' hidden behaviours within meson or within MSVC tools, but
+        # I guess some projects may implicitly rely on this behaviour.
+        # Things would be cleaner, more robust, repeatable, and portable if meson (and msvc tools) replaced all this kind of
+        # find/guess behaviour with the requirement that things just be explicitly specified by the user.
+        # An example of this can be seen with -
+        #   1:  Download https://github.com/facebook/zstd source
+        #   2:  cd to the 'zstd-dev\build\meson' dir
+        #   3:  meson setup -Dbin_programs=true -Dbin_contrib=true --genvslite vs2022 builddir_vslite
+        #   4:  Open the generated 'builddir_vslite_vs\zstd.sln' and build through a project, which should explicitly add the above to
+        #       the project's 'Executable Directories' paths and build successfully.
+        #   5:  Remove 'C:\Windows\system32;C:\Windows;' from the same project's 'Executable Directories' paths and rebuild.
+        #       This should now fail.
+        # It feels uncomfortable to do this but what better alternative is there (and might this introduce new problems)? -
+        exe_search_paths += ';C:\\Windows\\system32;C:\\Windows'
+        # A meson project that explicitly specifies compiler/linker tools and sdk/include paths is not going to have any problems
+        # with this addition.
+
+        return (nmake_base_meson_command, exe_search_paths)
+
+    def add_gen_lite_makefile_vcxproj_elements(self,
+                                               root: ET.Element,
+                                               platform: str,
+                                               target_ext: str,
+                                               vslite_ctx: dict,
+                                               target,
+                                               proj_to_build_root: str,
+                                               primary_src_lang: T.Optional[str]) -> None:
+        ET.SubElement(root, 'ImportGroup', Label='ExtensionSettings')
+        ET.SubElement(root, 'ImportGroup', Label='Shared')
+        prop_sheets_grp = ET.SubElement(root, 'ImportGroup', Label='PropertySheets')
+        ET.SubElement(prop_sheets_grp, 'Import', {'Project': r'$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props',
+                                                  'Condition': r"exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')",
+                                                  'Label': 'LocalAppDataPlatform'
+                                                  })
+        ET.SubElement(root, 'PropertyGroup', Label='UserMacros')
+
+        (nmake_base_meson_command, exe_search_paths) = Vs2010Backend.get_nmake_base_meson_command_and_exe_search_paths()
+
+        # Relative path from this .vcxproj to the directory containing the set of '..._[debug/debugoptimized/release]' setup meson build dirs.
+        proj_to_multiconfigured_builds_parent_dir = os.path.join(proj_to_build_root, '..')
+
+        # Conditional property groups per configuration (buildtype). E.g. -
+        #   <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='release|x64'">
+        multi_config_buildtype_list = coredata.get_genvs_default_buildtype_list()
+        for buildtype in multi_config_buildtype_list:
+            per_config_prop_group = ET.SubElement(root, 'PropertyGroup', Condition=f'\'$(Configuration)|$(Platform)\'==\'{buildtype}|{platform}\'')
+            (_, build_dir_tail) = os.path.split(self.src_to_build)
+            meson_build_dir_for_buildtype = build_dir_tail[:-2] + buildtype  # Get the buildtype suffixed 'builddir_[debug/release/etc]' from 'builddir_vs', for example.
+            proj_to_build_dir_for_buildtype = str(os.path.join(proj_to_multiconfigured_builds_parent_dir, meson_build_dir_for_buildtype))
+            ET.SubElement(per_config_prop_group, 'OutDir').text = f'{proj_to_build_dir_for_buildtype}\\'
+            ET.SubElement(per_config_prop_group, 'IntDir').text = f'{proj_to_build_dir_for_buildtype}\\'
+            ET.SubElement(per_config_prop_group, 'NMakeBuildCommandLine').text = f'{nmake_base_meson_command} compile -C "{proj_to_build_dir_for_buildtype}"'
+            ET.SubElement(per_config_prop_group, 'NMakeOutput').text = f'$(OutDir){target.name}{target_ext}'
+            captured_build_args = vslite_ctx[buildtype][target.get_id()]
+            # 'captured_build_args' is a dictionary, mapping from each src file type to a list of compile args to use for that type.
+
+    def add_gen_lite_makefile_vcxproj_elements(self,
+                                               root: ET.Element,
+                                               platform: str,
+                                               target_ext: str,
+                                               vslite_ctx: dict,
+                                               target,
+                                               proj_to_build_root: str,
+                                               primary_src_lang: T.Optional[str]) -> None:
+        ET.SubElement(root, 'ImportGroup', Label='ExtensionSettings')
+        ET.SubElement(root, 'ImportGroup', Label='Shared')
+        prop_sheets_grp = ET.SubElement(root, 'ImportGroup', Label='PropertySheets')
+        ET.SubElement(prop_sheets_grp, 'Import', {'Project': r'$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props',
+                                                  'Condition': r"exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')",
+                                                  'Label': 'LocalAppDataPlatform'
+                                                  })
+        ET.SubElement(root, 'PropertyGroup', Label='UserMacros')
+
+        (nmake_base_meson_command, exe_search_paths) = Vs2010Backend.get_nmake_base_meson_command_and_exe_search_paths()
+
+        # Relative path from this .vcxproj to the directory containing the set of '..._[debug/debugoptimized/release]' setup meson build dirs.
+        proj_to_multiconfigured_builds_parent_dir = os.path.join(proj_to_build_root, '..')
+
+        # Conditional property groups per configuration (buildtype). E.g. -
+        #   <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='debug|x64'">
+        multi_config_buildtype_list = coredata.get_genvs_default_buildtype_list()
+        for buildtype in multi_config_buildtype_list:
+            per_config_prop_group = ET.SubElement(root, 'PropertyGroup', Condition=f'\'$(Configuration)|$(Platform)\'==\'{buildtype}|{platform}\'')
+            (_, build_dir_tail) = os.path.split(self.src_to_build)
+            meson_build_dir_for_buildtype = build_dir_tail[:-2] + buildtype # Get the buildtype suffixed 'builddir_[debug/release/etc]' from 'builddir_vs', for example.
+            proj_to_build_dir_for_buildtype = str(os.path.join(proj_to_multiconfigured_builds_parent_dir, meson_build_dir_for_buildtype))
+            ET.SubElement(per_config_prop_group, 'OutDir').text = f'{proj_to_build_dir_for_buildtype}\\'
+            ET.SubElement(per_config_prop_group, 'IntDir').text = f'{proj_to_build_dir_for_buildtype}\\'
+            ET.SubElement(per_config_prop_group, 'NMakeBuildCommandLine').text = f'{nmake_base_meson_command} compile -C "{proj_to_build_dir_for_buildtype}"'
+            ET.SubElement(per_config_prop_group, 'NMakeOutput').text = f'$(OutDir){target.name}{target_ext}'
+            captured_build_args = vslite_ctx[buildtype][target.get_id()]
+            # 'captured_build_args' is a dictionary, mapping from each src file type to a list of compile args to use for that type.
+            # Usually, there's just one, but we could have multiple src types.  However, since there's only one field for the makefile
+            # project's NMake... preprocessor/include intellisense fields, we'll just use the first src type we have to fill in
+            # these fields.  Any src files in this VS project that aren't of this first src type will then need to override
+            # their intellisense fields instead of simply referencing the values in the project.
+            ET.SubElement(per_config_prop_group, 'NMakeReBuildCommandLine').text = f'{nmake_base_meson_command} compile -C "{proj_to_build_dir_for_buildtype}" --clean && {nmake_base_meson_command} compile -C "{proj_to_build_dir_for_buildtype}"'
+            ET.SubElement(per_config_prop_group, 'NMakeCleanCommandLine').text = f'{nmake_base_meson_command} compile -C "{proj_to_build_dir_for_buildtype}" --clean'
+            # Need to set the 'ExecutablePath' element for the above NMake... commands to be able to invoke the meson command.
+            ET.SubElement(per_config_prop_group, 'ExecutablePath').text = exe_search_paths
+            # We may not have any src files and so won't have a primary src language.  In which case, we've nothing to fill in for this target's intellisense fields -
+            if primary_src_lang:
+                primary_src_type_build_args = captured_build_args[primary_src_lang]
+                ET.SubElement(per_config_prop_group, 'NMakePreprocessorDefinitions').text = Vs2010Backend.extract_nmake_preprocessor_defs(primary_src_type_build_args)
+                ET.SubElement(per_config_prop_group, 'NMakeIncludeSearchPath').text = Vs2010Backend.extract_nmake_include_paths(primary_src_type_build_args)
+                ET.SubElement(per_config_prop_group, 'AdditionalOptions').text = Vs2010Backend.extract_intellisense_additional_compiler_options(primary_src_type_build_args)
+
+            # Unless we explicitly specify the following empty path elements, the project is assigned a load of nasty defaults that fill these
+            # with values like -
+            #    $(VC_IncludePath);$(WindowsSDK_IncludePath);
+            # which are all based on the current install environment (a recipe for non-reproducibility problems), not the paths that will be used by
+            # the actual meson compile jobs.  Although these elements look like they're only for MSBuild operations, they're not needed with our simple,
+            # lite/makefile-style projects, so let's just clear them in case they do get used/confused by intellisense.
+            ET.SubElement(per_config_prop_group, 'IncludePath')
+            ET.SubElement(per_config_prop_group, 'ExternalIncludePath')
+            ET.SubElement(per_config_prop_group, 'ReferencePath')
+            ET.SubElement(per_config_prop_group, 'LibraryPath')
+            ET.SubElement(per_config_prop_group, 'LibraryWPath')
+            ET.SubElement(per_config_prop_group, 'SourcePath')
+            ET.SubElement(per_config_prop_group, 'ExcludePath')
+
+    def add_non_makefile_vcxproj_elements(
+            self,
+            root: ET.Element,
+            type_config: ET.Element,
+            target,
+            platform: str,
+            subsystem,
+            build_args,
+            target_args,
+            target_defines,
+            target_inc_dirs,
+            file_args
+            ) -> None:
+        compiler = self._get_cl_compiler(target)
+        buildtype_link_args = compiler.get_buildtype_linker_args(self.buildtype)
+
+        # Prefix to use to access the build root from the vcxproj dir
+        down = self.target_to_build_root(target)
+
+        # FIXME: Should the following just be set in create_basic_project(), even if
+        # irrelevant for current target?
+ + # FIXME: Meson's LTO support needs to be integrated here + ET.SubElement(type_config, 'WholeProgramOptimization').text = 'false' + # Let VS auto-set the RTC level + ET.SubElement(type_config, 'BasicRuntimeChecks').text = 'Default' + # Incremental linking increases code size + if '/INCREMENTAL:NO' in buildtype_link_args: + ET.SubElement(type_config, 'LinkIncremental').text = 'false' + + # Build information + compiles = ET.SubElement(root, 'ItemDefinitionGroup') + clconf = ET.SubElement(compiles, 'ClCompile') + if True in ((dep.name == 'openmp') for dep in target.get_external_deps()): + ET.SubElement(clconf, 'OpenMPSupport').text = 'true' + # CRT type; debug or release + vscrt_type = target.get_option(OptionKey('b_vscrt')) + if vscrt_type == 'from_buildtype': + if self.buildtype == 'debug': + ET.SubElement(type_config, 'UseDebugLibraries').text = 'true' + ET.SubElement(clconf, 'RuntimeLibrary').text = 'MultiThreadedDebugDLL' + else: + ET.SubElement(type_config, 'UseDebugLibraries').text = 'false' + ET.SubElement(clconf, 'RuntimeLibrary').text = 'MultiThreadedDLL' + elif vscrt_type == 'static_from_buildtype': + if self.buildtype == 'debug': + ET.SubElement(type_config, 'UseDebugLibraries').text = 'true' + ET.SubElement(clconf, 'RuntimeLibrary').text = 'MultiThreadedDebug' + else: + ET.SubElement(type_config, 'UseDebugLibraries').text = 'false' + ET.SubElement(clconf, 'RuntimeLibrary').text = 'MultiThreaded' + elif vscrt_type == 'mdd': + ET.SubElement(type_config, 'UseDebugLibraries').text = 'true' + ET.SubElement(clconf, 'RuntimeLibrary').text = 'MultiThreadedDebugDLL' + elif vscrt_type == 'mt': + # FIXME, wrong + ET.SubElement(type_config, 'UseDebugLibraries').text = 'false' + ET.SubElement(clconf, 'RuntimeLibrary').text = 'MultiThreaded' + elif vscrt_type == 'mtd': + # FIXME, wrong + ET.SubElement(type_config, 'UseDebugLibraries').text = 'true' + ET.SubElement(clconf, 'RuntimeLibrary').text = 'MultiThreadedDebug' + else: + ET.SubElement(type_config, 'UseDebugLibraries').text = 'false' + ET.SubElement(clconf, 'RuntimeLibrary').text = 'MultiThreadedDLL' + # Sanitizers + if '/fsanitize=address' in build_args: + ET.SubElement(type_config, 'EnableASAN').text = 'true' + # Debug format + if '/ZI' in build_args: + ET.SubElement(clconf, 'DebugInformationFormat').text = 'EditAndContinue' + elif '/Zi' in build_args: + ET.SubElement(clconf, 'DebugInformationFormat').text = 'ProgramDatabase' + elif '/Z7' in build_args: + ET.SubElement(clconf, 'DebugInformationFormat').text = 'OldStyle' + else: + ET.SubElement(clconf, 'DebugInformationFormat').text = 'None' + # Runtime checks + if '/RTC1' in build_args: + ET.SubElement(clconf, 'BasicRuntimeChecks').text = 'EnableFastChecks' + elif '/RTCu' in build_args: + ET.SubElement(clconf, 'BasicRuntimeChecks').text = 'UninitializedLocalUsageCheck' + elif '/RTCs' in build_args: + ET.SubElement(clconf, 'BasicRuntimeChecks').text = 'StackFrameRuntimeCheck' + # Exception handling has to be set in the xml in addition to the "AdditionalOptions" because otherwise + # cl will give warning D9025: overriding '/Ehs' with cpp_eh value + if 'cpp' in target.compilers: + eh = target.get_option(OptionKey('eh', machine=target.for_machine, lang='cpp')) + if eh == 'a': + ET.SubElement(clconf, 'ExceptionHandling').text = 'Async' + elif eh == 's': + ET.SubElement(clconf, 'ExceptionHandling').text = 'SyncCThrow' + elif eh == 'none': + ET.SubElement(clconf, 'ExceptionHandling').text = 'false' + else: # 'sc' or 'default' + ET.SubElement(clconf, 'ExceptionHandling').text = 'Sync' + + if 
len(target_args) > 0: + target_args.append('%(AdditionalOptions)') + ET.SubElement(clconf, "AdditionalOptions").text = ' '.join(target_args) + ET.SubElement(clconf, 'AdditionalIncludeDirectories').text = ';'.join(target_inc_dirs) + target_defines.append('%(PreprocessorDefinitions)') + ET.SubElement(clconf, 'PreprocessorDefinitions').text = ';'.join(target_defines) + ET.SubElement(clconf, 'FunctionLevelLinking').text = 'true' + # Warning level + warning_level = T.cast('str', target.get_option(OptionKey('warning_level'))) + ET.SubElement(clconf, 'WarningLevel').text = 'Level' + str(1 + int(warning_level)) + if target.get_option(OptionKey('werror')): + ET.SubElement(clconf, 'TreatWarningAsError').text = 'true' + # Optimization flags + o_flags = split_o_flags_args(build_args) + if '/Ox' in o_flags: + ET.SubElement(clconf, 'Optimization').text = 'Full' + elif '/O2' in o_flags: + ET.SubElement(clconf, 'Optimization').text = 'MaxSpeed' + elif '/O1' in o_flags: + ET.SubElement(clconf, 'Optimization').text = 'MinSpace' + elif '/Od' in o_flags: + ET.SubElement(clconf, 'Optimization').text = 'Disabled' + if '/Oi' in o_flags: + ET.SubElement(clconf, 'IntrinsicFunctions').text = 'true' + if '/Ob1' in o_flags: + ET.SubElement(clconf, 'InlineFunctionExpansion').text = 'OnlyExplicitInline' + elif '/Ob2' in o_flags: + ET.SubElement(clconf, 'InlineFunctionExpansion').text = 'AnySuitable' + # Size-preserving flags + if '/Os' in o_flags: + ET.SubElement(clconf, 'FavorSizeOrSpeed').text = 'Size' + # Note: setting FavorSizeOrSpeed with clang-cl conflicts with /Od and can make debugging difficult, so don't. + elif '/Od' not in o_flags: + ET.SubElement(clconf, 'FavorSizeOrSpeed').text = 'Speed' + # Note: SuppressStartupBanner is /NOLOGO and is 'true' by default + self.generate_lang_standard_info(file_args, clconf) + + resourcecompile = ET.SubElement(compiles, 'ResourceCompile') + ET.SubElement(resourcecompile, 'PreprocessorDefinitions') + + # Linker options + link = ET.SubElement(compiles, 'Link') + extra_link_args = compiler.compiler_args() + # FIXME: Can these buildtype linker args be added as tags in the + # vcxproj file (similar to buildtype compiler args) instead of in + # AdditionalOptions? + extra_link_args += compiler.get_buildtype_linker_args(self.buildtype) + # Generate Debug info + if self.debug: + self.generate_debug_information(link) + else: + ET.SubElement(link, 'GenerateDebugInformation').text = 'false' + if not isinstance(target, build.StaticLibrary): + if isinstance(target, build.SharedModule): + extra_link_args += compiler.get_std_shared_module_link_args(target.get_options()) + # Add link args added using add_project_link_arguments() + extra_link_args += self.build.get_project_link_args(compiler, target.subproject, target.for_machine) + # Add link args added using add_global_link_arguments() + # These override per-project link arguments + extra_link_args += self.build.get_global_link_args(compiler, target.for_machine) + # Link args added from the env: LDFLAGS, or the cross file. We want + # these to override all the defaults but not the per-target link + # args. + extra_link_args += self.environment.coredata.get_external_link_args( + target.for_machine, compiler.get_language()) + # Only non-static built targets need link args and link dependencies + extra_link_args += target.link_args + # External deps must be last because target link libraries may depend on them. 
+ for dep in target.get_external_deps(): + # Extend without reordering or de-dup to preserve `-L -l` sets + # https://github.com/mesonbuild/meson/issues/1718 + if dep.name == 'openmp': + ET.SubElement(clconf, 'OpenMPSupport').text = 'true' + else: + extra_link_args.extend_direct(dep.get_link_args()) + for d in target.get_dependencies(): + if isinstance(d, build.StaticLibrary): + for dep in d.get_external_deps(): + if dep.name == 'openmp': + ET.SubElement(clconf, 'OpenMPSupport').text = 'true' + else: + extra_link_args.extend_direct(dep.get_link_args()) + # Add link args for c_* or cpp_* build options. Currently this only + # adds c_winlibs and cpp_winlibs when building for Windows. This needs + # to be after all internal and external libraries so that unresolved + # symbols from those can be found here. This is needed when the + # *_winlibs that we want to link to are static mingw64 libraries. + extra_link_args += compiler.get_option_link_args(target.get_options()) + (additional_libpaths, additional_links, extra_link_args) = self.split_link_args(extra_link_args.to_native()) + + # Add more libraries to be linked if needed + for t in target.get_dependencies(): + if isinstance(t, build.CustomTargetIndex): + # We don't need the actual project here, just the library name + lobj = t + else: + lobj = self.build.targets[t.get_id()] + linkname = os.path.join(down, self.get_target_filename_for_linking(lobj)) + if t in target.link_whole_targets: + if compiler.id == 'msvc' and version_compare(compiler.version, '<19.00.23918'): + # Expand our object lists manually if we are on pre-Visual Studio 2015 Update 2 + l = t.extract_all_objects(False) + + # Unfortunately, we can't use self.object_filename_from_source() + for gen in l.genlist: + for src in gen.get_outputs(): + if self.environment.is_source(src): + path = self.get_target_generated_dir(t, gen, src) + gen_src_ext = '.' + os.path.splitext(path)[1][1:] + extra_link_args.append(path[:-len(gen_src_ext)] + '.obj') + + for src in l.srclist: + obj_basename = None + if self.environment.is_source(src): + obj_basename = self.object_filename_from_source(t, src) + target_private_dir = self.relpath(self.get_target_private_dir(t), + self.get_target_dir(t)) + rel_obj = os.path.join(target_private_dir, obj_basename) + extra_link_args.append(rel_obj) + + extra_link_args.extend(self.flatten_object_list(t)) + else: + # /WHOLEARCHIVE:foo must go into AdditionalOptions + extra_link_args += compiler.get_link_whole_for(linkname) + # To force Visual Studio to build this project even though it + # has no sources, we include a reference to the vcxproj file + # that builds this target. Technically we should add this only + # if the current target has no sources, but it doesn't hurt to + # have 'extra' references. + trelpath = self.get_target_dir_relative_to(t, target) + tvcxproj = os.path.join(trelpath, t.get_id() + '.vcxproj') + tid = self.environment.coredata.target_guids[t.get_id()] + self.add_project_reference(root, tvcxproj, tid, link_outputs=True) + # Mark the dependency as already handled to not have + # multiple references to the same target. 
+                self.handled_target_deps[target.get_id()].append(t.get_id())
+            else:
+                # Other libraries go into AdditionalDependencies
+                if linkname not in additional_links:
+                    additional_links.append(linkname)
+        for lib in self.get_custom_target_provided_libraries(target):
+            additional_links.append(self.relpath(lib, self.get_target_dir(target)))
+
+        if len(extra_link_args) > 0:
+            extra_link_args.append('%(AdditionalOptions)')
+            ET.SubElement(link, "AdditionalOptions").text = ' '.join(extra_link_args)
+        if len(additional_libpaths) > 0:
+            additional_libpaths.insert(0, '%(AdditionalLibraryDirectories)')
+            ET.SubElement(link, 'AdditionalLibraryDirectories').text = ';'.join(additional_libpaths)
+        if len(additional_links) > 0:
+            additional_links.append('%(AdditionalDependencies)')
+            ET.SubElement(link, 'AdditionalDependencies').text = ';'.join(additional_links)
+        ofile = ET.SubElement(link, 'OutputFile')
+        ofile.text = f'$(OutDir){target.get_filename()}'
+        subsys = ET.SubElement(link, 'SubSystem')
+        subsys.text = subsystem
+        if isinstance(target, (build.SharedLibrary, build.Executable)) and target.get_import_filename():
+            # DLLs built with MSVC always have an import library except when
+            # they're data-only DLLs, but we don't support those yet.
+            ET.SubElement(link, 'ImportLibrary').text = target.get_import_filename()
+        if isinstance(target, build.SharedLibrary):
+            # Add module definitions file, if provided
+            if target.vs_module_defs:
+                relpath = os.path.join(down, target.vs_module_defs.rel_to_builddir(self.build_to_src))
+                ET.SubElement(link, 'ModuleDefinitionFile').text = relpath
+        if self.debug:
+            pdb = ET.SubElement(link, 'ProgramDataBaseFileName')
+            pdb.text = f'$(OutDir){target.name}.pdb'
+        targetmachine = ET.SubElement(link, 'TargetMachine')
+        if target.for_machine is MachineChoice.BUILD:
+            targetplatform = platform.lower()
+        else:
+            targetplatform = self.platform.lower()
+        if targetplatform == 'win32':
+            targetmachine.text = 'MachineX86'
+        elif targetplatform == 'x64' or detect_microsoft_gdk(targetplatform):
+            targetmachine.text = 'MachineX64'
+        elif targetplatform == 'arm':
+            targetmachine.text = 'MachineARM'
+        elif targetplatform == 'arm64':
+            targetmachine.text = 'MachineARM64'
+        elif targetplatform == 'arm64ec':
+            targetmachine.text = 'MachineARM64EC'
+        else:
+            raise MesonException('Unsupported Visual Studio target machine: ' + targetplatform)
+        # /nologo
+        ET.SubElement(link, 'SuppressStartupBanner').text = 'true'
+        # /release
+        if not target.get_option(OptionKey('debug')):
+            ET.SubElement(link, 'SetChecksum').text = 'true'
+
+    # Visual Studio doesn't simply allow the src files of a project to be added with the 'Condition=...' attribute,
+    # to allow us to point to the different debug/debugoptimized/release sets of generated src files for each of
+    # the solution's configurations.  Similarly, 'ItemGroup' also doesn't support 'Condition'.  So, without knowing
+    # a better (simple) alternative, for now, we'll repoint these generated sources (which will be incorrectly
+    # pointing to non-existent files under our '[builddir]_vs' directory) to the appropriate location under one of
+    # our buildtype build directories (e.g. '[builddir]_debug').
+    # This will at least allow the user to open the files of generated sources listed in the solution explorer,
+    # once a build/compile has generated these sources.
+    #
+    # This modifies the paths in 'gen_files' in place, as opposed to returning a new list of modified paths.
+ def relocate_generated_file_paths_to_concrete_build_dir(self, gen_files: T.List[str], target: T.Union[build.Target, build.CustomTargetIndex]) -> None: + (_, build_dir_tail) = os.path.split(self.src_to_build) + meson_build_dir_for_buildtype = build_dir_tail[:-2] + coredata.get_genvs_default_buildtype_list()[0] # Get the first buildtype suffixed dir (i.e. '[builddir]_debug') from '[builddir]_vs' + # Relative path from this .vcxproj to the directory containing the set of '..._[debug/debugoptimized/release]' setup meson build dirs. + proj_to_build_root = self.target_to_build_root(target) + proj_to_multiconfigured_builds_parent_dir = os.path.join(proj_to_build_root, '..') + proj_to_build_dir_for_buildtype = str(os.path.join(proj_to_multiconfigured_builds_parent_dir, meson_build_dir_for_buildtype)) + relocate_to_concrete_builddir_target = os.path.normpath(os.path.join(proj_to_build_dir_for_buildtype, self.get_target_dir(target))) + for idx, file_path in enumerate(gen_files): + gen_files[idx] = os.path.normpath(os.path.join(relocate_to_concrete_builddir_target, file_path)) + + # Returns bool indicating whether the .vcxproj has been generated. + # Under some circumstances, it's unnecessary to create some .vcxprojs, so, when generating the .sln, + # we need to respect that not all targets will have generated a project. + def gen_vcxproj(self, target: build.BuildTarget, ofname: str, guid: str, vslite_ctx: dict = None) -> bool: + mlog.debug(f'Generating vcxproj {target.name}.') + subsystem = 'Windows' + self.handled_target_deps[target.get_id()] = [] + + if self.gen_lite: + if not isinstance(target, build.BuildTarget): + # Since we're going to delegate all building to the one true meson build command, we don't need + # to generate .vcxprojs for targets that don't add any source files or just perform custom build + # commands. These are targets of types CustomTarget or RunTarget. So let's just skip generating + # these otherwise insubstantial non-BuildTarget targets. + return False + conftype = 'Makefile' + elif isinstance(target, build.Executable): + conftype = 'Application' + if target.gui_app is not None: + if not target.gui_app: + subsystem = 'Console' + else: + # If someone knows how to set the version properly, + # please send a patch. 
+ subsystem = target.win_subsystem.split(',')[0] + elif isinstance(target, build.StaticLibrary): + conftype = 'StaticLibrary' + elif isinstance(target, build.SharedLibrary): + conftype = 'DynamicLibrary' + elif isinstance(target, build.CustomTarget): + self.gen_custom_target_vcxproj(target, ofname, guid) + return True + elif isinstance(target, build.RunTarget): + self.gen_run_target_vcxproj(target, ofname, guid) + return True + elif isinstance(target, build.CompileTarget): + self.gen_compile_target_vcxproj(target, ofname, guid) + return True + else: + raise MesonException(f'Unknown target type for {target.get_basename()}') + + (sources, headers, objects, _languages) = self.split_sources(target.sources) + if target.is_unity: + sources = self.generate_unity_files(target, sources) + if target.for_machine is MachineChoice.BUILD: + platform = self.build_platform + else: + platform = self.platform + + tfilename = os.path.splitext(target.get_filename()) + + (root, type_config) = self.create_basic_project(tfilename[0], + temp_dir=target.get_id(), + guid=guid, + conftype=conftype, + target_ext=tfilename[1], + target_platform=platform) + + # vcxproj.filters file + root_filter = self.create_basic_project_filters() + + generated_files, custom_target_output_files, generated_files_include_dirs = self.generate_custom_generator_commands( + target, root) + (gen_src, gen_hdrs, gen_objs, _gen_langs) = self.split_sources(generated_files) + (custom_src, custom_hdrs, custom_objs, _custom_langs) = self.split_sources(custom_target_output_files) + gen_src += custom_src + gen_hdrs += custom_hdrs + + compiler = self._get_cl_compiler(target) + build_args = Vs2010Backend.get_build_args(compiler, self.buildtype, self.optimization, self.debug, self.sanitize) + + assert isinstance(target, (build.Executable, build.SharedLibrary, build.StaticLibrary, build.SharedModule)), 'for mypy' + # Prefix to use to access the build root from the vcxproj dir + proj_to_build_root = self.target_to_build_root(target) + # Prefix to use to access the source tree's root from the vcxproj dir + proj_to_src_root = os.path.join(proj_to_build_root, self.build_to_src) + # Prefix to use to access the source tree's subdir from the vcxproj dir + proj_to_src_dir = os.path.join(proj_to_src_root, self.get_target_dir(target)) + + (target_args, file_args), (target_defines, file_defines), (target_inc_dirs, file_inc_dirs) = self.get_args_defines_and_inc_dirs( + target, compiler, generated_files_include_dirs, proj_to_src_root, proj_to_src_dir, build_args) + + if self.gen_lite: + assert vslite_ctx is not None + primary_src_lang = get_primary_source_lang(target.sources, custom_src) + self.add_gen_lite_makefile_vcxproj_elements(root, platform, tfilename[1], vslite_ctx, target, proj_to_build_root, primary_src_lang) + else: + self.add_non_makefile_vcxproj_elements(root, type_config, target, platform, subsystem, build_args, target_args, target_defines, target_inc_dirs, file_args) + + meson_file_group = ET.SubElement(root, 'ItemGroup') + ET.SubElement(meson_file_group, 'None', Include=os.path.join(proj_to_src_dir, build_filename)) + + # Visual Studio can't load projects that present duplicated items. Filter them out + # by keeping track of already added paths. 
+        def path_normalize_add(path, lis):
+            normalized = os.path.normcase(os.path.normpath(path))
+            if normalized not in lis:
+                lis.append(normalized)
+                return True
+            else:
+                return False
+
+        pch_sources = {}
+        if self.target_uses_pch(target):
+            for lang in ['c', 'cpp']:
+                pch = target.get_pch(lang)
+                if not pch:
+                    continue
+                if compiler.id == 'msvc':
+                    if len(pch) == 1:
+                        # Auto generate PCH.
+                        src = os.path.join(proj_to_build_root, self.create_msvc_pch_implementation(target, lang, pch[0]))
+                        pch_header_dir = os.path.dirname(os.path.join(proj_to_src_dir, pch[0]))
+                    else:
+                        src = os.path.join(proj_to_src_dir, pch[1])
+                        pch_header_dir = None
+                    pch_sources[lang] = [pch[0], src, lang, pch_header_dir]
+                else:
+                    # I don't know whether it's relevant, but let's handle other compilers
+                    # used with a VS backend
+                    pch_sources[lang] = [pch[0], None, lang, None]
+
+        list_filters_path = set()
+
+        previous_includes = []
+        if len(headers) + len(gen_hdrs) + len(target.extra_files) + len(pch_sources) > 0:
+            if self.gen_lite and gen_hdrs:
+                # Although we're constructing our .vcxproj under our '..._vs' directory, we want to reference generated files
+                # in our concrete build directories (e.g. '..._debug'), where generated files will exist after building.
+                self.relocate_generated_file_paths_to_concrete_build_dir(gen_hdrs, target)
+
+            # Filter information
+            filter_group_include = ET.SubElement(root_filter, 'ItemGroup')
+
+            inc_hdrs = ET.SubElement(root, 'ItemGroup')
+            for h in headers:
+                relpath = os.path.join(proj_to_build_root, h.rel_to_builddir(self.build_to_src))
+                if path_normalize_add(relpath, previous_includes):
+                    self.add_filter_info(list_filters_path, filter_group_include, 'ClInclude', relpath, h.subdir)
+                    ET.SubElement(inc_hdrs, 'CLInclude', Include=relpath)
+            for h in gen_hdrs:
+                if path_normalize_add(h, previous_includes):
+                    self.add_filter_info(list_filters_path, filter_group_include, 'ClInclude', h)
+                    ET.SubElement(inc_hdrs, 'CLInclude', Include=h)
+            for h in target.extra_files:
+                relpath = os.path.join(proj_to_build_root, h.rel_to_builddir(self.build_to_src))
+                if path_normalize_add(relpath, previous_includes):
+                    self.add_filter_info(list_filters_path, filter_group_include, 'ClInclude', relpath, h.subdir)
+                    ET.SubElement(inc_hdrs, 'CLInclude', Include=relpath)
+            for headers in pch_sources.values():
+                path = os.path.join(proj_to_src_dir, headers[0])
+                if path_normalize_add(path, previous_includes):
+                    self.add_filter_info(list_filters_path, filter_group_include, 'ClInclude', path, 'pch')
+                    ET.SubElement(inc_hdrs, 'CLInclude', Include=path)
+
+        previous_sources = []
+        if len(sources) + len(gen_src) + len(pch_sources) > 0:
+            if self.gen_lite:
+                # Get data to fill in intellisense fields for sources that can't reference the project-wide values
+                defs_paths_opts_per_lang_and_buildtype = get_non_primary_lang_intellisense_fields(
+                    vslite_ctx,
+                    target.get_id(),
+                    primary_src_lang)
+                if gen_src:
+                    # Although we're constructing our .vcxproj under our '..._vs' directory, we want to reference generated files
+                    # in our concrete build directories (e.g. '..._debug'), where generated files will exist after building.
+ self.relocate_generated_file_paths_to_concrete_build_dir(gen_src, target) + + # Filter information + filter_group_compile = ET.SubElement(root_filter, 'ItemGroup') + + inc_src = ET.SubElement(root, 'ItemGroup') + for s in sources: + relpath = os.path.join(proj_to_build_root, s.rel_to_builddir(self.build_to_src)) + if path_normalize_add(relpath, previous_sources): + self.add_filter_info(list_filters_path, filter_group_compile, 'CLCompile', relpath, s.subdir) + inc_cl = ET.SubElement(inc_src, 'CLCompile', Include=relpath) + if self.gen_lite: + self.add_project_nmake_defs_incs_and_opts(inc_cl, relpath, defs_paths_opts_per_lang_and_buildtype, platform) + else: + lang = Vs2010Backend.lang_from_source_file(s) + self.add_pch(pch_sources, lang, inc_cl) + self.add_additional_options(lang, inc_cl, file_args) + self.add_preprocessor_defines(lang, inc_cl, file_defines) + self.add_include_dirs(lang, inc_cl, file_inc_dirs) + ET.SubElement(inc_cl, 'ObjectFileName').text = "$(IntDir)" + \ + self.object_filename_from_source(target, s) + for s in gen_src: + if path_normalize_add(s, previous_sources): + self.add_filter_info(list_filters_path, filter_group_compile, 'CLCompile', s) + inc_cl = ET.SubElement(inc_src, 'CLCompile', Include=s) + if self.gen_lite: + self.add_project_nmake_defs_incs_and_opts(inc_cl, s, defs_paths_opts_per_lang_and_buildtype, platform) + else: + lang = Vs2010Backend.lang_from_source_file(s) + self.add_pch(pch_sources, lang, inc_cl) + self.add_additional_options(lang, inc_cl, file_args) + self.add_preprocessor_defines(lang, inc_cl, file_defines) + self.add_include_dirs(lang, inc_cl, file_inc_dirs) + s = File.from_built_file(target.get_subdir(), s) + ET.SubElement(inc_cl, 'ObjectFileName').text = "$(IntDir)" + \ + self.object_filename_from_source(target, s) + for lang, headers in pch_sources.items(): + impl = headers[1] + if impl and path_normalize_add(impl, previous_sources): + self.add_filter_info(list_filters_path, filter_group_compile, 'CLCompile', impl, 'pch') + inc_cl = ET.SubElement(inc_src, 'CLCompile', Include=impl) + self.create_pch(pch_sources, lang, inc_cl) + if self.gen_lite: + self.add_project_nmake_defs_incs_and_opts(inc_cl, impl, defs_paths_opts_per_lang_and_buildtype, platform) + else: + self.add_additional_options(lang, inc_cl, file_args) + self.add_preprocessor_defines(lang, inc_cl, file_defines) + pch_header_dir = pch_sources[lang][3] + if pch_header_dir: + inc_dirs = copy.deepcopy(file_inc_dirs) + inc_dirs[lang] = [pch_header_dir] + inc_dirs[lang] + else: + inc_dirs = file_inc_dirs + self.add_include_dirs(lang, inc_cl, inc_dirs) + # XXX: Do we need to set the object file name here too? 
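+
+        # For illustration (hedged; the subdir name is hypothetical), the filters file written
+        # below groups project items by their original source subdir, e.g. -
+        #    <ItemGroup>
+        #      <Filter Include="some\subdir">
+        #        <UniqueIdentifier>{...uuid...}</UniqueIdentifier>
+        #      </Filter>
+        #    </ItemGroup>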
+        # Filter information
+        filter_group = ET.SubElement(root_filter, 'ItemGroup')
+        for filter_dir in list_filters_path:
+            filter = ET.SubElement(filter_group, 'Filter', Include=filter_dir)
+            ET.SubElement(filter, 'UniqueIdentifier').text = '{' + str(uuid.uuid4()) + '}'
+
+        additional_objects = []
+        for o in self.flatten_object_list(target, proj_to_build_root)[0]:
+            assert isinstance(o, str)
+            additional_objects.append(o)
+        for o in custom_objs:
+            additional_objects.append(o)
+
+        previous_objects = []
+        if self.has_objects(objects, additional_objects, gen_objs):
+            inc_objs = ET.SubElement(root, 'ItemGroup')
+            for s in objects:
+                relpath = os.path.join(proj_to_build_root, s.rel_to_builddir(self.build_to_src))
+                if path_normalize_add(relpath, previous_objects):
+                    ET.SubElement(inc_objs, 'Object', Include=relpath)
+            for s in additional_objects:
+                if path_normalize_add(s, previous_objects):
+                    ET.SubElement(inc_objs, 'Object', Include=s)
+            self.add_generated_objects(inc_objs, gen_objs)
+
+        ET.SubElement(root, 'Import', Project=r'$(VCTargetsPath)\Microsoft.Cpp.targets')
+        self.add_regen_dependency(root)
+        if not self.gen_lite:
+            # Injecting further target dependencies into this vcxproj implies and forces a Visual Studio BUILD dependency,
+            # which we don't want when using 'genvslite'.  A gen_lite build has as little involvement with Visual Studio's
+            # build system as possible.
+            self.add_target_deps(root, target)
+        self._prettyprint_vcxproj_xml(ET.ElementTree(root), ofname)
+        self._prettyprint_vcxproj_xml(ET.ElementTree(root_filter), ofname + '.filters')
+        return True
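+
+    # For orientation (hedged; 'somebuilddir' and 'srcdir' are illustrative names), a
+    # 'meson setup --genvslite vs2022 somebuilddir srcdir' layout, which the utility
+    # projects below operate on, looks like -
+    #    somebuilddir_debug\             (ninja build dir, one per buildtype)
+    #    somebuilddir_debugoptimized\
+    #    somebuilddir_release\
+    #    somebuilddir_vs\                (this solution and its vcxprojs, e.g. RECONFIGURE.vcxproj)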
+    def gen_regenproj(self):
+        # To fully adapt the REGEN work for a 'genvslite' solution, to check timestamps, settings, and regenerate the
+        # '[builddir]_vs' solution/vcxprojs, as well as regenerating the accompanying buildtype-suffixed ninja build
+        # directories (from which we need to first collect correct, updated preprocessor defs and compiler options in
+        # order to fill in the regenerated solution's intellisense settings) would require some non-trivial intrusion
+        # into the 'meson --internal regencheck ./meson-private' execution path (and perhaps also the '--internal
+        # regenerate' and even 'meson setup --reconfigure' code).  So, for now, we'll instead give the user a simpler
+        # 'reconfigure' utility project that just runs 'meson setup --reconfigure [builddir]_[buildtype] [srcdir]' on
+        # each of the ninja build dirs.
+        #
+        # FIXME: That will keep the building and compiling correctly configured but obviously won't update the
+        # solution and vcxprojs, which may allow solution src files and intellisense options to go out-of-date; the
+        # user would still have to manually 'meson setup --genvslite [vsxxxx] [builddir] [srcdir]' to fully regenerate
+        # a complete and correct solution.
+        if self.gen_lite:
+            project_name = 'RECONFIGURE'
+            ofname = os.path.join(self.environment.get_build_dir(), 'RECONFIGURE.vcxproj')
+            conftype = 'Makefile'
+            # I find the REGEN project doesn't work; it fails to invoke the appropriate -
+            #    python meson.py --internal regencheck builddir\meson-private
+            # command, despite the fact that manually running such a command in a shell runs just fine.
+            # Running/building the regen project produces the error -
+            #    ...Microsoft.CppBuild.targets(460,5): error MSB8020: The build tools for ClangCL (Platform Toolset = 'ClangCL') cannot be found. To build using the ClangCL build tools, please install ...
+            # Not sure why, but a simple makefile-style project that executes the full '...regencheck...' command actually works (and seems a little simpler).
+            # Although I've limited this change to only happen under '--genvslite', perhaps ...
+            # FIXME: Should all utility projects use the simpler and less problematic makefile-style project?
+        else:
+            project_name = 'REGEN'
+            ofname = os.path.join(self.environment.get_build_dir(), 'REGEN.vcxproj')
+            conftype = 'Utility'
+
+        guid = self.environment.coredata.regen_guid
+        (root, type_config) = self.create_basic_project(project_name,
+                                                        temp_dir='regen-temp',
+                                                        guid=guid,
+                                                        conftype=conftype
+                                                        )
+
+        if self.gen_lite:
+            (nmake_base_meson_command, exe_search_paths) = Vs2010Backend.get_nmake_base_meson_command_and_exe_search_paths()
+            all_configs_prop_group = ET.SubElement(root, 'PropertyGroup')
+
+            # Multi-line command to reconfigure all buildtype-suffixed build dirs
+            multi_config_buildtype_list = coredata.get_genvs_default_buildtype_list()
+            (_, build_dir_tail) = os.path.split(self.src_to_build)
+            proj_to_multiconfigured_builds_parent_dir = '..' # We know this RECONFIGURE.vcxproj will always be in the '[builddir]_vs' dir.
+            proj_to_src_dir = self.build_to_src
+            reconfigure_all_cmd = ''
+            for buildtype in multi_config_buildtype_list:
+                meson_build_dir_for_buildtype = build_dir_tail[:-2] + buildtype # Get the buildtype suffixed 'builddir_[debug/release/etc]' from 'builddir_vs', for example.
+                proj_to_build_dir_for_buildtype = str(os.path.join(proj_to_multiconfigured_builds_parent_dir, meson_build_dir_for_buildtype))
+                reconfigure_all_cmd += f'{nmake_base_meson_command} setup --reconfigure "{proj_to_build_dir_for_buildtype}" "{proj_to_src_dir}"\n'
+            ET.SubElement(all_configs_prop_group, 'NMakeBuildCommandLine').text = reconfigure_all_cmd
+            ET.SubElement(all_configs_prop_group, 'NMakeReBuildCommandLine').text = reconfigure_all_cmd
+            ET.SubElement(all_configs_prop_group, 'NMakeCleanCommandLine').text = ''
+
+            # Need to set the 'ExecutablePath' element for the above NMake... commands to be able to invoke the meson command.
+            ET.SubElement(all_configs_prop_group, 'ExecutablePath').text = exe_search_paths
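+
+            # For example (hedged; hypothetical names), with a '--genvslite' setup in
+            # 'somebuilddir_vs' and meson run via python, 'reconfigure_all_cmd' ends up as
+            # one line per buildtype -
+            #    python.exe "C:\tools\meson.py" setup --reconfigure "..\somebuilddir_debug" "..\..\srcdir"
+            #    python.exe "C:\tools\meson.py" setup --reconfigure "..\somebuilddir_debugoptimized" "..\..\srcdir"
+            #    python.exe "C:\tools\meson.py" setup --reconfigure "..\somebuilddir_release" "..\..\srcdir"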
+        else:
+            action = ET.SubElement(root, 'ItemDefinitionGroup')
+            midl = ET.SubElement(action, 'Midl')
+            ET.SubElement(midl, "AdditionalIncludeDirectories").text = '%(AdditionalIncludeDirectories)'
+            ET.SubElement(midl, "OutputDirectory").text = '$(IntDir)'
+            ET.SubElement(midl, 'HeaderFileName').text = '%(Filename).h'
+            ET.SubElement(midl, 'TypeLibraryName').text = '%(Filename).tlb'
+            ET.SubElement(midl, 'InterfaceIdentifierFilename').text = '%(Filename)_i.c'
+            ET.SubElement(midl, 'ProxyFileName').text = '%(Filename)_p.c'
+            regen_command = self.environment.get_build_command() + ['--internal', 'regencheck']
+            cmd_templ = '''call %s > NUL
+"%s" "%s"'''
+            regen_command = cmd_templ % \
+                (self.get_vcvars_command(), '" "'.join(regen_command), self.environment.get_scratch_dir())
+            self.add_custom_build(root, 'regen', regen_command, deps=self.get_regen_filelist(),
+                                  outputs=[Vs2010Backend.get_regen_stampfile(self.environment.get_build_dir())],
+                                  msg='Checking whether solution needs to be regenerated.')
+
+        ET.SubElement(root, 'Import', Project=r'$(VCTargetsPath)\Microsoft.Cpp.targets')
+        ET.SubElement(root, 'ImportGroup', Label='ExtensionTargets')
+        self._prettyprint_vcxproj_xml(ET.ElementTree(root), ofname)
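+
+    # For reference (hedged; hypothetical paths), the REGEN custom build's command expands to
+    # something like -
+    #    call <vcvars command> > NUL
+    #    "C:\py\python.exe" "C:\tools\meson.py" "--internal" "regencheck" "C:\somebuilddir\meson-private"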
+    def gen_testproj(self):
+        project_name = 'RUN_TESTS'
+        ofname = os.path.join(self.environment.get_build_dir(), f'{project_name}.vcxproj')
+        guid = self.environment.coredata.test_guid
+        if self.gen_lite:
+            (root, type_config) = self.create_basic_project(project_name,
+                                                            temp_dir='test-temp',
+                                                            guid=guid,
+                                                            conftype='Makefile'
+                                                            )
+            (nmake_base_meson_command, exe_search_paths) = Vs2010Backend.get_nmake_base_meson_command_and_exe_search_paths()
+            multi_config_buildtype_list = coredata.get_genvs_default_buildtype_list()
+            (_, build_dir_tail) = os.path.split(self.src_to_build)
+            proj_to_multiconfigured_builds_parent_dir = '..' # We know this .vcxproj will always be in the '[builddir]_vs' dir.
+            # Add appropriate 'test' commands for the 'build' action of this project, for all buildtypes
+            for buildtype in multi_config_buildtype_list:
+                meson_build_dir_for_buildtype = build_dir_tail[:-2] + buildtype # Get the buildtype suffixed 'builddir_[debug/release/etc]' from 'builddir_vs', for example.
+                proj_to_build_dir_for_buildtype = str(os.path.join(proj_to_multiconfigured_builds_parent_dir, meson_build_dir_for_buildtype))
+                test_cmd = f'{nmake_base_meson_command} test -C "{proj_to_build_dir_for_buildtype}" --no-rebuild'
+                if not self.environment.coredata.get_option(OptionKey('stdsplit')):
+                    test_cmd += ' --no-stdsplit'
+                if self.environment.coredata.get_option(OptionKey('errorlogs')):
+                    test_cmd += ' --print-errorlogs'
+                condition = f'\'$(Configuration)|$(Platform)\'==\'{buildtype}|{self.platform}\''
+                prop_group = ET.SubElement(root, 'PropertyGroup', Condition=condition)
+                ET.SubElement(prop_group, 'NMakeBuildCommandLine').text = test_cmd
+                # Need to set the 'ExecutablePath' element for the NMake... commands to be able to invoke the meson command.
+                ET.SubElement(prop_group, 'ExecutablePath').text = exe_search_paths
+        else:
+            (root, type_config) = self.create_basic_project(project_name,
+                                                            temp_dir='test-temp',
+                                                            guid=guid)
+
+            action = ET.SubElement(root, 'ItemDefinitionGroup')
+            midl = ET.SubElement(action, 'Midl')
+            ET.SubElement(midl, "AdditionalIncludeDirectories").text = '%(AdditionalIncludeDirectories)'
+            ET.SubElement(midl, "OutputDirectory").text = '$(IntDir)'
+            ET.SubElement(midl, 'HeaderFileName').text = '%(Filename).h'
+            ET.SubElement(midl, 'TypeLibraryName').text = '%(Filename).tlb'
+            ET.SubElement(midl, 'InterfaceIdentifierFilename').text = '%(Filename)_i.c'
+            ET.SubElement(midl, 'ProxyFileName').text = '%(Filename)_p.c'
+            # FIXME: No benchmarks?
+            test_command = self.environment.get_build_command() + ['test', '--no-rebuild']
+            if not self.environment.coredata.get_option(OptionKey('stdsplit')):
+                test_command += ['--no-stdsplit']
+            if self.environment.coredata.get_option(OptionKey('errorlogs')):
+                test_command += ['--print-errorlogs']
+            self.serialize_tests()
+            self.add_custom_build(root, 'run_tests', '"%s"' % ('" "'.join(test_command)))
+
+        ET.SubElement(root, 'Import', Project=r'$(VCTargetsPath)\Microsoft.Cpp.targets')
+        self.add_regen_dependency(root)
+        self._prettyprint_vcxproj_xml(ET.ElementTree(root), ofname)
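+
+    # In gen_lite mode, the RUN_TESTS 'build' action per buildtype therefore ends up as, e.g.
+    # (hedged; hypothetical names) -
+    #    meson.exe test -C "..\somebuilddir_debug" --no-rebuild --print-errorlogs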
+    def gen_installproj(self):
+        project_name = 'RUN_INSTALL'
+        ofname = os.path.join(self.environment.get_build_dir(), f'{project_name}.vcxproj')
+        guid = self.environment.coredata.install_guid
+        if self.gen_lite:
+            (root, type_config) = self.create_basic_project(project_name,
+                                                            temp_dir='install-temp',
+                                                            guid=guid,
+                                                            conftype='Makefile'
+                                                            )
+            (nmake_base_meson_command, exe_search_paths) = Vs2010Backend.get_nmake_base_meson_command_and_exe_search_paths()
+            multi_config_buildtype_list = coredata.get_genvs_default_buildtype_list()
+            (_, build_dir_tail) = os.path.split(self.src_to_build)
+            proj_to_multiconfigured_builds_parent_dir = '..' # We know this .vcxproj will always be in the '[builddir]_vs' dir.
+            # Add appropriate 'install' commands for the 'build' action of this project, for all buildtypes
+            for buildtype in multi_config_buildtype_list:
+                meson_build_dir_for_buildtype = build_dir_tail[:-2] + buildtype # Get the buildtype suffixed 'builddir_[debug/release/etc]' from 'builddir_vs', for example.
+                proj_to_build_dir_for_buildtype = str(os.path.join(proj_to_multiconfigured_builds_parent_dir, meson_build_dir_for_buildtype))
+                install_cmd = f'{nmake_base_meson_command} install -C "{proj_to_build_dir_for_buildtype}" --no-rebuild'
+                condition = f'\'$(Configuration)|$(Platform)\'==\'{buildtype}|{self.platform}\''
+                prop_group = ET.SubElement(root, 'PropertyGroup', Condition=condition)
+                ET.SubElement(prop_group, 'NMakeBuildCommandLine').text = install_cmd
+                # Need to set the 'ExecutablePath' element for the NMake... commands to be able to invoke the meson command.
+                ET.SubElement(prop_group, 'ExecutablePath').text = exe_search_paths
+        else:
+            self.create_install_data_files()
+
+            (root, type_config) = self.create_basic_project(project_name,
+                                                            temp_dir='install-temp',
+                                                            guid=guid)
+
+            action = ET.SubElement(root, 'ItemDefinitionGroup')
+            midl = ET.SubElement(action, 'Midl')
+            ET.SubElement(midl, "AdditionalIncludeDirectories").text = '%(AdditionalIncludeDirectories)'
+            ET.SubElement(midl, "OutputDirectory").text = '$(IntDir)'
+            ET.SubElement(midl, 'HeaderFileName').text = '%(Filename).h'
+            ET.SubElement(midl, 'TypeLibraryName').text = '%(Filename).tlb'
+            ET.SubElement(midl, 'InterfaceIdentifierFilename').text = '%(Filename)_i.c'
+            ET.SubElement(midl, 'ProxyFileName').text = '%(Filename)_p.c'
+            install_command = self.environment.get_build_command() + ['install', '--no-rebuild']
+            self.add_custom_build(root, 'run_install', '"%s"' % ('" "'.join(install_command)))
+
+        ET.SubElement(root, 'Import', Project=r'$(VCTargetsPath)\Microsoft.Cpp.targets')
+        self.add_regen_dependency(root)
+        self._prettyprint_vcxproj_xml(ET.ElementTree(root), ofname)
+
+    def add_custom_build(self, node: ET.Element, rulename: str, command: str, deps: T.Optional[T.List[str]] = None,
+                         outputs: T.Optional[T.List[str]] = None, msg: T.Optional[str] = None, verify_files: bool = True) -> None:
+        igroup = ET.SubElement(node, 'ItemGroup')
+        rulefile = os.path.join(self.environment.get_scratch_dir(), rulename + '.rule')
+        if not os.path.exists(rulefile):
+            with open(rulefile, 'w', encoding='utf-8') as f:
+                f.write("# Meson regen file.")
+        custombuild = ET.SubElement(igroup, 'CustomBuild', Include=rulefile)
+        if msg:
+            message = ET.SubElement(custombuild, 'Message')
+            message.text = msg
+        if not verify_files:
+            ET.SubElement(custombuild, 'VerifyInputsAndOutputsExist').text = 'false'
+
+        # If a command ever were to change the current directory or set local
+        # variables, this would need to be more complicated, as msbuild by
+        # default executes all CustomBuilds in a project using the same
+        # shell.  Right now such tasks are all done inside the meson_exe
+        # wrapper.  The trailing newline appears to be necessary to allow
+        # parallel custom builds to work.
+        ET.SubElement(custombuild, 'Command').text = f"{command}\n"
+
+        if not outputs:
+            # Use a nonexistent file to always consider the target out-of-date.
+            outputs = [self.nonexistent_file(os.path.join(self.environment.get_scratch_dir(),
+                                                          'outofdate.file'))]
+        ET.SubElement(custombuild, 'Outputs').text = ';'.join(outputs)
+        if deps:
+            ET.SubElement(custombuild, 'AdditionalInputs').text = ';'.join(deps)
+
+    @staticmethod
+    def nonexistent_file(prefix: str) -> str:
+        # Try numeric suffixes until we find a path that doesn't exist.
+        i = 0
+        file = prefix
+        while os.path.exists(file):
+            file = '%s%d' % (prefix, i)
+            i += 1
+        return file
+
+    def generate_debug_information(self, link: ET.Element) -> None:
+        # valid values for vs2015 are 'false', 'true', 'DebugFastLink'
+        ET.SubElement(link, 'GenerateDebugInformation').text = 'true'
+
+    def add_regen_dependency(self, root: ET.Element) -> None:
+        # For now, with 'genvslite' solutions, REGEN is replaced by the lighter-weight RECONFIGURE utility that is
+        # no longer a forced build dependency.
See comment in 'gen_regenproj' + if not self.gen_lite: + regen_vcxproj = os.path.join(self.environment.get_build_dir(), 'REGEN.vcxproj') + self.add_project_reference(root, regen_vcxproj, self.environment.coredata.regen_guid) + + def generate_lang_standard_info(self, file_args: T.Dict[str, CompilerArgs], clconf: ET.Element) -> None: + pass diff --git a/vendored-meson/meson/mesonbuild/backend/vs2012backend.py b/vendored-meson/meson/mesonbuild/backend/vs2012backend.py new file mode 100644 index 000000000000..76e5c40b0824 --- /dev/null +++ b/vendored-meson/meson/mesonbuild/backend/vs2012backend.py @@ -0,0 +1,45 @@ +# Copyright 2014-2016 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from __future__ import annotations + +import typing as T + +from .vs2010backend import Vs2010Backend +from ..mesonlib import MesonException + +if T.TYPE_CHECKING: + from ..build import Build + from ..interpreter import Interpreter + +class Vs2012Backend(Vs2010Backend): + + name = 'vs2012' + + def __init__(self, build: T.Optional[Build], interpreter: T.Optional[Interpreter]): + super().__init__(build, interpreter) + self.vs_version = '2012' + self.sln_file_version = '12.00' + self.sln_version_comment = '2012' + if self.environment is not None: + # TODO: we assume host == build + comps = self.environment.coredata.compilers.host + if comps and all(c.id == 'intel-cl' for c in comps.values()): + c = list(comps.values())[0] + if c.version.startswith('19'): + self.platform_toolset = 'Intel C++ Compiler 19.0' + else: + # We don't have support for versions older than 2019 right now. + raise MesonException('There is currently no support for ICL before 19, patches welcome.') + if self.platform_toolset is None: + self.platform_toolset = 'v110' diff --git a/vendored-meson/meson/mesonbuild/backend/vs2013backend.py b/vendored-meson/meson/mesonbuild/backend/vs2013backend.py new file mode 100644 index 000000000000..1fbde46ce4db --- /dev/null +++ b/vendored-meson/meson/mesonbuild/backend/vs2013backend.py @@ -0,0 +1,44 @@ +# Copyright 2014-2016 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+from __future__ import annotations + +from .vs2010backend import Vs2010Backend +from ..mesonlib import MesonException +import typing as T + +if T.TYPE_CHECKING: + from ..build import Build + from ..interpreter import Interpreter + +class Vs2013Backend(Vs2010Backend): + + name = 'vs2013' + + def __init__(self, build: T.Optional[Build], interpreter: T.Optional[Interpreter]): + super().__init__(build, interpreter) + self.vs_version = '2013' + self.sln_file_version = '12.00' + self.sln_version_comment = '2013' + if self.environment is not None: + # TODO: we assume host == build + comps = self.environment.coredata.compilers.host + if comps and all(c.id == 'intel-cl' for c in comps.values()): + c = list(comps.values())[0] + if c.version.startswith('19'): + self.platform_toolset = 'Intel C++ Compiler 19.0' + else: + # We don't have support for versions older than 2019 right now. + raise MesonException('There is currently no support for ICL before 19, patches welcome.') + if self.platform_toolset is None: + self.platform_toolset = 'v120' diff --git a/vendored-meson/meson/mesonbuild/backend/vs2015backend.py b/vendored-meson/meson/mesonbuild/backend/vs2015backend.py new file mode 100644 index 000000000000..8e4da363149a --- /dev/null +++ b/vendored-meson/meson/mesonbuild/backend/vs2015backend.py @@ -0,0 +1,45 @@ +# Copyright 2014-2016 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from __future__ import annotations + +import typing as T + +from .vs2010backend import Vs2010Backend +from ..mesonlib import MesonException + +if T.TYPE_CHECKING: + from ..build import Build + from ..interpreter import Interpreter + +class Vs2015Backend(Vs2010Backend): + + name = 'vs2015' + + def __init__(self, build: T.Optional[Build], interpreter: T.Optional[Interpreter]): + super().__init__(build, interpreter) + self.vs_version = '2015' + self.sln_file_version = '12.00' + self.sln_version_comment = '14' + if self.environment is not None: + # TODO: we assume host == build + comps = self.environment.coredata.compilers.host + if comps and all(c.id == 'intel-cl' for c in comps.values()): + c = list(comps.values())[0] + if c.version.startswith('19'): + self.platform_toolset = 'Intel C++ Compiler 19.0' + else: + # We don't have support for versions older than 2019 right now. + raise MesonException('There is currently no support for ICL before 19, patches welcome.') + if self.platform_toolset is None: + self.platform_toolset = 'v140' diff --git a/vendored-meson/meson/mesonbuild/backend/vs2017backend.py b/vendored-meson/meson/mesonbuild/backend/vs2017backend.py new file mode 100644 index 000000000000..375d660e35d4 --- /dev/null +++ b/vendored-meson/meson/mesonbuild/backend/vs2017backend.py @@ -0,0 +1,69 @@ +# Copyright 2014-2016 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+import os
+import typing as T
+import xml.etree.ElementTree as ET
+
+from .vs2010backend import Vs2010Backend
+from ..mesonlib import MesonException
+
+if T.TYPE_CHECKING:
+    from ..build import Build
+    from ..interpreter import Interpreter
+
+
+class Vs2017Backend(Vs2010Backend):
+
+    name = 'vs2017'
+
+    def __init__(self, build: T.Optional[Build], interpreter: T.Optional[Interpreter]):
+        super().__init__(build, interpreter)
+        self.vs_version = '2017'
+        self.sln_file_version = '12.00'
+        self.sln_version_comment = '15'
+        # We assume that host == build
+        if self.environment is not None:
+            comps = self.environment.coredata.compilers.host
+            if comps:
+                if comps and all(c.id == 'clang-cl' for c in comps.values()):
+                    self.platform_toolset = 'llvm'
+                elif comps and all(c.id == 'intel-cl' for c in comps.values()):
+                    c = list(comps.values())[0]
+                    if c.version.startswith('19'):
+                        self.platform_toolset = 'Intel C++ Compiler 19.0'
+                    else:
+                        # We don't have support for versions older than 2019 right now.
+                        raise MesonException('There is currently no support for ICL before 19, patches welcome.')
+        if self.platform_toolset is None:
+            self.platform_toolset = 'v141'
+        # WindowsSDKVersion should be set by command prompt.
+        sdk_version = os.environ.get('WindowsSDKVersion', None)
+        if sdk_version:
+            self.windows_target_platform_version = sdk_version.rstrip('\\')
+
+    def generate_debug_information(self, link):
+        # valid values for vs2017 are 'false', 'true', 'DebugFastLink', 'DebugFull'
+        ET.SubElement(link, 'GenerateDebugInformation').text = 'DebugFull'
+
+    def generate_lang_standard_info(self, file_args, clconf):
+        if 'cpp' in file_args:
+            optargs = [x for x in file_args['cpp'] if x.startswith('/std:c++')]
+            if optargs:
+                ET.SubElement(clconf, 'LanguageStandard').text = optargs[0].replace("/std:c++", "stdcpp")
+        if 'c' in file_args:
+            optargs = [x for x in file_args['c'] if x.startswith('/std:c')]
+            if optargs:
+                ET.SubElement(clconf, 'LanguageStandard_C').text = optargs[0].replace("/std:c", "stdc")
diff --git a/vendored-meson/meson/mesonbuild/backend/vs2019backend.py b/vendored-meson/meson/mesonbuild/backend/vs2019backend.py
new file mode 100644
index 000000000000..f01f7eceb8ca
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/backend/vs2019backend.py
@@ -0,0 +1,64 @@
+# Copyright 2014-2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+import os
+import typing as T
+import xml.etree.ElementTree as ET
+
+from .vs2010backend import Vs2010Backend
+
+if T.TYPE_CHECKING:
+    from ..build import Build
+    from ..interpreter import Interpreter
+
+
+class Vs2019Backend(Vs2010Backend):
+
+    name = 'vs2019'
+
+    def __init__(self, build: T.Optional[Build], interpreter: T.Optional[Interpreter]):
+        super().__init__(build, interpreter)
+        self.sln_file_version = '12.00'
+        self.sln_version_comment = 'Version 16'
+        if self.environment is not None:
+            comps = self.environment.coredata.compilers.host
+            if comps and all(c.id == 'clang-cl' for c in comps.values()):
+                self.platform_toolset = 'ClangCL'
+            elif comps and all(c.id == 'intel-cl' for c in comps.values()):
+                c = list(comps.values())[0]
+                if c.version.startswith('19'):
+                    self.platform_toolset = 'Intel C++ Compiler 19.0'
+                # We don't have support for versions older than 2019 right now.
+            if not self.platform_toolset:
+                self.platform_toolset = 'v142'
+            self.vs_version = '2019'
+        # WindowsSDKVersion should be set by command prompt.
+        sdk_version = os.environ.get('WindowsSDKVersion', None)
+        if sdk_version:
+            self.windows_target_platform_version = sdk_version.rstrip('\\')
+
+    def generate_debug_information(self, link):
+        # valid values for vs2019 are 'false', 'true', 'DebugFastLink', 'DebugFull'
+        ET.SubElement(link, 'GenerateDebugInformation').text = 'DebugFull'
+
+    def generate_lang_standard_info(self, file_args, clconf):
+        if 'cpp' in file_args:
+            optargs = [x for x in file_args['cpp'] if x.startswith('/std:c++')]
+            if optargs:
+                ET.SubElement(clconf, 'LanguageStandard').text = optargs[0].replace("/std:c++", "stdcpp")
+        if 'c' in file_args:
+            optargs = [x for x in file_args['c'] if x.startswith('/std:c')]
+            if optargs:
+                ET.SubElement(clconf, 'LanguageStandard_C').text = optargs[0].replace("/std:c", "stdc")
diff --git a/vendored-meson/meson/mesonbuild/backend/vs2022backend.py b/vendored-meson/meson/mesonbuild/backend/vs2022backend.py
new file mode 100644
index 000000000000..ea715d87d8e8
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/backend/vs2022backend.py
@@ -0,0 +1,64 @@
+# Copyright 2014-2021 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+import os
+import typing as T
+import xml.etree.ElementTree as ET
+
+from .vs2010backend import Vs2010Backend
+
+if T.TYPE_CHECKING:
+    from ..build import Build
+    from ..interpreter import Interpreter
+
+
+class Vs2022Backend(Vs2010Backend):
+
+    name = 'vs2022'
+
+    def __init__(self, build: T.Optional[Build], interpreter: T.Optional[Interpreter], gen_lite: bool = False):
+        super().__init__(build, interpreter, gen_lite=gen_lite)
+        self.sln_file_version = '12.00'
+        self.sln_version_comment = 'Version 17'
+        if self.environment is not None:
+            comps = self.environment.coredata.compilers.host
+            if comps and all(c.id == 'clang-cl' for c in comps.values()):
+                self.platform_toolset = 'ClangCL'
+            elif comps and all(c.id == 'intel-cl' for c in comps.values()):
+                c = list(comps.values())[0]
+                if c.version.startswith('19'):
+                    self.platform_toolset = 'Intel C++ Compiler 19.0'
+                # We don't have support for versions older than 2022 right now.
+            if not self.platform_toolset:
+                self.platform_toolset = 'v143'
+            self.vs_version = '2022'
+        # WindowsSDKVersion should be set by command prompt.
+        sdk_version = os.environ.get('WindowsSDKVersion', None)
+        if sdk_version:
+            self.windows_target_platform_version = sdk_version.rstrip('\\')
+
+    def generate_debug_information(self, link):
+        # valid values for vs2022 are 'false', 'true', 'DebugFastLink', 'DebugFull'
+        ET.SubElement(link, 'GenerateDebugInformation').text = 'DebugFull'
+
+    def generate_lang_standard_info(self, file_args, clconf):
+        if 'cpp' in file_args:
+            optargs = [x for x in file_args['cpp'] if x.startswith('/std:c++')]
+            if optargs:
+                ET.SubElement(clconf, 'LanguageStandard').text = optargs[0].replace("/std:c++", "stdcpp")
+        if 'c' in file_args:
+            optargs = [x for x in file_args['c'] if x.startswith('/std:c')]
+            if optargs:
+                ET.SubElement(clconf, 'LanguageStandard_C').text = optargs[0].replace("/std:c", "stdc")
diff --git a/vendored-meson/meson/mesonbuild/backend/xcodebackend.py b/vendored-meson/meson/mesonbuild/backend/xcodebackend.py
new file mode 100644
index 000000000000..bc4ba0bc21d6
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/backend/xcodebackend.py
@@ -0,0 +1,1718 @@
+# Copyright 2014-2021 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+# http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+import uuid, os, operator
+import typing as T
+
+from . import backends
+from .. import build
+from .. import dependencies
+from .. import mesonlib
+from .. 
import mlog +from ..mesonlib import MesonBugException, MesonException, OptionKey + +if T.TYPE_CHECKING: + from ..interpreter import Interpreter + +INDENT = '\t' +XCODETYPEMAP = {'c': 'sourcecode.c.c', + 'a': 'archive.ar', + 'cc': 'sourcecode.cpp.cpp', + 'cxx': 'sourcecode.cpp.cpp', + 'cpp': 'sourcecode.cpp.cpp', + 'c++': 'sourcecode.cpp.cpp', + 'm': 'sourcecode.c.objc', + 'mm': 'sourcecode.cpp.objcpp', + 'h': 'sourcecode.c.h', + 'hpp': 'sourcecode.cpp.h', + 'hxx': 'sourcecode.cpp.h', + 'hh': 'sourcecode.cpp.hh', + 'inc': 'sourcecode.c.h', + 'swift': 'sourcecode.swift', + 'dylib': 'compiled.mach-o.dylib', + 'o': 'compiled.mach-o.objfile', + 's': 'sourcecode.asm', + 'asm': 'sourcecode.asm', + } +LANGNAMEMAP = {'c': 'C', + 'cpp': 'CPLUSPLUS', + 'objc': 'OBJC', + 'objcpp': 'OBJCPLUSPLUS', + 'swift': 'SWIFT_' + } +OPT2XCODEOPT = {'plain': None, + '0': '0', + 'g': '0', + '1': '1', + '2': '2', + '3': '3', + 's': 's', + } +BOOL2XCODEBOOL = {True: 'YES', False: 'NO'} +LINKABLE_EXTENSIONS = {'.o', '.a', '.obj', '.so', '.dylib'} + +class FileTreeEntry: + + def __init__(self) -> None: + self.subdirs = {} + self.targets = [] + +class PbxArray: + def __init__(self) -> None: + self.items = [] + + def add_item(self, item: T.Union[PbxArrayItem, str], comment: str = '') -> None: + if isinstance(item, PbxArrayItem): + self.items.append(item) + else: + self.items.append(PbxArrayItem(item, comment)) + + def write(self, ofile: T.TextIO, indent_level: int) -> None: + ofile.write('(\n') + indent_level += 1 + for i in self.items: + if i.comment: + ofile.write(indent_level*INDENT + f'{i.value} {i.comment},\n') + else: + ofile.write(indent_level*INDENT + f'{i.value},\n') + indent_level -= 1 + ofile.write(indent_level*INDENT + ');\n') + +class PbxArrayItem: + def __init__(self, value: str, comment: str = ''): + self.value = value + if comment: + if '/*' in comment: + self.comment = comment + else: + self.comment = f'/* {comment} */' + else: + self.comment = comment + +class PbxComment: + def __init__(self, text: str): + assert isinstance(text, str) + assert '/*' not in text + self.text = f'/* {text} */' + + def write(self, ofile: T.TextIO, indent_level: int) -> None: + ofile.write(f'\n{self.text}\n') + +class PbxDictItem: + def __init__(self, key: str, value: T.Union[PbxArray, PbxDict, str, int], comment: str = ''): + self.key = key + self.value = value + if comment: + if '/*' in comment: + self.comment = comment + else: + self.comment = f'/* {comment} */' + else: + self.comment = comment + +class PbxDict: + def __init__(self) -> None: + # This class is a bit weird, because we want to write PBX dicts in + # defined order _and_ we want to write intermediate comments also in order. 
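+ # A rough sketch of the serialized form this class produces (ids and
+ # values illustrative only):
+ #
+ # {
+ # isa = PBXGroup;
+ # children = (
+ # 1234567890ABCDEF12345678 /* main.c */,
+ # );
+ # }
+ #
+ # `keys` only guards against duplicate keys; ordering lives in `items`.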
+ self.keys = set()
+ self.items = []
+
+ def add_item(self, key: str, value: T.Union[PbxArray, PbxDict, str, int], comment: str = '') -> None:
+ assert key not in self.keys
+ item = PbxDictItem(key, value, comment)
+ self.keys.add(key)
+ self.items.append(item)
+
+ def has_item(self, key):
+ return key in self.keys
+
+ def add_comment(self, comment: PbxComment) -> None:
+ assert isinstance(comment, PbxComment)
+ self.items.append(comment)
+
+ def write(self, ofile: T.TextIO, indent_level: int) -> None:
+ ofile.write('{\n')
+ indent_level += 1
+ for i in self.items:
+ if isinstance(i, PbxComment):
+ i.write(ofile, indent_level)
+ elif isinstance(i, PbxDictItem):
+ if isinstance(i.value, (str, int)):
+ if i.comment:
+ ofile.write(indent_level*INDENT + f'{i.key} = {i.value} {i.comment};\n')
+ else:
+ ofile.write(indent_level*INDENT + f'{i.key} = {i.value};\n')
+ elif isinstance(i.value, PbxDict):
+ if i.comment:
+ ofile.write(indent_level*INDENT + f'{i.key} {i.comment} = ')
+ else:
+ ofile.write(indent_level*INDENT + f'{i.key} = ')
+ i.value.write(ofile, indent_level)
+ elif isinstance(i.value, PbxArray):
+ if i.comment:
+ ofile.write(indent_level*INDENT + f'{i.key} {i.comment} = ')
+ else:
+ ofile.write(indent_level*INDENT + f'{i.key} = ')
+ i.value.write(ofile, indent_level)
+ else:
+ print(i)
+ print(i.key)
+ print(i.value)
+ raise RuntimeError('missing code')
+ else:
+ print(i)
+ raise RuntimeError('missing code2')
+
+ indent_level -= 1
+ ofile.write(indent_level*INDENT + '}')
+ if indent_level == 0:
+ ofile.write('\n')
+ else:
+ ofile.write(';\n')
+
+class XCodeBackend(backends.Backend):
+
+ name = 'xcode'
+
+ def __init__(self, build: T.Optional[build.Build], interpreter: T.Optional[Interpreter]):
+ super().__init__(build, interpreter)
+ self.project_uid = self.environment.coredata.lang_guids['default'].replace('-', '')[:24]
+ self.buildtype = T.cast('str', self.environment.coredata.get_option(OptionKey('buildtype')))
+ self.project_conflist = self.gen_id()
+ self.maingroup_id = self.gen_id()
+ self.all_id = self.gen_id()
+ self.all_buildconf_id = self.gen_id()
+ self.buildtypes = [self.buildtype]
+ self.test_id = self.gen_id()
+ self.test_command_id = self.gen_id()
+ self.test_buildconf_id = self.gen_id()
+ self.regen_id = self.gen_id()
+ self.regen_command_id = self.gen_id()
+ self.regen_buildconf_id = self.gen_id()
+ self.regen_dependency_id = self.gen_id()
+ self.top_level_dict = PbxDict()
+ self.generator_outputs = {}
+ # In Xcode, files are not accessed via their file names; rather, every one of them
+ # gets a unique id. More precisely, they get one unique id per target they are used
+ # in. If you generate only one id per file and reuse it, compilation will work but the
+ # UI will only show the file in one target and not the others. Thus the key is
+ # a tuple containing the target and filename.
+ self.buildfile_ids = {}
+ # That is not enough, though. Each target/file combination also gets a unique id
+ # in the file reference section. Because why not. This means that a source file
+ # that is used in two targets gets a total of four unique ID numbers.
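+ # For example (hypothetical names): a file src/foo.c used by targets A and B
+ # gets entries buildfile_ids[('A', 'src/foo.c')] and buildfile_ids[('B', 'src/foo.c')],
+ # plus matching entries in fileref_ids below.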
+ self.fileref_ids = {} + + def write_pbxfile(self, top_level_dict, ofilename): + tmpname = ofilename + '.tmp' + with open(tmpname, 'w', encoding='utf-8') as ofile: + ofile.write('// !$*UTF8*$!\n') + top_level_dict.write(ofile, 0) + os.replace(tmpname, ofilename) + + def gen_id(self) -> str: + return str(uuid.uuid4()).upper().replace('-', '')[:24] + + def get_target_dir(self, target): + dirname = os.path.join(target.get_subdir(), T.cast('str', self.environment.coredata.get_option(OptionKey('buildtype')))) + #os.makedirs(os.path.join(self.environment.get_build_dir(), dirname), exist_ok=True) + return dirname + + def get_custom_target_output_dir(self, target): + dirname = target.get_subdir() + os.makedirs(os.path.join(self.environment.get_build_dir(), dirname), exist_ok=True) + return dirname + + def target_to_build_root(self, target): + if self.get_target_dir(target) == '': + return '' + directories = os.path.normpath(self.get_target_dir(target)).split(os.sep) + return os.sep.join(['..'] * len(directories)) + + def object_filename_from_source(self, target, source): + # Xcode has the following naming scheme: + # projectname.build/debug/prog@exe.build/Objects-normal/x86_64/func.o + project = self.build.project_name + buildtype = self.buildtype + tname = target.get_id() + arch = 'x86_64' + if isinstance(source, mesonlib.File): + source = source.fname + stem = os.path.splitext(os.path.basename(source))[0] + obj_path = f'{project}.build/{buildtype}/{tname}.build/Objects-normal/{arch}/{stem}.o' + return obj_path + + def generate(self, capture: bool = False, vslite_ctx: dict = None) -> T.Optional[dict]: + # Check for (currently) unexpected capture arg use cases - + if capture: + raise MesonBugException('We do not expect the xcode backend to generate with \'capture = True\'') + if vslite_ctx: + raise MesonBugException('We do not expect the xcode backend to be given a valid \'vslite_ctx\'') + self.serialize_tests() + # Cache the result as the method rebuilds the array every time it is called. 
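+ # (get_build_targets() constructs a fresh container on every call, so we
+ # hold on to one reference here rather than re-querying it below.)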
+ self.build_targets = self.build.get_build_targets() + self.custom_targets = self.build.get_custom_targets() + self.generate_filemap() + self.generate_buildstylemap() + self.generate_build_phase_map() + self.generate_build_configuration_map() + self.generate_build_configurationlist_map() + self.generate_project_configurations_map() + self.generate_buildall_configurations_map() + self.generate_test_configurations_map() + self.generate_native_target_map() + self.generate_native_frameworks_map() + self.generate_custom_target_map() + self.generate_generator_target_map() + self.generate_source_phase_map() + self.generate_target_dependency_map() + self.generate_pbxdep_map() + self.generate_containerproxy_map() + self.generate_target_file_maps() + self.generate_build_file_maps() + self.proj_dir = os.path.join(self.environment.get_build_dir(), self.build.project_name + '.xcodeproj') + os.makedirs(self.proj_dir, exist_ok=True) + self.proj_file = os.path.join(self.proj_dir, 'project.pbxproj') + objects_dict = self.generate_prefix(self.top_level_dict) + objects_dict.add_comment(PbxComment('Begin PBXAggregateTarget section')) + self.generate_pbx_aggregate_target(objects_dict) + objects_dict.add_comment(PbxComment('End PBXAggregateTarget section')) + objects_dict.add_comment(PbxComment('Begin PBXBuildFile section')) + self.generate_pbx_build_file(objects_dict) + objects_dict.add_comment(PbxComment('End PBXBuildFile section')) + objects_dict.add_comment(PbxComment('Begin PBXBuildStyle section')) + self.generate_pbx_build_style(objects_dict) + objects_dict.add_comment(PbxComment('End PBXBuildStyle section')) + objects_dict.add_comment(PbxComment('Begin PBXContainerItemProxy section')) + self.generate_pbx_container_item_proxy(objects_dict) + objects_dict.add_comment(PbxComment('End PBXContainerItemProxy section')) + objects_dict.add_comment(PbxComment('Begin PBXFileReference section')) + self.generate_pbx_file_reference(objects_dict) + objects_dict.add_comment(PbxComment('End PBXFileReference section')) + objects_dict.add_comment(PbxComment('Begin PBXFrameworksBuildPhase section')) + self.generate_pbx_frameworks_buildphase(objects_dict) + objects_dict.add_comment(PbxComment('End PBXFrameworksBuildPhase section')) + objects_dict.add_comment(PbxComment('Begin PBXGroup section')) + self.generate_pbx_group(objects_dict) + objects_dict.add_comment(PbxComment('End PBXGroup section')) + objects_dict.add_comment(PbxComment('Begin PBXNativeTarget section')) + self.generate_pbx_native_target(objects_dict) + objects_dict.add_comment(PbxComment('End PBXNativeTarget section')) + objects_dict.add_comment(PbxComment('Begin PBXProject section')) + self.generate_pbx_project(objects_dict) + objects_dict.add_comment(PbxComment('End PBXProject section')) + objects_dict.add_comment(PbxComment('Begin PBXShellScriptBuildPhase section')) + self.generate_pbx_shell_build_phase(objects_dict) + objects_dict.add_comment(PbxComment('End PBXShellScriptBuildPhase section')) + objects_dict.add_comment(PbxComment('Begin PBXSourcesBuildPhase section')) + self.generate_pbx_sources_build_phase(objects_dict) + objects_dict.add_comment(PbxComment('End PBXSourcesBuildPhase section')) + objects_dict.add_comment(PbxComment('Begin PBXTargetDependency section')) + self.generate_pbx_target_dependency(objects_dict) + objects_dict.add_comment(PbxComment('End PBXTargetDependency section')) + objects_dict.add_comment(PbxComment('Begin XCBuildConfiguration section')) + self.generate_xc_build_configuration(objects_dict) + 
objects_dict.add_comment(PbxComment('End XCBuildConfiguration section'))
+ objects_dict.add_comment(PbxComment('Begin XCConfigurationList section'))
+ self.generate_xc_configurationList(objects_dict)
+ objects_dict.add_comment(PbxComment('End XCConfigurationList section'))
+ self.generate_suffix(self.top_level_dict)
+ self.write_pbxfile(self.top_level_dict, self.proj_file)
+ self.generate_regen_info()
+
+ def get_xcodetype(self, fname):
+ extension = fname.split('.')[-1]
+ if extension == 'C':
+ extension = 'cpp'
+ xcodetype = XCODETYPEMAP.get(extension.lower())
+ if not xcodetype:
+ xcodetype = 'sourcecode.unknown'
+ return xcodetype
+
+ def generate_filemap(self) -> None:
+ self.filemap = {} # Key is source file relative to src root.
+ self.target_filemap = {}
+ for name, t in self.build_targets.items():
+ for s in t.sources:
+ if isinstance(s, mesonlib.File):
+ s = os.path.join(s.subdir, s.fname)
+ self.filemap[s] = self.gen_id()
+ for o in t.objects:
+ if isinstance(o, str):
+ o = os.path.join(t.subdir, o)
+ self.filemap[o] = self.gen_id()
+ self.target_filemap[name] = self.gen_id()
+
+ def generate_buildstylemap(self) -> None:
+ self.buildstylemap = {self.buildtype: self.gen_id()}
+
+ def generate_build_phase_map(self) -> None:
+ for tname, t in self.build_targets.items():
+ # generate id for our own target-name
+ t.buildphasemap = {}
+ t.buildphasemap[tname] = self.gen_id()
+ # each target can have its own Frameworks/Sources/..., generate ids for those
+ t.buildphasemap['Frameworks'] = self.gen_id()
+ t.buildphasemap['Resources'] = self.gen_id()
+ t.buildphasemap['Sources'] = self.gen_id()
+
+ def generate_build_configuration_map(self) -> None:
+ self.buildconfmap = {}
+ for t in self.build_targets:
+ bconfs = {self.buildtype: self.gen_id()}
+ self.buildconfmap[t] = bconfs
+ for t in self.custom_targets:
+ bconfs = {self.buildtype: self.gen_id()}
+ self.buildconfmap[t] = bconfs
+
+ def generate_project_configurations_map(self) -> None:
+ self.project_configurations = {self.buildtype: self.gen_id()}
+
+ def generate_buildall_configurations_map(self) -> None:
+ self.buildall_configurations = {self.buildtype: self.gen_id()}
+
+ def generate_test_configurations_map(self) -> None:
+ self.test_configurations = {self.buildtype: self.gen_id()}
+
+ def generate_build_configurationlist_map(self) -> None:
+ self.buildconflistmap = {}
+ for t in self.build_targets:
+ self.buildconflistmap[t] = self.gen_id()
+ for t in self.custom_targets:
+ self.buildconflistmap[t] = self.gen_id()
+
+ def generate_native_target_map(self) -> None:
+ self.native_targets = {}
+ for t in self.build_targets:
+ self.native_targets[t] = self.gen_id()
+
+ def generate_custom_target_map(self) -> None:
+ self.shell_targets = {}
+ self.custom_target_output_buildfile = {}
+ self.custom_target_output_fileref = {}
+ for tname, t in self.custom_targets.items():
+ self.shell_targets[tname] = self.gen_id()
+ if not isinstance(t, build.CustomTarget):
+ continue
+ (srcs, ofilenames, cmd) = self.eval_custom_target_command(t)
+ for o in ofilenames:
+ self.custom_target_output_buildfile[o] = self.gen_id()
+ self.custom_target_output_fileref[o] = self.gen_id()
+
+ def generate_generator_target_map(self) -> None:
+ # Generator objects do not have natural unique ids,
+ # so use a counter.
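+ # E.g. (hypothetical target name): the first GeneratedList of target
+ # 'mylib' is keyed as ('mylib', 0), the second as ('mylib', 1).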
+ self.generator_fileref_ids = {} + self.generator_buildfile_ids = {} + for tname, t in self.build_targets.items(): + generator_id = 0 + for genlist in t.generated: + if not isinstance(genlist, build.GeneratedList): + continue + self.gen_single_target_map(genlist, tname, t, generator_id) + generator_id += 1 + # FIXME add outputs. + for tname, t in self.custom_targets.items(): + generator_id = 0 + for genlist in t.sources: + if not isinstance(genlist, build.GeneratedList): + continue + self.gen_single_target_map(genlist, tname, t, generator_id) + generator_id += 1 + + def gen_single_target_map(self, genlist, tname, t, generator_id): + k = (tname, generator_id) + assert k not in self.shell_targets + self.shell_targets[k] = self.gen_id() + ofile_abs = [] + for i in genlist.get_inputs(): + for o_base in genlist.get_outputs_for(i): + o = os.path.join(self.get_target_private_dir(t), o_base) + ofile_abs.append(os.path.join(self.environment.get_build_dir(), o)) + assert k not in self.generator_outputs + self.generator_outputs[k] = ofile_abs + buildfile_ids = [] + fileref_ids = [] + for i in range(len(ofile_abs)): + buildfile_ids.append(self.gen_id()) + fileref_ids.append(self.gen_id()) + self.generator_buildfile_ids[k] = buildfile_ids + self.generator_fileref_ids[k] = fileref_ids + + def generate_native_frameworks_map(self) -> None: + self.native_frameworks = {} + self.native_frameworks_fileref = {} + for t in self.build_targets.values(): + for dep in t.get_external_deps(): + if isinstance(dep, dependencies.AppleFrameworks): + for f in dep.frameworks: + self.native_frameworks[f] = self.gen_id() + self.native_frameworks_fileref[f] = self.gen_id() + + def generate_target_dependency_map(self) -> None: + self.target_dependency_map = {} + for tname, t in self.build_targets.items(): + for target in t.link_targets: + if isinstance(target, build.CustomTargetIndex): + k = (tname, target.target.get_basename()) + if k in self.target_dependency_map: + continue + else: + k = (tname, target.get_basename()) + assert k not in self.target_dependency_map + self.target_dependency_map[k] = self.gen_id() + for tname, t in self.custom_targets.items(): + k = tname + assert k not in self.target_dependency_map + self.target_dependency_map[k] = self.gen_id() + + def generate_pbxdep_map(self) -> None: + self.pbx_dep_map = {} + self.pbx_custom_dep_map = {} + for t in self.build_targets: + self.pbx_dep_map[t] = self.gen_id() + for t in self.custom_targets: + self.pbx_custom_dep_map[t] = self.gen_id() + + def generate_containerproxy_map(self) -> None: + self.containerproxy_map = {} + for t in self.build_targets: + self.containerproxy_map[t] = self.gen_id() + + def generate_target_file_maps(self) -> None: + self.generate_target_file_maps_impl(self.build_targets) + self.generate_target_file_maps_impl(self.custom_targets) + + def generate_target_file_maps_impl(self, targets): + for tname, t in targets.items(): + for s in t.sources: + if isinstance(s, mesonlib.File): + s = os.path.join(s.subdir, s.fname) + if not isinstance(s, str): + continue + k = (tname, s) + assert k not in self.buildfile_ids + self.buildfile_ids[k] = self.gen_id() + assert k not in self.fileref_ids + self.fileref_ids[k] = self.gen_id() + if not hasattr(t, 'objects'): + continue + for o in t.objects: + if isinstance(o, build.ExtractedObjects): + # Extracted objects do not live in "the Xcode world". 
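+ # (they are instead added by hand to the link line in
+ # generate_single_build_target below)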
+ continue + if isinstance(o, mesonlib.File): + o = os.path.join(o.subdir, o.fname) + if isinstance(o, str): + o = os.path.join(t.subdir, o) + k = (tname, o) + assert k not in self.buildfile_ids + self.buildfile_ids[k] = self.gen_id() + assert k not in self.fileref_ids + self.fileref_ids[k] = self.gen_id() + else: + raise RuntimeError('Unknown input type ' + str(o)) + + def generate_build_file_maps(self) -> None: + for buildfile in self.interpreter.get_build_def_files(): + assert isinstance(buildfile, str) + self.buildfile_ids[buildfile] = self.gen_id() + self.fileref_ids[buildfile] = self.gen_id() + + def generate_source_phase_map(self) -> None: + self.source_phase = {} + for t in self.build_targets: + self.source_phase[t] = self.gen_id() + + def generate_pbx_aggregate_target(self, objects_dict): + self.custom_aggregate_targets = {} + self.build_all_tdep_id = self.gen_id() + # FIXME: filter out targets that are not built by default. + target_dependencies = [self.pbx_dep_map[t] for t in self.build_targets] + custom_target_dependencies = [self.pbx_custom_dep_map[t] for t in self.custom_targets] + aggregated_targets = [] + aggregated_targets.append((self.all_id, 'ALL_BUILD', + self.all_buildconf_id, + [], + [self.regen_dependency_id] + target_dependencies + custom_target_dependencies)) + aggregated_targets.append((self.test_id, + 'RUN_TESTS', + self.test_buildconf_id, + [self.test_command_id], + [self.regen_dependency_id, self.build_all_tdep_id])) + aggregated_targets.append((self.regen_id, + 'REGENERATE', + self.regen_buildconf_id, + [self.regen_command_id], + [])) + for tname, t in self.build.get_custom_targets().items(): + ct_id = self.gen_id() + self.custom_aggregate_targets[tname] = ct_id + build_phases = [] + dependencies = [self.regen_dependency_id] + generator_id = 0 + for s in t.sources: + if not isinstance(s, build.GeneratedList): + continue + build_phases.append(self.shell_targets[(tname, generator_id)]) + for d in s.depends: + dependencies.append(self.pbx_custom_dep_map[d.get_id()]) + generator_id += 1 + build_phases.append(self.shell_targets[tname]) + aggregated_targets.append((ct_id, tname, self.buildconflistmap[tname], build_phases, dependencies)) + + # Sort objects by ID before writing + sorted_aggregated_targets = sorted(aggregated_targets, key=operator.itemgetter(0)) + for t in sorted_aggregated_targets: + agt_dict = PbxDict() + name = t[1] + buildconf_id = t[2] + build_phases = t[3] + dependencies = t[4] + agt_dict.add_item('isa', 'PBXAggregateTarget') + agt_dict.add_item('buildConfigurationList', buildconf_id, f'Build configuration list for PBXAggregateTarget "{name}"') + bp_arr = PbxArray() + agt_dict.add_item('buildPhases', bp_arr) + for bp in build_phases: + bp_arr.add_item(bp, 'ShellScript') + dep_arr = PbxArray() + agt_dict.add_item('dependencies', dep_arr) + for td in dependencies: + dep_arr.add_item(td, 'PBXTargetDependency') + agt_dict.add_item('name', f'"{name}"') + agt_dict.add_item('productName', f'"{name}"') + objects_dict.add_item(t[0], agt_dict, name) + + def generate_pbx_build_file(self, objects_dict): + for tname, t in self.build_targets.items(): + for dep in t.get_external_deps(): + if isinstance(dep, dependencies.AppleFrameworks): + for f in dep.frameworks: + fw_dict = PbxDict() + fwkey = self.native_frameworks[f] + if fwkey not in objects_dict.keys: + objects_dict.add_item(fwkey, fw_dict, f'{f}.framework in Frameworks') + fw_dict.add_item('isa', 'PBXBuildFile') + fw_dict.add_item('fileRef', self.native_frameworks_fileref[f], f) + + for s in t.sources: + 
in_build_dir = False
+ if isinstance(s, mesonlib.File):
+ if s.is_built:
+ in_build_dir = True
+ s = os.path.join(s.subdir, s.fname)
+
+ if not isinstance(s, str):
+ continue
+ sdict = PbxDict()
+ k = (tname, s)
+ idval = self.buildfile_ids[k]
+ fileref = self.fileref_ids[k]
+ if in_build_dir:
+ fullpath = os.path.join(self.environment.get_build_dir(), s)
+ else:
+ fullpath = os.path.join(self.environment.get_source_dir(), s)
+ sdict.add_item('isa', 'PBXBuildFile')
+ sdict.add_item('fileRef', fileref, fullpath)
+ objects_dict.add_item(idval, sdict)
+
+ for o in t.objects:
+ if isinstance(o, build.ExtractedObjects):
+ # Object files are not source files as such. We add them
+ # by hand in linker flags. It is also not particularly
+ # clear how to define build files in Xcode's file format.
+ continue
+ if isinstance(o, mesonlib.File):
+ o = os.path.join(o.subdir, o.fname)
+ elif isinstance(o, str):
+ o = os.path.join(t.subdir, o)
+ idval = self.buildfile_ids[(tname, o)]
+ k = (tname, o)
+ fileref = self.fileref_ids[k]
+ assert o not in self.filemap
+ self.filemap[o] = idval
+ fullpath = os.path.join(self.environment.get_source_dir(), o)
+ fullpath2 = fullpath
+ o_dict = PbxDict()
+ objects_dict.add_item(idval, o_dict, fullpath)
+ o_dict.add_item('isa', 'PBXBuildFile')
+ o_dict.add_item('fileRef', fileref, fullpath2)
+
+ generator_id = 0
+ for g in t.generated:
+ if not isinstance(g, build.GeneratedList):
+ continue
+ self.create_generator_shellphase(objects_dict, tname, generator_id)
+ generator_id += 1
+
+ # Custom targets are shell build phases in Xcode terminology.
+ for tname, t in self.custom_targets.items():
+ if not isinstance(t, build.CustomTarget):
+ continue
+ (srcs, ofilenames, cmd) = self.eval_custom_target_command(t)
+ for o in ofilenames:
+ custom_dict = PbxDict()
+ objects_dict.add_item(self.custom_target_output_buildfile[o], custom_dict, f'/* {o} */')
+ custom_dict.add_item('isa', 'PBXBuildFile')
+ custom_dict.add_item('fileRef', self.custom_target_output_fileref[o])
+ generator_id = 0
+ for g in t.sources:
+ if not isinstance(g, build.GeneratedList):
+ continue
+ self.create_generator_shellphase(objects_dict, tname, generator_id)
+ generator_id += 1
+
+ def create_generator_shellphase(self, objects_dict, tname, generator_id):
+ file_ids = self.generator_buildfile_ids[(tname, generator_id)]
+ ref_ids = self.generator_fileref_ids[(tname, generator_id)]
+ assert len(ref_ids) == len(file_ids)
+ for file_o, ref_id in zip(file_ids, ref_ids):
+ odict = PbxDict()
+ objects_dict.add_item(file_o, odict)
+ odict.add_item('isa', 'PBXBuildFile')
+ odict.add_item('fileRef', ref_id)
+
+ def generate_pbx_build_style(self, objects_dict):
+ # FIXME: Xcode 9 and later does not use PBXBuildStyle; it gets removed. Maybe we can remove this part.
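+ # Each entry is serialized roughly as (id and name illustrative):
+ # 1234567890ABCDEF12345678 /* debug */ = {
+ # isa = PBXBuildStyle;
+ # buildSettings = { COPY_PHASE_STRIP = NO; };
+ # name = "debug";
+ # };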
+ for name, idval in self.buildstylemap.items(): + styledict = PbxDict() + objects_dict.add_item(idval, styledict, name) + styledict.add_item('isa', 'PBXBuildStyle') + settings_dict = PbxDict() + styledict.add_item('buildSettings', settings_dict) + settings_dict.add_item('COPY_PHASE_STRIP', 'NO') + styledict.add_item('name', f'"{name}"') + + def generate_pbx_container_item_proxy(self, objects_dict): + for t in self.build_targets: + proxy_dict = PbxDict() + objects_dict.add_item(self.containerproxy_map[t], proxy_dict, 'PBXContainerItemProxy') + proxy_dict.add_item('isa', 'PBXContainerItemProxy') + proxy_dict.add_item('containerPortal', self.project_uid, 'Project object') + proxy_dict.add_item('proxyType', '1') + proxy_dict.add_item('remoteGlobalIDString', self.native_targets[t]) + proxy_dict.add_item('remoteInfo', '"' + t + '"') + + def generate_pbx_file_reference(self, objects_dict): + for tname, t in self.build_targets.items(): + for dep in t.get_external_deps(): + if isinstance(dep, dependencies.AppleFrameworks): + for f in dep.frameworks: + fw_dict = PbxDict() + framework_fileref = self.native_frameworks_fileref[f] + if objects_dict.has_item(framework_fileref): + continue + objects_dict.add_item(framework_fileref, fw_dict, f) + fw_dict.add_item('isa', 'PBXFileReference') + fw_dict.add_item('lastKnownFileType', 'wrapper.framework') + fw_dict.add_item('name', f'{f}.framework') + fw_dict.add_item('path', f'System/Library/Frameworks/{f}.framework') + fw_dict.add_item('sourceTree', 'SDKROOT') + for s in t.sources: + in_build_dir = False + if isinstance(s, mesonlib.File): + if s.is_built: + in_build_dir = True + s = os.path.join(s.subdir, s.fname) + if not isinstance(s, str): + continue + idval = self.fileref_ids[(tname, s)] + fullpath = os.path.join(self.environment.get_source_dir(), s) + src_dict = PbxDict() + xcodetype = self.get_xcodetype(s) + name = os.path.basename(s) + path = s + objects_dict.add_item(idval, src_dict, fullpath) + src_dict.add_item('isa', 'PBXFileReference') + src_dict.add_item('explicitFileType', '"' + xcodetype + '"') + src_dict.add_item('fileEncoding', '4') + if in_build_dir: + src_dict.add_item('name', '"' + name + '"') + # This makes no sense. This should say path instead of name + # but then the path gets added twice. + src_dict.add_item('path', '"' + name + '"') + src_dict.add_item('sourceTree', 'BUILD_ROOT') + else: + src_dict.add_item('name', '"' + name + '"') + src_dict.add_item('path', '"' + path + '"') + src_dict.add_item('sourceTree', 'SOURCE_ROOT') + + generator_id = 0 + for g in t.generated: + if not isinstance(g, build.GeneratedList): + continue + outputs = self.generator_outputs[(tname, generator_id)] + ref_ids = self.generator_fileref_ids[tname, generator_id] + assert len(ref_ids) == len(outputs) + for o, ref_id in zip(outputs, ref_ids): + odict = PbxDict() + name = os.path.basename(o) + objects_dict.add_item(ref_id, odict, o) + xcodetype = self.get_xcodetype(o) + rel_name = mesonlib.relpath(o, self.environment.get_source_dir()) + odict.add_item('isa', 'PBXFileReference') + odict.add_item('explicitFileType', '"' + xcodetype + '"') + odict.add_item('fileEncoding', '4') + odict.add_item('name', f'"{name}"') + odict.add_item('path', f'"{rel_name}"') + odict.add_item('sourceTree', 'SOURCE_ROOT') + + generator_id += 1 + + for o in t.objects: + if isinstance(o, build.ExtractedObjects): + # Same as with pbxbuildfile. 
+ continue + if isinstance(o, mesonlib.File): + fullpath = o.absolute_path(self.environment.get_source_dir(), self.environment.get_build_dir()) + o = os.path.join(o.subdir, o.fname) + else: + o = os.path.join(t.subdir, o) + fullpath = os.path.join(self.environment.get_source_dir(), o) + idval = self.fileref_ids[(tname, o)] + rel_name = mesonlib.relpath(fullpath, self.environment.get_source_dir()) + o_dict = PbxDict() + name = os.path.basename(o) + objects_dict.add_item(idval, o_dict, fullpath) + o_dict.add_item('isa', 'PBXFileReference') + o_dict.add_item('explicitFileType', '"' + self.get_xcodetype(o) + '"') + o_dict.add_item('fileEncoding', '4') + o_dict.add_item('name', f'"{name}"') + o_dict.add_item('path', f'"{rel_name}"') + o_dict.add_item('sourceTree', 'SOURCE_ROOT') + for tname, idval in self.target_filemap.items(): + target_dict = PbxDict() + objects_dict.add_item(idval, target_dict, tname) + t = self.build_targets[tname] + fname = t.get_filename() + reftype = 0 + if isinstance(t, build.Executable): + typestr = 'compiled.mach-o.executable' + path = fname + elif isinstance(t, build.SharedLibrary): + typestr = self.get_xcodetype('dummy.dylib') + path = fname + else: + typestr = self.get_xcodetype(fname) + path = '"%s"' % t.get_filename() + target_dict.add_item('isa', 'PBXFileReference') + target_dict.add_item('explicitFileType', '"' + typestr + '"') + if ' ' in path and path[0] != '"': + target_dict.add_item('path', f'"{path}"') + else: + target_dict.add_item('path', path) + target_dict.add_item('refType', reftype) + target_dict.add_item('sourceTree', 'BUILT_PRODUCTS_DIR') + + for tname, t in self.custom_targets.items(): + if not isinstance(t, build.CustomTarget): + continue + (srcs, ofilenames, cmd) = self.eval_custom_target_command(t) + for s in t.sources: + if isinstance(s, mesonlib.File): + s = os.path.join(s.subdir, s.fname) + elif isinstance(s, str): + s = os.path.join(t.subdir, s) + else: + continue + custom_dict = PbxDict() + typestr = self.get_xcodetype(s) + custom_dict.add_item('isa', 'PBXFileReference') + custom_dict.add_item('explicitFileType', '"' + typestr + '"') + custom_dict.add_item('name', f'"{s}"') + custom_dict.add_item('path', f'"{s}"') + custom_dict.add_item('refType', 0) + custom_dict.add_item('sourceTree', 'SOURCE_ROOT') + objects_dict.add_item(self.fileref_ids[(tname, s)], custom_dict) + for o in ofilenames: + custom_dict = PbxDict() + typestr = self.get_xcodetype(o) + custom_dict.add_item('isa', 'PBXFileReference') + custom_dict.add_item('explicitFileType', '"' + typestr + '"') + custom_dict.add_item('name', o) + custom_dict.add_item('path', os.path.join(self.src_to_build, o)) + custom_dict.add_item('refType', 0) + custom_dict.add_item('sourceTree', 'SOURCE_ROOT') + objects_dict.add_item(self.custom_target_output_fileref[o], custom_dict) + + for buildfile in self.interpreter.get_build_def_files(): + basename = os.path.split(buildfile)[1] + buildfile_dict = PbxDict() + typestr = self.get_xcodetype(buildfile) + buildfile_dict.add_item('isa', 'PBXFileReference') + buildfile_dict.add_item('explicitFileType', '"' + typestr + '"') + buildfile_dict.add_item('name', f'"{basename}"') + buildfile_dict.add_item('path', f'"{buildfile}"') + buildfile_dict.add_item('refType', 0) + buildfile_dict.add_item('sourceTree', 'SOURCE_ROOT') + objects_dict.add_item(self.fileref_ids[buildfile], buildfile_dict) + + def generate_pbx_frameworks_buildphase(self, objects_dict): + for t in self.build_targets.values(): + bt_dict = PbxDict() + 
objects_dict.add_item(t.buildphasemap['Frameworks'], bt_dict, 'Frameworks') + bt_dict.add_item('isa', 'PBXFrameworksBuildPhase') + bt_dict.add_item('buildActionMask', 2147483647) + file_list = PbxArray() + bt_dict.add_item('files', file_list) + for dep in t.get_external_deps(): + if isinstance(dep, dependencies.AppleFrameworks): + for f in dep.frameworks: + file_list.add_item(self.native_frameworks[f], f'{f}.framework in Frameworks') + bt_dict.add_item('runOnlyForDeploymentPostprocessing', 0) + + def generate_pbx_group(self, objects_dict): + groupmap = {} + target_src_map = {} + for t in self.build_targets: + groupmap[t] = self.gen_id() + target_src_map[t] = self.gen_id() + for t in self.custom_targets: + groupmap[t] = self.gen_id() + target_src_map[t] = self.gen_id() + projecttree_id = self.gen_id() + resources_id = self.gen_id() + products_id = self.gen_id() + frameworks_id = self.gen_id() + main_dict = PbxDict() + objects_dict.add_item(self.maingroup_id, main_dict) + main_dict.add_item('isa', 'PBXGroup') + main_children = PbxArray() + main_dict.add_item('children', main_children) + main_children.add_item(projecttree_id, 'Project tree') + main_children.add_item(resources_id, 'Resources') + main_children.add_item(products_id, 'Products') + main_children.add_item(frameworks_id, 'Frameworks') + main_dict.add_item('sourceTree', '""') + + self.add_projecttree(objects_dict, projecttree_id) + + resource_dict = PbxDict() + objects_dict.add_item(resources_id, resource_dict, 'Resources') + resource_dict.add_item('isa', 'PBXGroup') + resource_children = PbxArray() + resource_dict.add_item('children', resource_children) + resource_dict.add_item('name', 'Resources') + resource_dict.add_item('sourceTree', '""') + + frameworks_dict = PbxDict() + objects_dict.add_item(frameworks_id, frameworks_dict, 'Frameworks') + frameworks_dict.add_item('isa', 'PBXGroup') + frameworks_children = PbxArray() + frameworks_dict.add_item('children', frameworks_children) + # write frameworks + + for t in self.build_targets.values(): + for dep in t.get_external_deps(): + if isinstance(dep, dependencies.AppleFrameworks): + for f in dep.frameworks: + frameworks_children.add_item(self.native_frameworks_fileref[f], f) + + frameworks_dict.add_item('name', 'Frameworks') + frameworks_dict.add_item('sourceTree', '""') + + for tname, t in self.custom_targets.items(): + target_dict = PbxDict() + objects_dict.add_item(groupmap[tname], target_dict, tname) + target_dict.add_item('isa', 'PBXGroup') + target_children = PbxArray() + target_dict.add_item('children', target_children) + target_children.add_item(target_src_map[tname], 'Source files') + if t.subproject: + target_dict.add_item('name', f'"{t.subproject} • {t.name}"') + else: + target_dict.add_item('name', f'"{t.name}"') + target_dict.add_item('sourceTree', '""') + source_files_dict = PbxDict() + objects_dict.add_item(target_src_map[tname], source_files_dict, 'Source files') + source_files_dict.add_item('isa', 'PBXGroup') + source_file_children = PbxArray() + source_files_dict.add_item('children', source_file_children) + for s in t.sources: + if isinstance(s, mesonlib.File): + s = os.path.join(s.subdir, s.fname) + elif isinstance(s, str): + s = os.path.join(t.subdir, s) + else: + continue + source_file_children.add_item(self.fileref_ids[(tname, s)], s) + source_files_dict.add_item('name', '"Source files"') + source_files_dict.add_item('sourceTree', '""') + + # And finally products + product_dict = PbxDict() + objects_dict.add_item(products_id, product_dict, 'Products') + 
product_dict.add_item('isa', 'PBXGroup') + product_children = PbxArray() + product_dict.add_item('children', product_children) + for t in self.build_targets: + product_children.add_item(self.target_filemap[t], t) + product_dict.add_item('name', 'Products') + product_dict.add_item('sourceTree', '""') + + def write_group_target_entry(self, objects_dict, t): + tid = t.get_id() + group_id = self.gen_id() + target_dict = PbxDict() + objects_dict.add_item(group_id, target_dict, tid) + target_dict.add_item('isa', 'PBXGroup') + target_children = PbxArray() + target_dict.add_item('children', target_children) + target_dict.add_item('name', f'"{t} · target"') + target_dict.add_item('sourceTree', '""') + source_files_dict = PbxDict() + for s in t.sources: + if isinstance(s, mesonlib.File): + s = os.path.join(s.subdir, s.fname) + elif isinstance(s, str): + s = os.path.join(t.subdir, s) + else: + continue + target_children.add_item(self.fileref_ids[(tid, s)], s) + for o in t.objects: + if isinstance(o, build.ExtractedObjects): + # Do not show built object files in the project tree. + continue + if isinstance(o, mesonlib.File): + o = os.path.join(o.subdir, o.fname) + else: + o = os.path.join(t.subdir, o) + target_children.add_item(self.fileref_ids[(tid, o)], o) + source_files_dict.add_item('name', '"Source files"') + source_files_dict.add_item('sourceTree', '""') + return group_id + + def add_projecttree(self, objects_dict, projecttree_id): + root_dict = PbxDict() + objects_dict.add_item(projecttree_id, root_dict, "Root of project tree") + root_dict.add_item('isa', 'PBXGroup') + target_children = PbxArray() + root_dict.add_item('children', target_children) + root_dict.add_item('name', '"Project root"') + root_dict.add_item('sourceTree', '""') + + project_tree = self.generate_project_tree() + self.write_tree(objects_dict, project_tree, target_children, '') + + def write_tree(self, objects_dict, tree_node, children_array, current_subdir): + for subdir_name, subdir_node in tree_node.subdirs.items(): + subdir_dict = PbxDict() + subdir_children = PbxArray() + subdir_id = self.gen_id() + objects_dict.add_item(subdir_id, subdir_dict) + children_array.add_item(subdir_id) + subdir_dict.add_item('isa', 'PBXGroup') + subdir_dict.add_item('children', subdir_children) + subdir_dict.add_item('name', f'"{subdir_name}"') + subdir_dict.add_item('sourceTree', '""') + self.write_tree(objects_dict, subdir_node, subdir_children, os.path.join(current_subdir, subdir_name)) + for target in tree_node.targets: + group_id = self.write_group_target_entry(objects_dict, target) + children_array.add_item(group_id) + potentials = [os.path.join(current_subdir, 'meson.build'), + os.path.join(current_subdir, 'meson.options'), + os.path.join(current_subdir, 'meson_options.txt')] + for bf in potentials: + i = self.fileref_ids.get(bf, None) + if i: + children_array.add_item(i) + + def generate_project_tree(self): + tree_info = FileTreeEntry() + for tname, t in self.build_targets.items(): + self.add_target_to_tree(tree_info, t) + return tree_info + + def add_target_to_tree(self, tree_root, t): + current_node = tree_root + path_segments = t.subdir.split('/') + for s in path_segments: + if not s: + continue + if s not in current_node.subdirs: + current_node.subdirs[s] = FileTreeEntry() + current_node = current_node.subdirs[s] + current_node.targets.append(t) + + def generate_pbx_native_target(self, objects_dict): + for tname, idval in self.native_targets.items(): + ntarget_dict = PbxDict() + t = self.build_targets[tname] + 
objects_dict.add_item(idval, ntarget_dict, tname)
+ ntarget_dict.add_item('isa', 'PBXNativeTarget')
+ ntarget_dict.add_item('buildConfigurationList', self.buildconflistmap[tname], f'Build configuration list for PBXNativeTarget "{tname}"')
+ buildphases_array = PbxArray()
+ ntarget_dict.add_item('buildPhases', buildphases_array)
+ generator_id = 0
+ for g in t.generated:
+ # Custom targets are handled via inter-target dependencies.
+ # Generators are built as a shellscriptbuildphase.
+ if isinstance(g, build.GeneratedList):
+ buildphases_array.add_item(self.shell_targets[(tname, generator_id)], f'Generator {generator_id}/{tname}')
+ generator_id += 1
+ for bpname, bpval in t.buildphasemap.items():
+ buildphases_array.add_item(bpval, f'{bpname} yyy')
+ ntarget_dict.add_item('buildRules', PbxArray())
+ dep_array = PbxArray()
+ ntarget_dict.add_item('dependencies', dep_array)
+ dep_array.add_item(self.regen_dependency_id)
+ # These dependencies only tell Xcode that the deps must be built
+ # before this one. They don't set up linkage or anything
+ # like that. Those are set up in the XCBuildConfiguration.
+ for lt in self.build_targets[tname].link_targets:
+ # NOT DOCUMENTED: we may need to make different links
+ # to the same target use different targetdependency items.
+ if isinstance(lt, build.CustomTarget):
+ dep_array.add_item(self.pbx_custom_dep_map[lt.get_id()], lt.name)
+ elif isinstance(lt, build.CustomTargetIndex):
+ dep_array.add_item(self.pbx_custom_dep_map[lt.target.get_id()], lt.target.name)
+ else:
+ idval = self.pbx_dep_map[lt.get_id()]
+ dep_array.add_item(idval, 'PBXTargetDependency')
+ for o in t.objects:
+ if isinstance(o, build.ExtractedObjects):
+ source_target_id = o.target.get_id()
+ idval = self.pbx_dep_map[source_target_id]
+ dep_array.add_item(idval, 'PBXTargetDependency')
+ generator_id = 0
+ for o in t.generated:
+ if isinstance(o, build.CustomTarget):
+ dep_array.add_item(self.pbx_custom_dep_map[o.get_id()], o.name)
+ elif isinstance(o, build.CustomTargetIndex):
+ dep_array.add_item(self.pbx_custom_dep_map[o.target.get_id()], o.target.name)
+
+ generator_id += 1
+
+ ntarget_dict.add_item('name', f'"{tname}"')
+ ntarget_dict.add_item('productName', f'"{tname}"')
+ ntarget_dict.add_item('productReference', self.target_filemap[tname], tname)
+ if isinstance(t, build.Executable):
+ typestr = 'com.apple.product-type.tool'
+ elif isinstance(t, build.StaticLibrary):
+ typestr = 'com.apple.product-type.library.static'
+ elif isinstance(t, build.SharedLibrary):
+ typestr = 'com.apple.product-type.library.dynamic'
+ else:
+ raise MesonException('Unknown target type for %s' % tname)
+ ntarget_dict.add_item('productType', f'"{typestr}"')
+
+ def generate_pbx_project(self, objects_dict):
+ project_dict = PbxDict()
+ objects_dict.add_item(self.project_uid, project_dict, 'Project object')
+ project_dict.add_item('isa', 'PBXProject')
+ attr_dict = PbxDict()
+ project_dict.add_item('attributes', attr_dict)
+ attr_dict.add_item('BuildIndependentTargetsInParallel', 'YES')
+ project_dict.add_item('buildConfigurationList', self.project_conflist, f'Build configuration list for PBXProject "{self.build.project_name}"')
+ project_dict.add_item('buildSettings', PbxDict())
+ style_arr = PbxArray()
+ project_dict.add_item('buildStyles', style_arr)
+ for name, idval in self.buildstylemap.items():
+ style_arr.add_item(idval, name)
+ project_dict.add_item('compatibilityVersion', '"Xcode 3.2"')
+ project_dict.add_item('hasScannedForEncodings', 0)
+ project_dict.add_item('mainGroup', self.maingroup_id)
+
project_dict.add_item('projectDirPath', '"' + self.environment.get_source_dir() + '"') + project_dict.add_item('projectRoot', '""') + targets_arr = PbxArray() + project_dict.add_item('targets', targets_arr) + targets_arr.add_item(self.all_id, 'ALL_BUILD') + targets_arr.add_item(self.test_id, 'RUN_TESTS') + targets_arr.add_item(self.regen_id, 'REGENERATE') + for t in self.build_targets: + targets_arr.add_item(self.native_targets[t], t) + for t in self.custom_targets: + targets_arr.add_item(self.custom_aggregate_targets[t], t) + + def generate_pbx_shell_build_phase(self, objects_dict): + self.generate_test_shell_build_phase(objects_dict) + self.generate_regen_shell_build_phase(objects_dict) + self.generate_custom_target_shell_build_phases(objects_dict) + self.generate_generator_target_shell_build_phases(objects_dict) + + def generate_test_shell_build_phase(self, objects_dict): + shell_dict = PbxDict() + objects_dict.add_item(self.test_command_id, shell_dict, 'ShellScript') + shell_dict.add_item('isa', 'PBXShellScriptBuildPhase') + shell_dict.add_item('buildActionMask', 2147483647) + shell_dict.add_item('files', PbxArray()) + shell_dict.add_item('inputPaths', PbxArray()) + shell_dict.add_item('outputPaths', PbxArray()) + shell_dict.add_item('runOnlyForDeploymentPostprocessing', 0) + shell_dict.add_item('shellPath', '/bin/sh') + cmd = mesonlib.get_meson_command() + ['test', '--no-rebuild', '-C', self.environment.get_build_dir()] + cmdstr = ' '.join(["'%s'" % i for i in cmd]) + shell_dict.add_item('shellScript', f'"{cmdstr}"') + shell_dict.add_item('showEnvVarsInLog', 0) + + def generate_regen_shell_build_phase(self, objects_dict): + shell_dict = PbxDict() + objects_dict.add_item(self.regen_command_id, shell_dict, 'ShellScript') + shell_dict.add_item('isa', 'PBXShellScriptBuildPhase') + shell_dict.add_item('buildActionMask', 2147483647) + shell_dict.add_item('files', PbxArray()) + shell_dict.add_item('inputPaths', PbxArray()) + shell_dict.add_item('outputPaths', PbxArray()) + shell_dict.add_item('runOnlyForDeploymentPostprocessing', 0) + shell_dict.add_item('shellPath', '/bin/sh') + cmd = mesonlib.get_meson_command() + ['--internal', 'regencheck', os.path.join(self.environment.get_build_dir(), 'meson-private')] + cmdstr = ' '.join(["'%s'" % i for i in cmd]) + shell_dict.add_item('shellScript', f'"{cmdstr}"') + shell_dict.add_item('showEnvVarsInLog', 0) + + def generate_custom_target_shell_build_phases(self, objects_dict): + # Custom targets are shell build phases in Xcode terminology. 
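+ # Each one becomes a PBXShellScriptBuildPhase whose shellScript cd's into
+ # the build dir and runs the meson-wrapped command, roughly:
+ # "cd <builddir>; \'cmd\' \'arg1\' ..." (illustrative).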
+ for tname, t in self.custom_targets.items():
+ if not isinstance(t, build.CustomTarget):
+ continue
+ (srcs, ofilenames, cmd) = self.eval_custom_target_command(t, absolute_outputs=True)
+ fixed_cmd, _ = self.as_meson_exe_cmdline(cmd[0],
+ cmd[1:],
+ capture=ofilenames[0] if t.capture else None,
+ feed=srcs[0] if t.feed else None,
+ env=t.env)
+ custom_dict = PbxDict()
+ objects_dict.add_item(self.shell_targets[tname], custom_dict, f'/* Custom target {tname} */')
+ custom_dict.add_item('isa', 'PBXShellScriptBuildPhase')
+ custom_dict.add_item('buildActionMask', 2147483647)
+ custom_dict.add_item('files', PbxArray())
+ custom_dict.add_item('inputPaths', PbxArray())
+ outarray = PbxArray()
+ custom_dict.add_item('name', '"Generate {}."'.format(ofilenames[0]))
+ custom_dict.add_item('outputPaths', outarray)
+ for o in ofilenames:
+ outarray.add_item(os.path.join(self.environment.get_build_dir(), o))
+ custom_dict.add_item('runOnlyForDeploymentPostprocessing', 0)
+ custom_dict.add_item('shellPath', '/bin/sh')
+ workdir = self.environment.get_build_dir()
+ quoted_cmd = []
+ for c in fixed_cmd:
+ quoted_cmd.append(c.replace('"', chr(92) + '"'))
+ cmdstr = ' '.join([f"\\'{x}\\'" for x in quoted_cmd])
+ custom_dict.add_item('shellScript', f'"cd {workdir}; {cmdstr}"')
+ custom_dict.add_item('showEnvVarsInLog', 0)
+
+ def generate_generator_target_shell_build_phases(self, objects_dict):
+ for tname, t in self.build_targets.items():
+ generator_id = 0
+ for genlist in t.generated:
+ if isinstance(genlist, build.GeneratedList):
+ self.generate_single_generator_phase(tname, t, genlist, generator_id, objects_dict)
+ generator_id += 1
+ for tname, t in self.custom_targets.items():
+ generator_id = 0
+ for genlist in t.sources:
+ if isinstance(genlist, build.GeneratedList):
+ self.generate_single_generator_phase(tname, t, genlist, generator_id, objects_dict)
+ generator_id += 1
+
+ def generate_single_generator_phase(self, tname, t, genlist, generator_id, objects_dict):
+ # TODO: this should be rewritten to use the meson wrapper, like the other generators do.
+ # Currently it doesn't handle a host binary that requires an exe wrapper correctly.
+ generator = genlist.get_generator()
+ exe = generator.get_exe()
+ exe_arr = self.build_target_to_cmd_array(exe)
+ workdir = self.environment.get_build_dir()
+ gen_dict = PbxDict()
+ objects_dict.add_item(self.shell_targets[(tname, generator_id)], gen_dict, f'"Generator {generator_id}/{tname}"')
+ infilelist = genlist.get_inputs()
+ outfilelist = genlist.get_outputs()
+ gen_dict.add_item('isa', 'PBXShellScriptBuildPhase')
+ gen_dict.add_item('buildActionMask', 2147483647)
+ gen_dict.add_item('files', PbxArray())
+ gen_dict.add_item('inputPaths', PbxArray())
+ gen_dict.add_item('name', f'"Generator {generator_id}/{tname}"')
+ commands = [["cd", workdir]] # Array of arrays; each inner one is a single command. They all get concatenated below.
+ k = (tname, generator_id)
+ ofile_abs = self.generator_outputs[k]
+ outarray = PbxArray()
+ gen_dict.add_item('outputPaths', outarray)
+ for of in ofile_abs:
+ outarray.add_item(of)
+ for i in infilelist:
+ # This might need to be added to inputPaths. It's not done yet because it is
+ # unclear whether it is necessary, what actually happens when it is defined,
+ # and the build currently works without it.
+ #infile_abs = i.absolute_path(self.environment.get_source_dir(), self.environment.get_build_dir()) + infilename = i.rel_to_builddir(self.build_to_src) + base_args = generator.get_arglist(infilename) + for o_base in genlist.get_outputs_for(i): + o = os.path.join(self.get_target_private_dir(t), o_base) + args = [] + for arg in base_args: + arg = arg.replace("@INPUT@", infilename) + arg = arg.replace('@OUTPUT@', o).replace('@BUILD_DIR@', self.get_target_private_dir(t)) + arg = arg.replace("@CURRENT_SOURCE_DIR@", os.path.join(self.build_to_src, t.subdir)) + args.append(arg) + args = self.replace_outputs(args, self.get_target_private_dir(t), outfilelist) + args = self.replace_extra_args(args, genlist) + if generator.capture: + # When capturing, stdout is the output. Forward it with the shell. + full_command = ['('] + exe_arr + args + ['>', o, ')'] + else: + full_command = exe_arr + args + commands.append(full_command) + gen_dict.add_item('runOnlyForDeploymentPostprocessing', 0) + gen_dict.add_item('shellPath', '/bin/sh') + quoted_cmds = [] + for cmnd in commands: + q = [] + for c in cmnd: + if ' ' in c: + q.append(f'\\"{c}\\"') + else: + q.append(c) + quoted_cmds.append(' '.join(q)) + cmdstr = '"' + ' && '.join(quoted_cmds) + '"' + gen_dict.add_item('shellScript', cmdstr) + gen_dict.add_item('showEnvVarsInLog', 0) + + def generate_pbx_sources_build_phase(self, objects_dict): + for name in self.source_phase: + phase_dict = PbxDict() + t = self.build_targets[name] + objects_dict.add_item(t.buildphasemap[name], phase_dict, 'Sources') + phase_dict.add_item('isa', 'PBXSourcesBuildPhase') + phase_dict.add_item('buildActionMask', 2147483647) + file_arr = PbxArray() + phase_dict.add_item('files', file_arr) + for s in self.build_targets[name].sources: + s = os.path.join(s.subdir, s.fname) + if not self.environment.is_header(s): + file_arr.add_item(self.buildfile_ids[(name, s)], os.path.join(self.environment.get_source_dir(), s)) + generator_id = 0 + for gt in t.generated: + if isinstance(gt, build.CustomTarget): + (srcs, ofilenames, cmd) = self.eval_custom_target_command(gt) + for o in ofilenames: + file_arr.add_item(self.custom_target_output_buildfile[o], + os.path.join(self.environment.get_build_dir(), o)) + elif isinstance(gt, build.CustomTargetIndex): + for o in gt.get_outputs(): + file_arr.add_item(self.custom_target_output_buildfile[o], + os.path.join(self.environment.get_build_dir(), o)) + elif isinstance(gt, build.GeneratedList): + genfiles = self.generator_buildfile_ids[(name, generator_id)] + generator_id += 1 + for o in genfiles: + file_arr.add_item(o) + else: + raise RuntimeError('Unknown input type: ' + str(gt)) + phase_dict.add_item('runOnlyForDeploymentPostprocessing', 0) + + def generate_pbx_target_dependency(self, objects_dict): + all_dict = PbxDict() + objects_dict.add_item(self.build_all_tdep_id, all_dict, 'ALL_BUILD') + all_dict.add_item('isa', 'PBXTargetDependency') + all_dict.add_item('target', self.all_id) + targets = [] + targets.append((self.regen_dependency_id, self.regen_id, 'REGEN', None)) + for t in self.build_targets: + idval = self.pbx_dep_map[t] # VERIFY: is this correct? 
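+ # Each tuple is (dependency id, target id, display name, containerproxy id or None).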
+ targets.append((idval, self.native_targets[t], t, self.containerproxy_map[t]))
+
+ for t in self.custom_targets:
+ idval = self.pbx_custom_dep_map[t]
+ targets.append((idval, self.custom_aggregate_targets[t], t, None)) # self.containerproxy_map[t]))
+
+ # Sort objects by ID
+ sorted_targets = sorted(targets, key=operator.itemgetter(0))
+ for t in sorted_targets:
+ t_dict = PbxDict()
+ objects_dict.add_item(t[0], t_dict, 'PBXTargetDependency')
+ t_dict.add_item('isa', 'PBXTargetDependency')
+ t_dict.add_item('target', t[1], t[2])
+ if t[3] is not None:
+ t_dict.add_item('targetProxy', t[3], 'PBXContainerItemProxy')
+
+ def generate_xc_build_configuration(self, objects_dict):
+ # First the setup for the toplevel project.
+ for buildtype in self.buildtypes:
+ bt_dict = PbxDict()
+ objects_dict.add_item(self.project_configurations[buildtype], bt_dict, buildtype)
+ bt_dict.add_item('isa', 'XCBuildConfiguration')
+ settings_dict = PbxDict()
+ bt_dict.add_item('buildSettings', settings_dict)
+ settings_dict.add_item('ARCHS', '"$(NATIVE_ARCH_ACTUAL)"')
+ settings_dict.add_item('ONLY_ACTIVE_ARCH', 'YES')
+ settings_dict.add_item('SWIFT_VERSION', '5.0')
+ settings_dict.add_item('SDKROOT', '"macosx"')
+ settings_dict.add_item('SYMROOT', '"%s/build"' % self.environment.get_build_dir())
+ bt_dict.add_item('name', f'"{buildtype}"')
+
+ # Then the all target.
+ for buildtype in self.buildtypes:
+ bt_dict = PbxDict()
+ objects_dict.add_item(self.buildall_configurations[buildtype], bt_dict, buildtype)
+ bt_dict.add_item('isa', 'XCBuildConfiguration')
+ settings_dict = PbxDict()
+ bt_dict.add_item('buildSettings', settings_dict)
+ settings_dict.add_item('SYMROOT', '"%s"' % self.environment.get_build_dir())
+ warn_array = PbxArray()
+ warn_array.add_item('"$(inherited)"')
+ settings_dict.add_item('WARNING_CFLAGS', warn_array)
+
+ bt_dict.add_item('name', f'"{buildtype}"')
+
+ # Then the test target.
+ for buildtype in self.buildtypes:
+ bt_dict = PbxDict()
+ objects_dict.add_item(self.test_configurations[buildtype], bt_dict, buildtype)
+ bt_dict.add_item('isa', 'XCBuildConfiguration')
+ settings_dict = PbxDict()
+ bt_dict.add_item('buildSettings', settings_dict)
+ settings_dict.add_item('SYMROOT', '"%s"' % self.environment.get_build_dir())
+ warn_array = PbxArray()
+ settings_dict.add_item('WARNING_CFLAGS', warn_array)
+ warn_array.add_item('"$(inherited)"')
+ bt_dict.add_item('name', f'"{buildtype}"')
+
+ # Now, finally, the targets themselves.
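+ # Build targets get their full per-buildtype settings via
+ # generate_single_build_target; custom targets only need a minimal
+ # configuration entry.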
+ for target_name, target in self.build_targets.items(): + self.generate_single_build_target(objects_dict, target_name, target) + + for target_name, target in self.custom_targets.items(): + bt_dict = PbxDict() + objects_dict.add_item(self.buildconfmap[target_name][buildtype], bt_dict, buildtype) + bt_dict.add_item('isa', 'XCBuildConfiguration') + settings_dict = PbxDict() + bt_dict.add_item('buildSettings', settings_dict) + settings_dict.add_item('ARCHS', '"$(NATIVE_ARCH_ACTUAL)"') + settings_dict.add_item('ONLY_ACTIVE_ARCH', 'YES') + settings_dict.add_item('SDKROOT', '"macosx"') + settings_dict.add_item('SYMROOT', '"%s/build"' % self.environment.get_build_dir()) + bt_dict.add_item('name', f'"{buildtype}"') + + def determine_internal_dep_link_args(self, target, buildtype): + links_dylib = False + dep_libs = [] + for l in target.link_targets: + if isinstance(target, build.SharedModule) and isinstance(l, build.Executable): + continue + if isinstance(l, build.CustomTargetIndex): + rel_dir = self.get_custom_target_output_dir(l.target) + libname = l.get_filename() + elif isinstance(l, build.CustomTarget): + rel_dir = self.get_custom_target_output_dir(l) + libname = l.get_filename() + else: + rel_dir = self.get_target_dir(l) + libname = l.get_filename() + abs_path = os.path.join(self.environment.get_build_dir(), rel_dir, libname) + dep_libs.append("'%s'" % abs_path) + if isinstance(l, build.SharedLibrary): + links_dylib = True + if isinstance(l, build.StaticLibrary): + (sub_libs, sub_links_dylib) = self.determine_internal_dep_link_args(l, buildtype) + dep_libs += sub_libs + links_dylib = links_dylib or sub_links_dylib + return (dep_libs, links_dylib) + + def generate_single_build_target(self, objects_dict, target_name, target): + for buildtype in self.buildtypes: + dep_libs = [] + links_dylib = False + headerdirs = [] + for d in target.include_dirs: + for sd in d.incdirs: + cd = os.path.join(d.curdir, sd) + headerdirs.append(os.path.join(self.environment.get_source_dir(), cd)) + headerdirs.append(os.path.join(self.environment.get_build_dir(), cd)) + for extra in d.extra_build_dirs: + headerdirs.append(os.path.join(self.environment.get_build_dir(), extra)) + (dep_libs, links_dylib) = self.determine_internal_dep_link_args(target, buildtype) + if links_dylib: + dep_libs = ['-Wl,-search_paths_first', '-Wl,-headerpad_max_install_names'] + dep_libs + dylib_version = None + if isinstance(target, build.SharedLibrary): + if isinstance(target, build.SharedModule): + ldargs = [] + else: + ldargs = ['-dynamiclib'] + ldargs += ['-Wl,-headerpad_max_install_names'] + dep_libs + install_path = os.path.join(self.environment.get_build_dir(), target.subdir, buildtype) + dylib_version = target.soversion + else: + ldargs = dep_libs + install_path = '' + if dylib_version is not None: + product_name = target.get_basename() + '.' + dylib_version + else: + product_name = target.get_basename() + ldargs += target.link_args + # Swift is special. Again. You can't mix Swift with other languages + # in the same target. 
Thus for Swift we only use the Swift compiler itself as the linker, with no extra stdlib args.
+ if self.is_swift_target(target):
+ linker, stdlib_args = target.compilers['swift'], []
+ else:
+ linker, stdlib_args = self.determine_linker_and_stdlib_args(target)
+ if not isinstance(target, build.StaticLibrary):
+ ldargs += self.build.get_project_link_args(linker, target.subproject, target.for_machine)
+ ldargs += self.build.get_global_link_args(linker, target.for_machine)
+ cargs = []
+ for dep in target.get_external_deps():
+ cargs += dep.get_compile_args()
+ ldargs += dep.get_link_args()
+ for o in target.objects:
+ # Add extracted objects to the link line by hand.
+ if isinstance(o, build.ExtractedObjects):
+ added_objs = set()
+ for objname_rel in self.determine_ext_objs(o):
+ objname_abs = os.path.join(self.environment.get_build_dir(), o.target.subdir, objname_rel)
+ if objname_abs not in added_objs:
+ added_objs.add(objname_abs)
+ ldargs += [r'\"' + objname_abs + r'\"']
+ generator_id = 0
+ for o in target.generated:
+ if isinstance(o, build.GeneratedList):
+ outputs = self.generator_outputs[target_name, generator_id]
+ generator_id += 1
+ for o_abs in outputs:
+ if o_abs.endswith('.o') or o_abs.endswith('.obj'):
+ ldargs += [r'\"' + o_abs + r'\"']
+ else:
+ if isinstance(o, build.CustomTarget):
+ (srcs, ofilenames, cmd) = self.eval_custom_target_command(o)
+ for ofname in ofilenames:
+ if os.path.splitext(ofname)[-1] in LINKABLE_EXTENSIONS:
+ ldargs += [r'\"' + os.path.join(self.environment.get_build_dir(), ofname) + r'\"']
+ elif isinstance(o, build.CustomTargetIndex):
+ for ofname in o.get_outputs():
+ if os.path.splitext(ofname)[-1] in LINKABLE_EXTENSIONS:
+ ldargs += [r'\"' + os.path.join(self.environment.get_build_dir(), ofname) + r'\"']
+ else:
+ raise RuntimeError(o)
+ if isinstance(target, build.SharedModule):
+ ldargs += linker.get_std_shared_module_link_args(target.get_options())
+ elif isinstance(target, build.SharedLibrary):
+ ldargs += linker.get_std_shared_lib_link_args()
+ ldstr = ' '.join(ldargs)
+ valid = self.buildconfmap[target_name][buildtype]
+ langargs = {}
+ for lang in self.environment.coredata.compilers[target.for_machine]:
+ if lang not in LANGNAMEMAP:
+ continue
+ compiler = target.compilers.get(lang)
+ if compiler is None:
+ continue
+ # Start with warning args
+ warn_args = compiler.get_warn_args(target.get_option(OptionKey('warning_level')))
+ copt_proxy = target.get_options()
+ std_args = compiler.get_option_compile_args(copt_proxy)
+ # Add compile args added using add_project_arguments()
+ pargs = self.build.projects_args[target.for_machine].get(target.subproject, {}).get(lang, [])
+ # Add compile args added using add_global_arguments()
+ # These override per-project arguments
+ gargs = self.build.global_args[target.for_machine].get(lang, [])
+ targs = target.get_extra_args(lang)
+ args = warn_args + std_args + pargs + gargs + targs
+ if lang == 'swift':
+ # For some reason putting Swift module dirs in HEADER_SEARCH_PATHS does not work,
+ # but adding -I/path to manual args does work.
+ swift_dep_dirs = self.determine_swift_dep_dirs(target)
+ for d in swift_dep_dirs:
+ args += compiler.get_include_args(d, False)
+ if args:
+ lang_cargs = cargs
+ if compiler and target.implicit_include_directories:
+ # It is unclear what the cwd is when Xcode runs. -I. does not seem to
+ # add the root build dir to the search path, so add an absolute path instead.
+ # This may break reproducible builds, in which case patches are welcome.
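+ # (absolute_path=True in the call below is what requests the absolute form)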
+ lang_cargs += self.get_custom_target_dir_include_args(target, compiler, absolute_path=True) + # Xcode cannot handle separate compilation flags for C and ObjectiveC. They are both + # put in OTHER_CFLAGS. Same with C++ and ObjectiveC++. + if lang == 'objc': + lang = 'c' + elif lang == 'objcpp': + lang = 'cpp' + langname = LANGNAMEMAP[lang] + if langname in langargs: + langargs[langname] += args + else: + langargs[langname] = args + langargs[langname] += lang_cargs + symroot = os.path.join(self.environment.get_build_dir(), target.subdir) + bt_dict = PbxDict() + objects_dict.add_item(valid, bt_dict, buildtype) + bt_dict.add_item('isa', 'XCBuildConfiguration') + settings_dict = PbxDict() + bt_dict.add_item('buildSettings', settings_dict) + settings_dict.add_item('COMBINE_HIDPI_IMAGES', 'YES') + if isinstance(target, build.SharedModule): + settings_dict.add_item('DYLIB_CURRENT_VERSION', '""') + settings_dict.add_item('DYLIB_COMPATIBILITY_VERSION', '""') + else: + if dylib_version is not None: + settings_dict.add_item('DYLIB_CURRENT_VERSION', f'"{dylib_version}"') + if target.prefix: + settings_dict.add_item('EXECUTABLE_PREFIX', target.prefix) + if target.suffix: + suffix = '.' + target.suffix + settings_dict.add_item('EXECUTABLE_SUFFIX', suffix) + settings_dict.add_item('GCC_GENERATE_DEBUGGING_SYMBOLS', BOOL2XCODEBOOL[target.get_option(OptionKey('debug'))]) + settings_dict.add_item('GCC_INLINES_ARE_PRIVATE_EXTERN', 'NO') + opt_flag = OPT2XCODEOPT[target.get_option(OptionKey('optimization'))] + if opt_flag is not None: + settings_dict.add_item('GCC_OPTIMIZATION_LEVEL', opt_flag) + if target.has_pch: + # Xcode uses GCC_PREFIX_HEADER which only allows one file per target/executable. Precompiling various header files and + # applying a particular pch to each source file will require custom scripts (as a build phase) and build flags per each + # file. Since Xcode itself already discourages precompiled headers in favor of modules we don't try much harder here. + pchs = target.get_pch('c') + target.get_pch('cpp') + target.get_pch('objc') + target.get_pch('objcpp') + # Make sure to use headers (other backends require implementation files like *.c *.cpp, etc; these should not be used here) + pchs = [pch for pch in pchs if pch.endswith('.h') or pch.endswith('.hh') or pch.endswith('hpp')] + if pchs: + if len(pchs) > 1: + mlog.warning(f'Unsupported Xcode configuration: More than 1 precompiled header found "{pchs!s}". 
Target "{target.name}" might not compile correctly.') + relative_pch_path = os.path.join(target.get_subdir(), pchs[0]) # Path relative to target so it can be used with "$(PROJECT_DIR)" + settings_dict.add_item('GCC_PRECOMPILE_PREFIX_HEADER', 'YES') + settings_dict.add_item('GCC_PREFIX_HEADER', f'"$(PROJECT_DIR)/{relative_pch_path}"') + settings_dict.add_item('GCC_PREPROCESSOR_DEFINITIONS', '""') + settings_dict.add_item('GCC_SYMBOLS_PRIVATE_EXTERN', 'NO') + header_arr = PbxArray() + unquoted_headers = [] + unquoted_headers.append(self.get_target_private_dir_abs(target)) + if target.implicit_include_directories: + unquoted_headers.append(os.path.join(self.environment.get_build_dir(), target.get_subdir())) + unquoted_headers.append(os.path.join(self.environment.get_source_dir(), target.get_subdir())) + if headerdirs: + for i in headerdirs: + i = os.path.normpath(i) + unquoted_headers.append(i) + for i in unquoted_headers: + header_arr.add_item(f'"\\"{i}\\""') + settings_dict.add_item('HEADER_SEARCH_PATHS', header_arr) + settings_dict.add_item('INSTALL_PATH', f'"{install_path}"') + settings_dict.add_item('LIBRARY_SEARCH_PATHS', '""') + if isinstance(target, build.SharedModule): + settings_dict.add_item('LIBRARY_STYLE', 'BUNDLE') + settings_dict.add_item('MACH_O_TYPE', 'mh_bundle') + elif isinstance(target, build.SharedLibrary): + settings_dict.add_item('LIBRARY_STYLE', 'DYNAMIC') + self.add_otherargs(settings_dict, langargs) + settings_dict.add_item('OTHER_LDFLAGS', f'"{ldstr}"') + settings_dict.add_item('OTHER_REZFLAGS', '""') + if ' ' in product_name: + settings_dict.add_item('PRODUCT_NAME', f'"{product_name}"') + else: + settings_dict.add_item('PRODUCT_NAME', product_name) + settings_dict.add_item('SECTORDER_FLAGS', '""') + settings_dict.add_item('SYMROOT', f'"{symroot}"') + sysheader_arr = PbxArray() + # XCode will change every -I flag that points inside these directories + # to an -isystem. Thus set nothing in it since we control our own + # include flags. + settings_dict.add_item('SYSTEM_HEADER_SEARCH_PATHS', sysheader_arr) + settings_dict.add_item('USE_HEADERMAP', 'NO') + warn_array = PbxArray() + settings_dict.add_item('WARNING_CFLAGS', warn_array) + warn_array.add_item('"$(inherited)"') + bt_dict.add_item('name', buildtype) + + def add_otherargs(self, settings_dict, langargs): + for langname, args in langargs.items(): + if args: + quoted_args = [] + for a in args: + # This works but + # a) it's ugly as sin + # b) I don't know why it works or why every backslash must be escaped into eight backslashes + a = a.replace(chr(92), 8*chr(92)) # chr(92) is backslash, this how we smuggle it in without Python's quoting grabbing it. 
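+                    # Illustrative walk-through: an argument -DMSG="x" has its
+                    # quotes rewritten to \\\" below, yielding -DMSG=\\\"x\\\" in
+                    # the pbxproj; the pbxproj parser unescapes that to
+                    # -DMSG=\"x\", and the shell level of the final compile
+                    # command sees -DMSG="x" again.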
+                    a = a.replace(r'"', r'\\\"')
+                    if ' ' in a or "'" in a:
+                        a = r'\"' + a + r'\"'
+                    quoted_args.append(a)
+                settings_dict.add_item(f'OTHER_{langname}FLAGS', '"' + ' '.join(quoted_args) + '"')
+
+    def generate_xc_configurationList(self, objects_dict: PbxDict) -> None:
+        # FIXME: sort items
+        conf_dict = PbxDict()
+        objects_dict.add_item(self.project_conflist, conf_dict, f'Build configuration list for PBXProject "{self.build.project_name}"')
+        conf_dict.add_item('isa', 'XCConfigurationList')
+        confs_arr = PbxArray()
+        conf_dict.add_item('buildConfigurations', confs_arr)
+        for buildtype in self.buildtypes:
+            confs_arr.add_item(self.project_configurations[buildtype], buildtype)
+        conf_dict.add_item('defaultConfigurationIsVisible', 0)
+        conf_dict.add_item('defaultConfigurationName', self.buildtype)
+
+        # Now the all target
+        all_dict = PbxDict()
+        objects_dict.add_item(self.all_buildconf_id, all_dict, 'Build configuration list for PBXAggregateTarget "ALL_BUILD"')
+        all_dict.add_item('isa', 'XCConfigurationList')
+        conf_arr = PbxArray()
+        all_dict.add_item('buildConfigurations', conf_arr)
+        for buildtype in self.buildtypes:
+            conf_arr.add_item(self.buildall_configurations[buildtype], buildtype)
+        all_dict.add_item('defaultConfigurationIsVisible', 0)
+        all_dict.add_item('defaultConfigurationName', self.buildtype)
+
+        # Test target
+        test_dict = PbxDict()
+        objects_dict.add_item(self.test_buildconf_id, test_dict, 'Build configuration list for PBXAggregateTarget "RUN_TEST"')
+        test_dict.add_item('isa', 'XCConfigurationList')
+        conf_arr = PbxArray()
+        test_dict.add_item('buildConfigurations', conf_arr)
+        for buildtype in self.buildtypes:
+            conf_arr.add_item(self.test_configurations[buildtype], buildtype)
+        test_dict.add_item('defaultConfigurationIsVisible', 0)
+        test_dict.add_item('defaultConfigurationName', self.buildtype)
+
+        # Regen target. It reuses the test build configurations.
+        regen_dict = PbxDict()
+        objects_dict.add_item(self.regen_buildconf_id, regen_dict, 'Build configuration list for PBXAggregateTarget "REGENERATE"')
+        regen_dict.add_item('isa', 'XCConfigurationList')
+        conf_arr = PbxArray()
+        regen_dict.add_item('buildConfigurations', conf_arr)
+        for buildtype in self.buildtypes:
+            conf_arr.add_item(self.test_configurations[buildtype], buildtype)
+        regen_dict.add_item('defaultConfigurationIsVisible', 0)
+        regen_dict.add_item('defaultConfigurationName', self.buildtype)
+
+        for target_name in self.build_targets:
+            t_dict = PbxDict()
+            listid = self.buildconflistmap[target_name]
+            objects_dict.add_item(listid, t_dict, f'Build configuration list for PBXNativeTarget "{target_name}"')
+            t_dict.add_item('isa', 'XCConfigurationList')
+            conf_arr = PbxArray()
+            t_dict.add_item('buildConfigurations', conf_arr)
+            idval = self.buildconfmap[target_name][self.buildtype]
+            conf_arr.add_item(idval, self.buildtype)
+            t_dict.add_item('defaultConfigurationIsVisible', 0)
+            t_dict.add_item('defaultConfigurationName', self.buildtype)
+
+        for target_name in self.custom_targets:
+            t_dict = PbxDict()
+            listid = self.buildconflistmap[target_name]
+            objects_dict.add_item(listid, t_dict, f'Build configuration list for PBXAggregateTarget "{target_name}"')
+            t_dict.add_item('isa', 'XCConfigurationList')
+            conf_arr = PbxArray()
+            t_dict.add_item('buildConfigurations', conf_arr)
+            idval = self.buildconfmap[target_name][self.buildtype]
+            conf_arr.add_item(idval, self.buildtype)
+            t_dict.add_item('defaultConfigurationIsVisible', 0)
+            t_dict.add_item('defaultConfigurationName', self.buildtype)
+
+    def generate_prefix(self, pbxdict: PbxDict) -> PbxDict:
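+        # The standard pbxproj preamble: 'archiveVersion' and 'objectVersion'
+        # identify the project file format revision (46 is the Xcode 3.2-era
+        # format, which current Xcode still accepts); the 'objects' dict
+        # accumulates every PBX object emitted by the generators above.
+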
pbxdict.add_item('archiveVersion', '1') + pbxdict.add_item('classes', PbxDict()) + pbxdict.add_item('objectVersion', '46') + objects_dict = PbxDict() + pbxdict.add_item('objects', objects_dict) + + return objects_dict + + def generate_suffix(self, pbxdict: PbxDict) -> None: + pbxdict.add_item('rootObject', self.project_uid, 'Project object') diff --git a/vendored-meson/meson/mesonbuild/build.py b/vendored-meson/meson/mesonbuild/build.py new file mode 100644 index 000000000000..0a4160fb1c6b --- /dev/null +++ b/vendored-meson/meson/mesonbuild/build.py @@ -0,0 +1,3120 @@ +# Copyright 2012-2017 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import annotations +from collections import defaultdict, OrderedDict +from dataclasses import dataclass, field, InitVar +from functools import lru_cache +import abc +import hashlib +import itertools, pathlib +import os +import pickle +import re +import textwrap +import typing as T + +from . import coredata +from . import environment +from . import dependencies +from . import mlog +from . import programs +from .mesonlib import ( + HoldableObject, SecondLevelHolder, + File, MesonException, MachineChoice, PerMachine, OrderedSet, listify, + extract_as_list, typeslistify, stringlistify, classify_unity_sources, + get_filenames_templates_dict, substitute_values, has_path_sep, + OptionKey, PerMachineDefaultable, OptionOverrideProxy, + MesonBugException, EnvironmentVariables, pickle_load, +) +from .compilers import ( + is_object, clink_langs, sort_clink, all_languages, + is_known_suffix, detect_static_linker +) +from .interpreterbase import FeatureNew, FeatureDeprecated + +if T.TYPE_CHECKING: + from typing_extensions import Literal + from ._typing import ImmutableListProtocol + from .backend.backends import Backend, ExecutableSerialisation + from .compilers import Compiler + from .interpreter.interpreter import Test, SourceOutputs, Interpreter + from .interpreterbase import SubProject + from .linkers.linkers import StaticLinker + from .mesonlib import FileMode, FileOrString + from .modules import ModuleState + from .mparser import BaseNode + from .wrap import WrapMode + + GeneratedTypes = T.Union['CustomTarget', 'CustomTargetIndex', 'GeneratedList'] + LibTypes = T.Union['SharedLibrary', 'StaticLibrary', 'CustomTarget', 'CustomTargetIndex'] + BuildTargetTypes = T.Union['BuildTarget', 'CustomTarget', 'CustomTargetIndex'] + ObjectTypes = T.Union[str, 'File', 'ExtractedObjects', 'GeneratedTypes'] + +pch_kwargs = {'c_pch', 'cpp_pch'} + +lang_arg_kwargs = {f'{lang}_args' for lang in all_languages} +lang_arg_kwargs |= { + 'd_import_dirs', + 'd_unittest', + 'd_module_versions', + 'd_debug', +} + +vala_kwargs = {'vala_header', 'vala_gir', 'vala_vapi'} +rust_kwargs = {'rust_crate_type', 'rust_dependency_map'} +cs_kwargs = {'resources', 'cs_args'} + +buildtarget_kwargs = { + 'build_by_default', + 'build_rpath', + 'dependencies', + 'extra_files', + 'gui_app', + 'link_with', + 'link_whole', + 'link_args', + 'link_depends', + 
'implicit_include_directories', + 'include_directories', + 'install', + 'install_rpath', + 'install_dir', + 'install_mode', + 'install_tag', + 'name_prefix', + 'name_suffix', + 'native', + 'objects', + 'override_options', + 'sources', + 'gnu_symbol_visibility', + 'link_language', + 'win_subsystem', +} + +known_build_target_kwargs = ( + buildtarget_kwargs | + lang_arg_kwargs | + pch_kwargs | + vala_kwargs | + rust_kwargs | + cs_kwargs) + +known_exe_kwargs = known_build_target_kwargs | {'implib', 'export_dynamic', 'pie'} +known_shlib_kwargs = known_build_target_kwargs | {'version', 'soversion', 'vs_module_defs', 'darwin_versions'} +known_shmod_kwargs = known_build_target_kwargs | {'vs_module_defs'} +known_stlib_kwargs = known_build_target_kwargs | {'pic', 'prelink'} +known_jar_kwargs = known_exe_kwargs | {'main_class', 'java_resources'} + +def _process_install_tag(install_tag: T.Optional[T.List[T.Optional[str]]], + num_outputs: int) -> T.List[T.Optional[str]]: + _install_tag: T.List[T.Optional[str]] + if not install_tag: + _install_tag = [None] * num_outputs + elif len(install_tag) == 1: + _install_tag = install_tag * num_outputs + else: + _install_tag = install_tag + return _install_tag + + +@lru_cache(maxsize=None) +def get_target_macos_dylib_install_name(ld) -> str: + name = ['@rpath/', ld.prefix, ld.name] + if ld.soversion is not None: + name.append('.' + ld.soversion) + name.append('.dylib') + return ''.join(name) + +class InvalidArguments(MesonException): + pass + +@dataclass(eq=False) +class DependencyOverride(HoldableObject): + dep: dependencies.Dependency + node: 'BaseNode' + explicit: bool = True + +@dataclass(eq=False) +class Headers(HoldableObject): + sources: T.List[File] + install_subdir: T.Optional[str] + custom_install_dir: T.Optional[str] + custom_install_mode: 'FileMode' + subproject: str + + # TODO: we really don't need any of these methods, but they're preserved to + # keep APIs relying on them working. 
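+    # (Callers could read or assign the dataclass fields directly; the methods
+    # below only mirror them.)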
+ + def set_install_subdir(self, subdir: str) -> None: + self.install_subdir = subdir + + def get_install_subdir(self) -> T.Optional[str]: + return self.install_subdir + + def get_sources(self) -> T.List[File]: + return self.sources + + def get_custom_install_dir(self) -> T.Optional[str]: + return self.custom_install_dir + + def get_custom_install_mode(self) -> 'FileMode': + return self.custom_install_mode + + +@dataclass(eq=False) +class Man(HoldableObject): + sources: T.List[File] + custom_install_dir: T.Optional[str] + custom_install_mode: 'FileMode' + subproject: str + locale: T.Optional[str] + + def get_custom_install_dir(self) -> T.Optional[str]: + return self.custom_install_dir + + def get_custom_install_mode(self) -> 'FileMode': + return self.custom_install_mode + + def get_sources(self) -> T.List['File']: + return self.sources + + +@dataclass(eq=False) +class EmptyDir(HoldableObject): + path: str + install_mode: 'FileMode' + subproject: str + install_tag: T.Optional[str] = None + + +@dataclass(eq=False) +class InstallDir(HoldableObject): + source_subdir: str + installable_subdir: str + install_dir: str + install_dir_name: str + install_mode: 'FileMode' + exclude: T.Tuple[T.Set[str], T.Set[str]] + strip_directory: bool + subproject: str + from_source_dir: bool = True + install_tag: T.Optional[str] = None + +@dataclass(eq=False) +class DepManifest: + version: str + license: T.List[str] + license_files: T.List[T.Tuple[str, File]] + subproject: str + + def to_json(self) -> T.Dict[str, T.Union[str, T.List[str]]]: + return { + 'version': self.version, + 'license': self.license, + 'license_files': [l[1].relative_name() for l in self.license_files], + } + + +# literally everything isn't dataclass stuff +class Build: + """A class that holds the status of one build including + all dependencies and so on. + """ + + def __init__(self, environment: environment.Environment): + self.version = coredata.version + self.project_name = 'name of master project' + self.project_version = None + self.environment = environment + self.projects = {} + self.targets: 'T.OrderedDict[str, T.Union[CustomTarget, BuildTarget]]' = OrderedDict() + self.global_args: PerMachine[T.Dict[str, T.List[str]]] = PerMachine({}, {}) + self.global_link_args: PerMachine[T.Dict[str, T.List[str]]] = PerMachine({}, {}) + self.projects_args: PerMachine[T.Dict[str, T.Dict[str, T.List[str]]]] = PerMachine({}, {}) + self.projects_link_args: PerMachine[T.Dict[str, T.Dict[str, T.List[str]]]] = PerMachine({}, {}) + self.tests: T.List['Test'] = [] + self.benchmarks: T.List['Test'] = [] + self.headers: T.List[Headers] = [] + self.man: T.List[Man] = [] + self.emptydir: T.List[EmptyDir] = [] + self.data: T.List[Data] = [] + self.symlinks: T.List[SymlinkData] = [] + self.static_linker: PerMachine[StaticLinker] = PerMachine(None, None) + self.subprojects = {} + self.subproject_dir = '' + self.install_scripts: T.List['ExecutableSerialisation'] = [] + self.postconf_scripts: T.List['ExecutableSerialisation'] = [] + self.dist_scripts: T.List['ExecutableSerialisation'] = [] + self.install_dirs: T.List[InstallDir] = [] + self.dep_manifest_name: T.Optional[str] = None + self.dep_manifest: T.Dict[str, DepManifest] = {} + self.stdlibs = PerMachine({}, {}) + self.test_setups: T.Dict[str, TestSetup] = {} + self.test_setup_default_name = None + self.find_overrides: T.Dict[str, T.Union['Executable', programs.ExternalProgram, programs.OverrideProgram]] = {} + self.searched_programs: T.Set[str] = set() # The list of all programs that have been searched for. 
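+        # find_overrides holds the results of meson.override_find_program(), so
+        # that later find_program() calls resolve to the overriding program.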
+ + # If we are doing a cross build we need two caches, if we're doing a + # build == host compilation the both caches should point to the same place. + self.dependency_overrides: PerMachine[T.Dict[T.Tuple, DependencyOverride]] = PerMachineDefaultable.default( + environment.is_cross_build(), {}, {}) + self.devenv: T.List[EnvironmentVariables] = [] + self.modules: T.List[str] = [] + + def get_build_targets(self): + build_targets = OrderedDict() + for name, t in self.targets.items(): + if isinstance(t, BuildTarget): + build_targets[name] = t + return build_targets + + def get_custom_targets(self): + custom_targets = OrderedDict() + for name, t in self.targets.items(): + if isinstance(t, CustomTarget): + custom_targets[name] = t + return custom_targets + + def copy(self) -> Build: + other = Build(self.environment) + for k, v in self.__dict__.items(): + if isinstance(v, (list, dict, set, OrderedDict)): + other.__dict__[k] = v.copy() + else: + other.__dict__[k] = v + return other + + def merge(self, other: Build) -> None: + for k, v in other.__dict__.items(): + self.__dict__[k] = v + + def ensure_static_linker(self, compiler: Compiler) -> None: + if self.static_linker[compiler.for_machine] is None and compiler.needs_static_linker(): + self.static_linker[compiler.for_machine] = detect_static_linker(self.environment, compiler) + + def get_project(self): + return self.projects[''] + + def get_subproject_dir(self): + return self.subproject_dir + + def get_targets(self) -> 'T.OrderedDict[str, T.Union[CustomTarget, BuildTarget]]': + return self.targets + + def get_tests(self) -> T.List['Test']: + return self.tests + + def get_benchmarks(self) -> T.List['Test']: + return self.benchmarks + + def get_headers(self) -> T.List['Headers']: + return self.headers + + def get_man(self) -> T.List['Man']: + return self.man + + def get_data(self) -> T.List['Data']: + return self.data + + def get_symlinks(self) -> T.List['SymlinkData']: + return self.symlinks + + def get_emptydir(self) -> T.List['EmptyDir']: + return self.emptydir + + def get_install_subdirs(self) -> T.List['InstallDir']: + return self.install_dirs + + def get_global_args(self, compiler: 'Compiler', for_machine: 'MachineChoice') -> T.List[str]: + d = self.global_args[for_machine] + return d.get(compiler.get_language(), []) + + def get_project_args(self, compiler: 'Compiler', project: str, for_machine: 'MachineChoice') -> T.List[str]: + d = self.projects_args[for_machine] + args = d.get(project) + if not args: + return [] + return args.get(compiler.get_language(), []) + + def get_global_link_args(self, compiler: 'Compiler', for_machine: 'MachineChoice') -> T.List[str]: + d = self.global_link_args[for_machine] + return d.get(compiler.get_language(), []) + + def get_project_link_args(self, compiler: 'Compiler', project: str, for_machine: 'MachineChoice') -> T.List[str]: + d = self.projects_link_args[for_machine] + + link_args = d.get(project) + if not link_args: + return [] + + return link_args.get(compiler.get_language(), []) + +@dataclass(eq=False) +class IncludeDirs(HoldableObject): + + """Internal representation of an include_directories call.""" + + curdir: str + incdirs: T.List[str] + is_system: bool + # Interpreter has validated that all given directories + # actually exist. 
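+    # Illustrative example: include_directories('inc') in src/meson.build yields
+    # curdir='src', incdirs=['inc']; to_string_list() below expands these against
+    # the source root and, if given, the build root.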
+ extra_build_dirs: T.List[str] = field(default_factory=list) + + def __repr__(self) -> str: + r = '<{} {}/{}>' + return r.format(self.__class__.__name__, self.curdir, self.incdirs) + + def get_curdir(self) -> str: + return self.curdir + + def get_incdirs(self) -> T.List[str]: + return self.incdirs + + def get_extra_build_dirs(self) -> T.List[str]: + return self.extra_build_dirs + + def to_string_list(self, sourcedir: str, builddir: T.Optional[str] = None) -> T.List[str]: + """Convert IncludeDirs object to a list of strings. + + :param sourcedir: The absolute source directory + :param builddir: The absolute build directory, option, build dir will not + be added if this is unset + :returns: A list of strings (without compiler argument) + """ + strlist: T.List[str] = [] + for idir in self.incdirs: + strlist.append(os.path.join(sourcedir, self.curdir, idir)) + if builddir: + strlist.append(os.path.join(builddir, self.curdir, idir)) + return strlist + +@dataclass(eq=False) +class ExtractedObjects(HoldableObject): + ''' + Holds a list of sources for which the objects must be extracted + ''' + target: 'BuildTarget' + srclist: T.List[File] = field(default_factory=list) + genlist: T.List['GeneratedTypes'] = field(default_factory=list) + objlist: T.List[T.Union[str, 'File', 'ExtractedObjects']] = field(default_factory=list) + recursive: bool = True + pch: bool = False + + def __post_init__(self) -> None: + if self.target.is_unity: + self.check_unity_compatible() + + def __repr__(self) -> str: + r = '<{0} {1!r}: {2}>' + return r.format(self.__class__.__name__, self.target.name, self.srclist) + + @staticmethod + def get_sources(sources: T.Sequence['FileOrString'], generated_sources: T.Sequence['GeneratedTypes']) -> T.List['FileOrString']: + # Merge sources and generated sources + sources = list(sources) + for gensrc in generated_sources: + for s in gensrc.get_outputs(): + # We cannot know the path where this source will be generated, + # but all we need here is the file extension to determine the + # compiler. + sources.append(s) + + # Filter out headers and all non-source files + return [s for s in sources if environment.is_source(s)] + + def classify_all_sources(self, sources: T.List[FileOrString], generated_sources: T.Sequence['GeneratedTypes']) -> T.Dict['Compiler', T.List['FileOrString']]: + sources_ = self.get_sources(sources, generated_sources) + return classify_unity_sources(self.target.compilers.values(), sources_) + + def check_unity_compatible(self) -> None: + # Figure out if the extracted object list is compatible with a Unity + # build. When we're doing a Unified build, we go through the sources, + # and create a single source file from each subset of the sources that + # can be compiled with a specific compiler. Then we create one object + # from each unified source file. So for each compiler we can either + # extra all its sources or none. + cmpsrcs = self.classify_all_sources(self.target.sources, self.target.generated) + extracted_cmpsrcs = self.classify_all_sources(self.srclist, self.genlist) + + for comp, srcs in extracted_cmpsrcs.items(): + if set(srcs) != set(cmpsrcs[comp]): + raise MesonException('Single object files cannot be extracted ' + 'in Unity builds. You can only extract all ' + 'the object files for each compiler at once.') + + +@dataclass(eq=False, order=False) +class StructuredSources(HoldableObject): + + """A container for sources in languages that use filesystem hierarchy. 
+ + Languages like Rust and Cython rely on the layout of files in the filesystem + as part of the compiler implementation. This structure allows us to + represent the required filesystem layout. + """ + + sources: T.DefaultDict[str, T.List[T.Union[File, CustomTarget, CustomTargetIndex, GeneratedList]]] = field( + default_factory=lambda: defaultdict(list)) + + def __add__(self, other: StructuredSources) -> StructuredSources: + sources = self.sources.copy() + for k, v in other.sources.items(): + sources[k].extend(v) + return StructuredSources(sources) + + def __bool__(self) -> bool: + return bool(self.sources) + + def first_file(self) -> T.Union[File, CustomTarget, CustomTargetIndex, GeneratedList]: + """Get the first source in the root + + :return: The first source in the root + """ + return self.sources[''][0] + + def as_list(self) -> T.List[T.Union[File, CustomTarget, CustomTargetIndex, GeneratedList]]: + return list(itertools.chain.from_iterable(self.sources.values())) + + def needs_copy(self) -> bool: + """Do we need to create a structure in the build directory. + + This allows us to avoid making copies if the structures exists in the + source dir. Which could happen in situations where a generated source + only exists in some configurations + """ + for files in self.sources.values(): + for f in files: + if isinstance(f, File): + if f.is_built: + return True + else: + return True + return False + + +@dataclass(eq=False) +class Target(HoldableObject, metaclass=abc.ABCMeta): + + name: str + subdir: str + subproject: 'SubProject' + build_by_default: bool + for_machine: MachineChoice + environment: environment.Environment + install: bool = False + build_always_stale: bool = False + extra_files: T.List[File] = field(default_factory=list) + override_options: InitVar[T.Optional[T.Dict[OptionKey, str]]] = None + + @abc.abstractproperty + def typename(self) -> str: + pass + + @abc.abstractmethod + def type_suffix(self) -> str: + pass + + def __post_init__(self, overrides: T.Optional[T.Dict[OptionKey, str]]) -> None: + if overrides: + ovr = {k.evolve(machine=self.for_machine) if k.lang else k: v + for k, v in overrides.items()} + else: + ovr = {} + self.options = OptionOverrideProxy(ovr, self.environment.coredata.options, self.subproject) + # XXX: this should happen in the interpreter + if has_path_sep(self.name): + # Fix failing test 53 when this becomes an error. + mlog.warning(textwrap.dedent(f'''\ + Target "{self.name}" has a path separator in its name. + This is not supported, it can cause unexpected failures and will become + a hard error in the future.''')) + + # dataclass comparators? + def __lt__(self, other: object) -> bool: + if not isinstance(other, Target): + return NotImplemented + return self.get_id() < other.get_id() + + def __le__(self, other: object) -> bool: + if not isinstance(other, Target): + return NotImplemented + return self.get_id() <= other.get_id() + + def __gt__(self, other: object) -> bool: + if not isinstance(other, Target): + return NotImplemented + return self.get_id() > other.get_id() + + def __ge__(self, other: object) -> bool: + if not isinstance(other, Target): + return NotImplemented + return self.get_id() >= other.get_id() + + def get_default_install_dir(self) -> T.Tuple[str, str]: + raise NotImplementedError + + def get_custom_install_dir(self) -> T.List[T.Union[str, Literal[False]]]: + raise NotImplementedError + + def get_install_dir(self) -> T.Tuple[T.List[T.Union[str, Literal[False]]], str, Literal[False]]: + # Find the installation directory. 
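+        # Illustrative example: install_dir: ['mydir', false] installs the first
+        # output into 'mydir' and marks the second output as not installed.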
+ default_install_dir, default_install_dir_name = self.get_default_install_dir() + outdirs = self.get_custom_install_dir() + if outdirs and outdirs[0] != default_install_dir and outdirs[0] is not True: + # Either the value is set to a non-default value, or is set to + # False (which means we want this specific output out of many + # outputs to not be installed). + custom_install_dir = True + install_dir_names = [getattr(i, 'optname', None) for i in outdirs] + else: + custom_install_dir = False + # if outdirs is empty we need to set to something, otherwise we set + # only the first value to the default. + if outdirs: + outdirs[0] = default_install_dir + else: + outdirs = [default_install_dir] + install_dir_names = [default_install_dir_name] * len(outdirs) + + return outdirs, install_dir_names, custom_install_dir + + def get_basename(self) -> str: + return self.name + + def get_subdir(self) -> str: + return self.subdir + + def get_typename(self) -> str: + return self.typename + + @staticmethod + def _get_id_hash(target_id: str) -> str: + # We don't really need cryptographic security here. + # Small-digest hash function with unlikely collision is good enough. + h = hashlib.sha256() + h.update(target_id.encode(encoding='utf-8', errors='replace')) + # This ID should be case-insensitive and should work in Visual Studio, + # e.g. it should not start with leading '-'. + return h.hexdigest()[:7] + + @staticmethod + def construct_id_from_path(subdir: str, name: str, type_suffix: str) -> str: + """Construct target ID from subdir, name and type suffix. + + This helper function is made public mostly for tests.""" + # This ID must also be a valid file name on all OSs. + # It should also avoid shell metacharacters for obvious + # reasons. '@' is not used as often as '_' in source code names. + # In case of collisions consider using checksums. + # FIXME replace with assert when slash in names is prohibited + name_part = name.replace('/', '@').replace('\\', '@') + assert not has_path_sep(type_suffix) + my_id = name_part + type_suffix + if subdir: + subdir_part = Target._get_id_hash(subdir) + # preserve myid for better debuggability + return subdir_part + '@@' + my_id + return my_id + + def get_id(self) -> str: + return self.construct_id_from_path( + self.subdir, self.name, self.type_suffix()) + + def process_kwargs_base(self, kwargs: T.Dict[str, T.Any]) -> None: + if 'build_by_default' in kwargs: + self.build_by_default = kwargs['build_by_default'] + if not isinstance(self.build_by_default, bool): + raise InvalidArguments('build_by_default must be a boolean value.') + elif kwargs.get('install', False): + # For backward compatibility, if build_by_default is not explicitly + # set, use the value of 'install' if it's enabled. + self.build_by_default = True + + self.set_option_overrides(self.parse_overrides(kwargs)) + + def set_option_overrides(self, option_overrides: T.Dict[OptionKey, str]) -> None: + self.options.overrides = {} + for k, v in option_overrides.items(): + if k.lang: + self.options.overrides[k.evolve(machine=self.for_machine)] = v + else: + self.options.overrides[k] = v + + def get_options(self) -> OptionOverrideProxy: + return self.options + + def get_option(self, key: 'OptionKey') -> T.Union[str, int, bool, 'WrapMode']: + # We don't actually have wrapmode here to do an assert, so just do a + # cast, we know what's in coredata anyway. 
+ # TODO: if it's possible to annotate get_option or validate_option_value + # in the future we might be able to remove the cast here + return T.cast('T.Union[str, int, bool, WrapMode]', self.options[key].value) + + @staticmethod + def parse_overrides(kwargs: T.Dict[str, T.Any]) -> T.Dict[OptionKey, str]: + opts = kwargs.get('override_options', []) + + # In this case we have an already parsed and ready to go dictionary + # provided by typed_kwargs + if isinstance(opts, dict): + return T.cast('T.Dict[OptionKey, str]', opts) + + result: T.Dict[OptionKey, str] = {} + overrides = stringlistify(opts) + for o in overrides: + if '=' not in o: + raise InvalidArguments('Overrides must be of form "key=value"') + k, v = o.split('=', 1) + key = OptionKey.from_string(k.strip()) + v = v.strip() + result[key] = v + return result + + def is_linkable_target(self) -> bool: + return False + + def get_outputs(self) -> T.List[str]: + return [] + + def should_install(self) -> bool: + return False + +class BuildTarget(Target): + known_kwargs = known_build_target_kwargs + + install_dir: T.List[T.Union[str, Literal[False]]] + + # This set contains all the languages a linker can link natively + # without extra flags. For instance, nvcc (cuda) can link C++ + # without injecting -lc++/-lstdc++, see + # https://github.com/mesonbuild/meson/issues/10570 + _MASK_LANGS: T.FrozenSet[T.Tuple[str, str]] = frozenset([ + # (language, linker) + ('cpp', 'cuda'), + ]) + + def __init__( + self, + name: str, + subdir: str, + subproject: SubProject, + for_machine: MachineChoice, + sources: T.List['SourceOutputs'], + structured_sources: T.Optional[StructuredSources], + objects: T.List[ObjectTypes], + environment: environment.Environment, + compilers: T.Dict[str, 'Compiler'], + kwargs): + super().__init__(name, subdir, subproject, True, for_machine, environment) + self.all_compilers = compilers + self.compilers = OrderedDict() # type: OrderedDict[str, Compiler] + self.objects: T.List[ObjectTypes] = [] + self.structured_sources = structured_sources + self.external_deps: T.List[dependencies.Dependency] = [] + self.include_dirs: T.List['IncludeDirs'] = [] + self.link_language = kwargs.get('link_language') + self.link_targets: T.List[LibTypes] = [] + self.link_whole_targets: T.List[T.Union[StaticLibrary, CustomTarget, CustomTargetIndex]] = [] + self.link_depends = [] + self.added_deps = set() + self.name_prefix_set = False + self.name_suffix_set = False + self.filename = 'no_name' + # The list of all files outputted by this target. Useful in cases such + # as Vala which generates .vapi and .h besides the compiled output. + self.outputs = [self.filename] + self.need_install = False + self.pch: T.Dict[str, T.List[str]] = {} + self.extra_args: T.Dict[str, T.List['FileOrString']] = {} + self.sources: T.List[File] = [] + self.generated: T.List['GeneratedTypes'] = [] + self.extra_files: T.List[File] = [] + self.d_features = defaultdict(list) + self.pic = False + self.pie = False + # Track build_rpath entries so we can remove them at install time + self.rpath_dirs_to_remove: T.Set[bytes] = set() + self.process_sourcelist(sources) + # Objects can be: + # 1. Preexisting objects provided by the user with the `objects:` kwarg + # 2. Compiled objects created by and extracted from another target + self.process_objectlist(objects) + self.process_kwargs(kwargs) + self.missing_languages = self.process_compilers() + + # self.link_targets and self.link_whole_targets contains libraries from + # dependencies (see add_deps()). 
They have not been processed yet because + # we have to call process_compilers() first and we need to process libraries + # from link_with and link_whole first. + # See https://github.com/mesonbuild/meson/pull/11957#issuecomment-1629243208. + link_targets = extract_as_list(kwargs, 'link_with') + self.link_targets + link_whole_targets = extract_as_list(kwargs, 'link_whole') + self.link_whole_targets + self.link_targets.clear() + self.link_whole_targets.clear() + self.link(link_targets) + self.link_whole(link_whole_targets) + + if not any([self.sources, self.generated, self.objects, self.link_whole_targets, self.structured_sources, + kwargs.pop('_allow_no_sources', False)]): + mlog.warning(f'Build target {name} has no sources. ' + 'This was never supposed to be allowed but did because of a bug, ' + 'support will be removed in a future release of Meson') + self.check_unknown_kwargs(kwargs) + self.validate_install() + self.check_module_linking() + + def post_init(self) -> None: + ''' Initialisations and checks requiring the final list of compilers to be known + ''' + self.validate_sources() + if self.structured_sources and any([self.sources, self.generated]): + raise MesonException('cannot mix structured sources and unstructured sources') + if self.structured_sources and 'rust' not in self.compilers: + raise MesonException('structured sources are only supported in Rust targets') + if self.uses_rust(): + # relocation-model=pic is rustc's default and Meson does not + # currently have a way to disable PIC. + self.pic = True + + def __repr__(self): + repr_str = "<{0} {1}: {2}>" + return repr_str.format(self.__class__.__name__, self.get_id(), self.filename) + + def __str__(self): + return f"{self.name}" + + @property + def is_unity(self) -> bool: + unity_opt = self.get_option(OptionKey('unity')) + return unity_opt == 'on' or (unity_opt == 'subprojects' and self.subproject != '') + + def validate_install(self): + if self.for_machine is MachineChoice.BUILD and self.need_install: + if self.environment.is_cross_build(): + raise InvalidArguments('Tried to install a target for the build machine in a cross build.') + else: + mlog.warning('Installing target build for the build machine. This will fail in a cross build.') + + def check_unknown_kwargs(self, kwargs): + # Override this method in derived classes that have more + # keywords. + self.check_unknown_kwargs_int(kwargs, self.known_kwargs) + + def check_unknown_kwargs_int(self, kwargs, known_kwargs): + unknowns = [] + for k in kwargs: + if k not in known_kwargs: + unknowns.append(k) + if len(unknowns) > 0: + mlog.warning('Unknown keyword argument(s) in target {}: {}.'.format(self.name, ', '.join(unknowns))) + + def process_objectlist(self, objects): + assert isinstance(objects, list) + for s in objects: + if isinstance(s, (str, File, ExtractedObjects)): + self.objects.append(s) + elif isinstance(s, (CustomTarget, CustomTargetIndex, GeneratedList)): + non_objects = [o for o in s.get_outputs() if not is_object(o)] + if non_objects: + raise InvalidArguments(f'Generated file {non_objects[0]} in the \'objects\' kwarg is not an object.') + self.generated.append(s) + else: + raise InvalidArguments(f'Bad object of type {type(s).__name__!r} in target {self.name!r}.') + + def process_sourcelist(self, sources: T.List['SourceOutputs']) -> None: + """Split sources into generated and static sources. + + Sources can be: + 1. Preexisting source files in the source tree (static) + 2. Preexisting sources generated by configure_file in the build tree. 
+ (static as they are only regenerated if meson itself is regenerated) + 3. Sources files generated by another target or a Generator (generated) + """ + added_sources: T.Set[File] = set() # If the same source is defined multiple times, use it only once. + for s in sources: + if isinstance(s, File): + if s not in added_sources: + self.sources.append(s) + added_sources.add(s) + elif isinstance(s, (CustomTarget, CustomTargetIndex, GeneratedList)): + self.generated.append(s) + + @staticmethod + def can_compile_remove_sources(compiler: 'Compiler', sources: T.List['FileOrString']) -> bool: + removed = False + for s in sources[:]: + if compiler.can_compile(s): + sources.remove(s) + removed = True + return removed + + def process_compilers_late(self): + """Processes additional compilers after kwargs have been evaluated. + + This can add extra compilers that might be required by keyword + arguments, such as link_with or dependencies. It will also try to guess + which compiler to use if one hasn't been selected already. + """ + for lang in self.missing_languages: + self.compilers[lang] = self.all_compilers[lang] + + # did user override clink_langs for this target? + link_langs = [self.link_language] if self.link_language else clink_langs + + # If this library is linked against another library we need to consider + # the languages of those libraries as well. + if self.link_targets or self.link_whole_targets: + for t in itertools.chain(self.link_targets, self.link_whole_targets): + if isinstance(t, (CustomTarget, CustomTargetIndex)): + continue # We can't know anything about these. + for name, compiler in t.compilers.items(): + if name in link_langs and name not in self.compilers: + self.compilers[name] = compiler + + if not self.compilers: + # No source files or parent targets, target consists of only object + # files of unknown origin. Just add the first clink compiler + # that we have and hope that it can link these objects + for lang in link_langs: + if lang in self.all_compilers: + self.compilers[lang] = self.all_compilers[lang] + break + + # Now that we have the final list of compilers we can sort it according + # to clink_langs and do sanity checks. + self.compilers = OrderedDict(sorted(self.compilers.items(), + key=lambda t: sort_clink(t[0]))) + self.post_init() + + def process_compilers(self) -> T.List[str]: + ''' + Populate self.compilers, which is the list of compilers that this + target will use for compiling all its sources. + We also add compilers that were used by extracted objects to simplify + dynamic linker determination. + Returns a list of missing languages that we can add implicitly, such as + C/C++ compiler for cython. + ''' + missing_languages: T.List[str] = [] + if not any([self.sources, self.generated, self.objects, self.structured_sources]): + return missing_languages + # Preexisting sources + sources: T.List['FileOrString'] = list(self.sources) + generated = self.generated.copy() + + if self.structured_sources: + for v in self.structured_sources.sources.values(): + for src in v: + if isinstance(src, (str, File)): + sources.append(src) + else: + generated.append(src) + + # All generated sources + for gensrc in generated: + for s in gensrc.get_outputs(): + # Generated objects can't be compiled, so don't use them for + # compiler detection. If our target only has generated objects, + # we will fall back to using the first c-like compiler we find, + # which is what we need. 
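+                # e.g. a custom_target emitting ['gen.c', 'gen.o'] (hypothetical
+                # names) contributes only 'gen.c' to compiler detection below.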
+ if not is_object(s): + sources.append(s) + for d in self.external_deps: + for s in d.sources: + if isinstance(s, (str, File)): + sources.append(s) + + # Sources that were used to create our extracted objects + for o in self.objects: + if not isinstance(o, ExtractedObjects): + continue + compsrcs = o.classify_all_sources(o.srclist, []) + for comp in compsrcs: + # Don't add Vala sources since that will pull in the Vala + # compiler even though we will never use it since we are + # dealing with compiled C code. + if comp.language == 'vala': + continue + if comp.language not in self.compilers: + self.compilers[comp.language] = comp + if sources: + # For each source, try to add one compiler that can compile it. + # + # If it has a suffix that belongs to a known language, we must have + # a compiler for that language. + # + # Otherwise, it's ok if no compilers can compile it, because users + # are expected to be able to add arbitrary non-source files to the + # sources list + for s in sources: + for lang, compiler in self.all_compilers.items(): + if compiler.can_compile(s): + if lang not in self.compilers: + self.compilers[lang] = compiler + break + else: + if is_known_suffix(s): + path = pathlib.Path(str(s)).as_posix() + m = f'No {self.for_machine.get_lower_case_name()} machine compiler for {path!r}' + raise MesonException(m) + + # If all our sources are Vala, our target also needs the C compiler but + # it won't get added above. + if 'vala' in self.compilers and 'c' not in self.compilers: + self.compilers['c'] = self.all_compilers['c'] + if 'cython' in self.compilers: + key = OptionKey('language', machine=self.for_machine, lang='cython') + value = self.get_option(key) + + try: + self.compilers[value] = self.all_compilers[value] + except KeyError: + missing_languages.append(value) + + return missing_languages + + def validate_sources(self): + if len(self.compilers) > 1 and any(lang in self.compilers for lang in ['cs', 'java']): + langs = ', '.join(self.compilers.keys()) + raise InvalidArguments(f'Cannot mix those languages into a target: {langs}') + + def process_link_depends(self, sources): + """Process the link_depends keyword argument. + + This is designed to handle strings, Files, and the output of Custom + Targets. Notably it doesn't handle generator() returned objects, since + adding them as a link depends would inherently cause them to be + generated twice, since the output needs to be passed to the ld_args and + link_depends. 
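+
+        Typical usage (illustrative): a linker version script passed in
+        link_args, with the same file listed in link_depends so the target is
+        relinked whenever the script changes.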
+ """ + sources = listify(sources) + for s in sources: + if isinstance(s, File): + self.link_depends.append(s) + elif isinstance(s, str): + self.link_depends.append( + File.from_source_file(self.environment.source_dir, self.subdir, s)) + elif hasattr(s, 'get_outputs'): + self.link_depends.append(s) + else: + raise InvalidArguments( + 'Link_depends arguments must be strings, Files, ' + 'or a Custom Target, or lists thereof.') + + def extract_objects(self, srclist: T.List[T.Union['FileOrString', 'GeneratedTypes']]) -> ExtractedObjects: + sources_set = set(self.sources) + generated_set = set(self.generated) + + obj_src: T.List['File'] = [] + obj_gen: T.List['GeneratedTypes'] = [] + for src in srclist: + if isinstance(src, (str, File)): + if isinstance(src, str): + src = File(False, self.subdir, src) + else: + FeatureNew.single_use('File argument for extract_objects', '0.50.0', self.subproject) + if src not in sources_set: + raise MesonException(f'Tried to extract unknown source {src}.') + obj_src.append(src) + elif isinstance(src, (CustomTarget, CustomTargetIndex, GeneratedList)): + FeatureNew.single_use('Generated sources for extract_objects', '0.61.0', self.subproject) + target = src.target if isinstance(src, CustomTargetIndex) else src + if src not in generated_set and target not in generated_set: + raise MesonException(f'Tried to extract unknown source {target.get_basename()}.') + obj_gen.append(src) + else: + raise MesonException(f'Object extraction arguments must be strings, Files or targets (got {type(src).__name__}).') + return ExtractedObjects(self, obj_src, obj_gen) + + def extract_all_objects(self, recursive: bool = True) -> ExtractedObjects: + return ExtractedObjects(self, self.sources, self.generated, self.objects, + recursive, pch=True) + + def get_all_link_deps(self) -> ImmutableListProtocol[BuildTargetTypes]: + return self.get_transitive_link_deps() + + @lru_cache(maxsize=None) + def get_transitive_link_deps(self) -> ImmutableListProtocol[BuildTargetTypes]: + result: T.List[Target] = [] + for i in self.link_targets: + result += i.get_all_link_deps() + return result + + def get_link_deps_mapping(self, prefix: str) -> T.Mapping[str, str]: + return self.get_transitive_link_deps_mapping(prefix) + + @lru_cache(maxsize=None) + def get_transitive_link_deps_mapping(self, prefix: str) -> T.Mapping[str, str]: + result: T.Dict[str, str] = {} + for i in self.link_targets: + mapping = i.get_link_deps_mapping(prefix) + #we are merging two dictionaries, while keeping the earlier one dominant + result_tmp = mapping.copy() + result_tmp.update(result) + result = result_tmp + return result + + @lru_cache(maxsize=None) + def get_link_dep_subdirs(self) -> T.AbstractSet[str]: + result: OrderedSet[str] = OrderedSet() + for i in self.link_targets: + if not isinstance(i, StaticLibrary): + result.add(i.get_subdir()) + result.update(i.get_link_dep_subdirs()) + return result + + def get_default_install_dir(self) -> T.Tuple[str, str]: + return self.environment.get_libdir(), '{libdir}' + + def get_custom_install_dir(self) -> T.List[T.Union[str, Literal[False]]]: + return self.install_dir + + def get_custom_install_mode(self) -> T.Optional['FileMode']: + return self.install_mode + + def process_kwargs(self, kwargs): + self.process_kwargs_base(kwargs) + self.original_kwargs = kwargs + kwargs.get('modules', []) + self.need_install = kwargs.get('install', self.need_install) + + for lang in all_languages: + lang_args = extract_as_list(kwargs, f'{lang}_args') + self.add_compiler_args(lang, lang_args) + + 
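+        # Only C and C++ precompiled headers can be requested via kwargs
+        # ('c_pch' / 'cpp_pch'); add_pch() below validates the lists.
+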
self.add_pch('c', extract_as_list(kwargs, 'c_pch')) + self.add_pch('cpp', extract_as_list(kwargs, 'cpp_pch')) + + if not isinstance(self, Executable) or kwargs.get('export_dynamic', False): + self.vala_header = kwargs.get('vala_header', self.name + '.h') + self.vala_vapi = kwargs.get('vala_vapi', self.name + '.vapi') + self.vala_gir = kwargs.get('vala_gir', None) + + dfeatures = defaultdict(list) + dfeature_unittest = kwargs.get('d_unittest', False) + if dfeature_unittest: + dfeatures['unittest'] = dfeature_unittest + dfeature_versions = kwargs.get('d_module_versions', []) + if dfeature_versions: + dfeatures['versions'] = dfeature_versions + dfeature_debug = kwargs.get('d_debug', []) + if dfeature_debug: + dfeatures['debug'] = dfeature_debug + if 'd_import_dirs' in kwargs: + dfeature_import_dirs = extract_as_list(kwargs, 'd_import_dirs') + for d in dfeature_import_dirs: + if not isinstance(d, IncludeDirs): + raise InvalidArguments('Arguments to d_import_dirs must be include_directories.') + dfeatures['import_dirs'] = dfeature_import_dirs + if dfeatures: + self.d_features = dfeatures + + self.link_args = extract_as_list(kwargs, 'link_args') + for i in self.link_args: + if not isinstance(i, str): + raise InvalidArguments('Link_args arguments must be strings.') + for l in self.link_args: + if '-Wl,-rpath' in l or l.startswith('-rpath'): + mlog.warning(textwrap.dedent('''\ + Please do not define rpath with a linker argument, use install_rpath + or build_rpath properties instead. + This will become a hard error in a future Meson release. + ''')) + self.process_link_depends(kwargs.get('link_depends', [])) + # Target-specific include dirs must be added BEFORE include dirs from + # internal deps (added inside self.add_deps()) to override them. + inclist = extract_as_list(kwargs, 'include_directories') + self.add_include_dirs(inclist) + # Add dependencies (which also have include_directories) + deplist = extract_as_list(kwargs, 'dependencies') + self.add_deps(deplist) + # If an item in this list is False, the output corresponding to + # the list index of that item will not be installed + self.install_dir = typeslistify(kwargs.get('install_dir', []), + (str, bool)) + self.install_mode = kwargs.get('install_mode', None) + self.install_tag = stringlistify(kwargs.get('install_tag', [None])) + main_class = kwargs.get('main_class', '') + if not isinstance(main_class, str): + raise InvalidArguments('Main class must be a string') + self.main_class = main_class + if isinstance(self, Executable): + # This kwarg is deprecated. The value of "none" means that the kwarg + # was not specified and win_subsystem should be used instead. 
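+            # Roughly: gui_app: true corresponds to win_subsystem 'windows' and
+            # gui_app: false to 'console' in the backends.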
+ self.gui_app = None + if 'gui_app' in kwargs: + if 'win_subsystem' in kwargs: + raise InvalidArguments('Can specify only gui_app or win_subsystem for a target, not both.') + self.gui_app = kwargs['gui_app'] + if not isinstance(self.gui_app, bool): + raise InvalidArguments('Argument gui_app must be boolean.') + self.win_subsystem = self.validate_win_subsystem(kwargs.get('win_subsystem', 'console')) + elif 'gui_app' in kwargs: + raise InvalidArguments('Argument gui_app can only be used on executables.') + elif 'win_subsystem' in kwargs: + raise InvalidArguments('Argument win_subsystem can only be used on executables.') + extra_files = extract_as_list(kwargs, 'extra_files') + for i in extra_files: + assert isinstance(i, File) + if i in self.extra_files: + continue + trial = os.path.join(self.environment.get_source_dir(), i.subdir, i.fname) + if not os.path.isfile(trial): + raise InvalidArguments(f'Tried to add non-existing extra file {i}.') + self.extra_files.append(i) + self.install_rpath: str = kwargs.get('install_rpath', '') + if not isinstance(self.install_rpath, str): + raise InvalidArguments('Install_rpath is not a string.') + self.build_rpath = kwargs.get('build_rpath', '') + if not isinstance(self.build_rpath, str): + raise InvalidArguments('Build_rpath is not a string.') + resources = extract_as_list(kwargs, 'resources') + for r in resources: + if not isinstance(r, str): + raise InvalidArguments('Resource argument is not a string.') + trial = os.path.join(self.environment.get_source_dir(), self.subdir, r) + if not os.path.isfile(trial): + raise InvalidArguments(f'Tried to add non-existing resource {r}.') + self.resources = resources + if 'name_prefix' in kwargs: + name_prefix = kwargs['name_prefix'] + if isinstance(name_prefix, list): + if name_prefix: + raise InvalidArguments('name_prefix array must be empty to signify default.') + else: + if not isinstance(name_prefix, str): + raise InvalidArguments('name_prefix must be a string.') + self.prefix = name_prefix + self.name_prefix_set = True + if 'name_suffix' in kwargs: + name_suffix = kwargs['name_suffix'] + if isinstance(name_suffix, list): + if name_suffix: + raise InvalidArguments('name_suffix array must be empty to signify default.') + else: + if not isinstance(name_suffix, str): + raise InvalidArguments('name_suffix must be a string.') + if name_suffix == '': + raise InvalidArguments('name_suffix should not be an empty string. ' + 'If you want meson to use the default behaviour ' + 'for each platform pass `[]` (empty array)') + self.suffix = name_suffix + self.name_suffix_set = True + if isinstance(self, StaticLibrary): + # You can't disable PIC on OS X. The compiler ignores -fno-PIC. 
+ # PIC is always on for Windows (all code is position-independent + # since library loading is done differently) + m = self.environment.machines[self.for_machine] + if m.is_darwin() or m.is_windows(): + self.pic = True + else: + self.pic = self._extract_pic_pie(kwargs, 'pic', 'b_staticpic') + if isinstance(self, Executable) or (isinstance(self, StaticLibrary) and not self.pic): + # Executables must be PIE on Android + if self.environment.machines[self.for_machine].is_android(): + self.pie = True + else: + self.pie = self._extract_pic_pie(kwargs, 'pie', 'b_pie') + self.implicit_include_directories = kwargs.get('implicit_include_directories', True) + if not isinstance(self.implicit_include_directories, bool): + raise InvalidArguments('Implicit_include_directories must be a boolean.') + self.gnu_symbol_visibility = kwargs.get('gnu_symbol_visibility', '') + if not isinstance(self.gnu_symbol_visibility, str): + raise InvalidArguments('GNU symbol visibility must be a string.') + if self.gnu_symbol_visibility != '': + permitted = ['default', 'internal', 'hidden', 'protected', 'inlineshidden'] + if self.gnu_symbol_visibility not in permitted: + raise InvalidArguments('GNU symbol visibility arg {} not one of: {}'.format(self.gnu_symbol_visibility, ', '.join(permitted))) + + rust_dependency_map = kwargs.get('rust_dependency_map', {}) + if not isinstance(rust_dependency_map, dict): + raise InvalidArguments(f'Invalid rust_dependency_map "{rust_dependency_map}": must be a dictionary.') + if any(not isinstance(v, str) for v in rust_dependency_map.values()): + raise InvalidArguments(f'Invalid rust_dependency_map "{rust_dependency_map}": must be a dictionary with string values.') + self.rust_dependency_map = rust_dependency_map + + def validate_win_subsystem(self, value: str) -> str: + value = value.lower() + if re.fullmatch(r'(boot_application|console|efi_application|efi_boot_service_driver|efi_rom|efi_runtime_driver|native|posix|windows)(,\d+(\.\d+)?)?', value) is None: + raise InvalidArguments(f'Invalid value for win_subsystem: {value}.') + return value + + def _extract_pic_pie(self, kwargs, arg: str, option: str): + # Check if we have -fPIC, -fpic, -fPIE, or -fpie in cflags + all_flags = self.extra_args['c'] + self.extra_args['cpp'] + if '-f' + arg.lower() in all_flags or '-f' + arg.upper() in all_flags: + mlog.warning(f"Use the '{arg}' kwarg instead of passing '-f{arg}' manually to {self.name!r}") + return True + + k = OptionKey(option) + if arg in kwargs: + val = kwargs[arg] + elif k in self.environment.coredata.options: + val = self.environment.coredata.options[k].value + else: + val = False + + if not isinstance(val, bool): + raise InvalidArguments(f'Argument {arg} to {self.name!r} must be boolean') + return val + + def get_filename(self) -> str: + return self.filename + + def get_outputs(self) -> T.List[str]: + return self.outputs + + def get_extra_args(self, language): + return self.extra_args.get(language, []) + + @lru_cache(maxsize=None) + def get_dependencies(self) -> OrderedSet[Target]: + # Get all targets needed for linking. This includes all link_with and + # link_whole targets, and also all dependencies of static libraries + # recursively. The algorithm here is closely related to what we do in + # get_internal_static_libraries(): Installed static libraries include + # objects from all their dependencies already. 
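+        # Illustrative example: if A links static B and B links static C,
+        # A's link line gets both B and C here; an installed B already bundles
+        # C's objects, so the recursion skips C in that case.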
+ result: OrderedSet[Target] = OrderedSet() + for t in itertools.chain(self.link_targets, self.link_whole_targets): + if t not in result: + result.add(t) + if isinstance(t, StaticLibrary): + t.get_dependencies_recurse(result) + return result + + def get_dependencies_recurse(self, result: OrderedSet[Target], include_internals: bool = True) -> None: + # self is always a static library because we don't need to pull dependencies + # of shared libraries. If self is installed (not internal) it already + # include objects extracted from all its internal dependencies so we can + # skip them. + include_internals = include_internals and self.is_internal() + for t in self.link_targets: + if t in result: + continue + if include_internals or not t.is_internal(): + result.add(t) + if isinstance(t, StaticLibrary): + t.get_dependencies_recurse(result, include_internals) + for t in self.link_whole_targets: + t.get_dependencies_recurse(result, include_internals) + + def get_source_subdir(self): + return self.subdir + + def get_sources(self): + return self.sources + + def get_objects(self) -> T.List[T.Union[str, 'File', 'ExtractedObjects']]: + return self.objects + + def get_generated_sources(self) -> T.List['GeneratedTypes']: + return self.generated + + def should_install(self) -> bool: + return self.need_install + + def has_pch(self) -> bool: + return bool(self.pch) + + def get_pch(self, language: str) -> T.List[str]: + return self.pch.get(language, []) + + def get_include_dirs(self) -> T.List['IncludeDirs']: + return self.include_dirs + + def add_deps(self, deps): + deps = listify(deps) + for dep in deps: + if dep in self.added_deps: + continue + + if isinstance(dep, dependencies.InternalDependency): + # Those parts that are internal. + self.process_sourcelist(dep.sources) + self.extra_files.extend(f for f in dep.extra_files if f not in self.extra_files) + self.add_include_dirs(dep.include_directories, dep.get_include_type()) + self.objects.extend(dep.objects) + self.link_targets.extend(dep.libraries) + self.link_whole_targets.extend(dep.whole_libraries) + if dep.get_compile_args() or dep.get_link_args(): + # Those parts that are external. + extpart = dependencies.InternalDependency('undefined', + [], + dep.get_compile_args(), + dep.get_link_args(), + [], [], [], [], [], {}, [], [], []) + self.external_deps.append(extpart) + # Deps of deps. + self.add_deps(dep.ext_deps) + elif isinstance(dep, dependencies.Dependency): + if dep not in self.external_deps: + self.external_deps.append(dep) + self.process_sourcelist(dep.get_sources()) + self.add_deps(dep.ext_deps) + elif isinstance(dep, BuildTarget): + raise InvalidArguments('''Tried to use a build target as a dependency. +You probably should put it in link_with instead.''') + else: + # This is a bit of a hack. We do not want Build to know anything + # about the interpreter so we can't import it and use isinstance. + # This should be reliable enough. 
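+                # Duck-typing: interpreter holder objects expose 'held_object',
+                # and subproject objects carry the *_args_frozen attributes
+                # checked below.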
+ if hasattr(dep, 'held_object'):
+ # FIXME: subproject is not a real ObjectHolder so we have to do this by hand
+ dep = dep.held_object
+ if hasattr(dep, 'project_args_frozen') or hasattr(dep, 'global_args_frozen'):
+ raise InvalidArguments('Tried to use subproject object as a dependency.\n'
+ 'You probably wanted to use a dependency declared in it instead.\n'
+ 'Access it by calling get_variable() on the subproject object.')
+ raise InvalidArguments(f'Argument is of an unacceptable type {type(dep).__name__!r}.\nMust be '
+ 'either an external dependency (returned by find_library() or '
+ 'dependency()) or an internal dependency (returned by '
+ 'declare_dependency()).')
+
+ dep_d_features = dep.d_features
+
+ for feature in ('versions', 'import_dirs'):
+ if feature in dep_d_features:
+ self.d_features[feature].extend(dep_d_features[feature])
+
+ self.added_deps.add(dep)
+
+ def get_external_deps(self) -> T.List[dependencies.Dependency]:
+ return self.external_deps
+
+ def is_internal(self) -> bool:
+ return False
+
+ def link(self, targets):
+ for t in targets:
+ if isinstance(self, StaticLibrary) and self.need_install:
+ if isinstance(t, (CustomTarget, CustomTargetIndex)):
+ if not t.should_install():
+ mlog.warning(f'Tried to link an installed static library target {self.name} with a '
+ 'custom target that is not installed; this might cause problems '
+ 'when you try to use this static library.')
+ elif t.is_internal():
+ # When we're a static library and we link_with to an
+ # internal/convenience library, promote to link_whole.
+ self.link_whole([t])
+ continue
+ if not isinstance(t, (Target, CustomTargetIndex)):
+ if isinstance(t, dependencies.ExternalLibrary):
+ raise MesonException(textwrap.dedent('''\
+ An external library was used in link_with keyword argument, which
+ is reserved for libraries built as part of this project. External
+ libraries must be passed using the dependencies keyword argument
+ instead, because they are conceptually "external dependencies",
+ just like those detected with the dependency() function.
+ '''))
+ raise InvalidArguments(f'{t!r} is not a target.')
+ if not t.is_linkable_target():
+ raise InvalidArguments(f"Link target '{t!s}' is not linkable.")
+ if isinstance(self, SharedLibrary) and isinstance(t, StaticLibrary) and not t.pic:
+ msg = f"Can't link non-PIC static library {t.name!r} into shared library {self.name!r}. "
+ msg += "Use the 'pic' option to static_library to build with PIC."
+ raise InvalidArguments(msg)
+ if self.for_machine is not t.for_machine:
+ msg = f'Tried to mix libraries for machines {self.for_machine} and {t.for_machine} in target {self.name!r}'
+ if self.environment.is_cross_build():
+ raise InvalidArguments(msg + ' This is not possible in a cross build.')
+ else:
+ mlog.warning(msg + ' This will fail in cross build.')
+ self.link_targets.append(t)
+
+ def link_whole(self, targets):
+ for t in targets:
+ if isinstance(t, (CustomTarget, CustomTargetIndex)):
+ if not t.is_linkable_target():
+ raise InvalidArguments(f'Custom target {t!r} is not linkable.')
+ if t.links_dynamically():
+ raise InvalidArguments('Can only link_whole custom targets that are static archives.')
+ elif not isinstance(t, StaticLibrary):
+ raise InvalidArguments(f'{t!r} is not a static library.')
+ elif isinstance(self, SharedLibrary) and not t.pic:
+ msg = f"Can't link non-PIC static library {t.name!r} into shared library {self.name!r}. "
+ msg += "Use the 'pic' option to static_library to build with PIC."
+ raise InvalidArguments(msg) + if self.for_machine is not t.for_machine: + msg = f'Tried to mix libraries for machines {self.for_machine} and {t.for_machine} in target {self.name!r}' + if self.environment.is_cross_build(): + raise InvalidArguments(msg + ' This is not possible in a cross build.') + else: + mlog.warning(msg + ' This will fail in cross build.') + if isinstance(self, StaticLibrary) and not self.uses_rust(): + if isinstance(t, (CustomTarget, CustomTargetIndex)) or t.uses_rust(): + # There are cases we cannot do this, however. In Rust, for + # example, this can't be done with Rust ABI libraries, though + # it could be done with C ABI libraries, though there are + # several meson issues that need to be fixed: + # https://github.com/mesonbuild/meson/issues/10722 + # https://github.com/mesonbuild/meson/issues/10723 + # https://github.com/mesonbuild/meson/issues/10724 + # FIXME: We could extract the .a archive to get object files + raise InvalidArguments('Cannot link_whole a custom or Rust target into a static library') + # When we're a static library and we link_whole: to another static + # library, we need to add that target's objects to ourselves. + self.objects += [t.extract_all_objects()] + # If we install this static library we also need to include objects + # from all uninstalled static libraries it depends on. + if self.need_install: + for lib in t.get_internal_static_libraries(): + self.objects += [lib.extract_all_objects()] + self.link_whole_targets.append(t) + + @lru_cache(maxsize=None) + def get_internal_static_libraries(self) -> OrderedSet[Target]: + result: OrderedSet[Target] = OrderedSet() + self.get_internal_static_libraries_recurse(result) + return result + + def get_internal_static_libraries_recurse(self, result: OrderedSet[Target]) -> None: + for t in self.link_targets: + if t.is_internal() and t not in result: + result.add(t) + t.get_internal_static_libraries_recurse(result) + for t in self.link_whole_targets: + if t.is_internal(): + t.get_internal_static_libraries_recurse(result) + + def add_pch(self, language: str, pchlist: T.List[str]) -> None: + if not pchlist: + return + elif len(pchlist) == 1: + if not environment.is_header(pchlist[0]): + raise InvalidArguments(f'PCH argument {pchlist[0]} is not a header.') + elif len(pchlist) == 2: + if environment.is_header(pchlist[0]): + if not environment.is_source(pchlist[1]): + raise InvalidArguments('PCH definition must contain one header and at most one source.') + elif environment.is_source(pchlist[0]): + if not environment.is_header(pchlist[1]): + raise InvalidArguments('PCH definition must contain one header and at most one source.') + pchlist = [pchlist[1], pchlist[0]] + else: + raise InvalidArguments(f'PCH argument {pchlist[0]} is of unknown type.') + + if os.path.dirname(pchlist[0]) != os.path.dirname(pchlist[1]): + raise InvalidArguments('PCH files must be stored in the same folder.') + + FeatureDeprecated.single_use('PCH source files', '0.50.0', self.subproject, + 'Only a single header file should be used.') + elif len(pchlist) > 2: + raise InvalidArguments('PCH definition may have a maximum of 2 files.') + for f in pchlist: + if not isinstance(f, str): + raise MesonException('PCH arguments must be strings.') + if not os.path.isfile(os.path.join(self.environment.source_dir, self.subdir, f)): + raise MesonException(f'File {f} does not exist.') + self.pch[language] = pchlist + + def add_include_dirs(self, args: T.Sequence['IncludeDirs'], set_is_system: T.Optional[str] = None) -> None: + ids: 
T.List['IncludeDirs'] = [] + for a in args: + if not isinstance(a, IncludeDirs): + raise InvalidArguments('Include directory to be added is not an include directory object.') + ids.append(a) + if set_is_system is None: + set_is_system = 'preserve' + if set_is_system != 'preserve': + is_system = set_is_system == 'system' + ids = [IncludeDirs(x.get_curdir(), x.get_incdirs(), is_system, x.get_extra_build_dirs()) for x in ids] + self.include_dirs += ids + + def add_compiler_args(self, language: str, args: T.List['FileOrString']) -> None: + args = listify(args) + for a in args: + if not isinstance(a, (str, File)): + raise InvalidArguments('A non-string passed to compiler args.') + if language in self.extra_args: + self.extra_args[language] += args + else: + self.extra_args[language] = args + + def get_aliases(self) -> T.List[T.Tuple[str, str, str]]: + return [] + + def get_langs_used_by_deps(self) -> T.List[str]: + ''' + Sometimes you want to link to a C++ library that exports C API, which + means the linker must link in the C++ stdlib, and we must use a C++ + compiler for linking. The same is also applicable for objc/objc++, etc, + so we can keep using clink_langs for the priority order. + + See: https://github.com/mesonbuild/meson/issues/1653 + ''' + langs = [] # type: T.List[str] + + # Check if any of the external libraries were written in this language + for dep in self.external_deps: + if dep.language is None: + continue + if dep.language not in langs: + langs.append(dep.language) + # Check if any of the internal libraries this target links to were + # written in this language + for link_target in itertools.chain(self.link_targets, self.link_whole_targets): + if isinstance(link_target, (CustomTarget, CustomTargetIndex)): + continue + for language in link_target.compilers: + if language not in langs: + langs.append(language) + + return langs + + def get_prelinker(self): + if self.link_language: + comp = self.all_compilers[self.link_language] + return comp + for l in clink_langs: + if l in self.compilers: + try: + prelinker = self.all_compilers[l] + except KeyError: + raise MesonException( + f'Could not get a prelinker linker for build target {self.name!r}. ' + f'Requires a compiler for language "{l}", but that is not ' + 'a project language.') + return prelinker + raise MesonException(f'Could not determine prelinker for {self.name!r}.') + + def get_clink_dynamic_linker_and_stdlibs(self) -> T.Tuple['Compiler', T.List[str]]: + ''' + We use the order of languages in `clink_langs` to determine which + linker to use in case the target has sources compiled with multiple + compilers. All languages other than those in this list have their own + linker. + Note that Vala outputs C code, so Vala sources can use any linker + that can link compiled C. We don't actually need to add an exception + for Vala here because of that. + ''' + # If the user set the link_language, just return that. + if self.link_language: + comp = self.all_compilers[self.link_language] + return comp, comp.language_stdlib_only_link_flags(self.environment) + + # Since dependencies could come from subprojects, they could have + # languages we don't have in self.all_compilers. Use the global list of + # all compilers here. 
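+ # Assumed example: a pure-C target that links a C++ static library
+ # from a subproject has no 'cpp' entry in self.all_compilers, yet
+ # must be linked with the C++ driver so the C++ stdlib is pulled in;
+ # 'cpp' outranks 'c' in clink_langs, so the loop below selects it.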
+ all_compilers = self.environment.coredata.compilers[self.for_machine] + + # Languages used by dependencies + dep_langs = self.get_langs_used_by_deps() + + # Pick a compiler based on the language priority-order + for l in clink_langs: + if l in self.compilers or l in dep_langs: + try: + linker = all_compilers[l] + except KeyError: + raise MesonException( + f'Could not get a dynamic linker for build target {self.name!r}. ' + f'Requires a linker for language "{l}", but that is not ' + 'a project language.') + stdlib_args: T.List[str] = self.get_used_stdlib_args(linker.language) + # Type of var 'linker' is Compiler. + # Pretty hard to fix because the return value is passed everywhere + return linker, stdlib_args + + # None of our compilers can do clink, this happens for example if the + # target only has ASM sources. Pick the first capable compiler. + for l in clink_langs: + try: + comp = self.all_compilers[l] + return comp, comp.language_stdlib_only_link_flags(self.environment) + except KeyError: + pass + + raise AssertionError(f'Could not get a dynamic linker for build target {self.name!r}') + + def get_used_stdlib_args(self, link_language: str) -> T.List[str]: + all_compilers = self.environment.coredata.compilers[self.for_machine] + all_langs = set(self.compilers).union(self.get_langs_used_by_deps()) + stdlib_args: T.List[str] = [] + for dl in all_langs: + if dl != link_language and (dl, link_language) not in self._MASK_LANGS: + # We need to use all_compilers here because + # get_langs_used_by_deps could return a language from a + # subproject + stdlib_args.extend(all_compilers[dl].language_stdlib_only_link_flags(self.environment)) + return stdlib_args + + def uses_rust(self) -> bool: + return 'rust' in self.compilers + + def uses_fortran(self) -> bool: + return 'fortran' in self.compilers + + def get_using_msvc(self) -> bool: + ''' + Check if the dynamic linker is MSVC. Used by Executable, StaticLibrary, + and SharedLibrary for deciding when to use MSVC-specific file naming + and debug filenames. + + If at least some code is built with MSVC and the final library is + linked with MSVC, we can be sure that some debug info will be + generated. We only check the dynamic linker here because the static + linker is guaranteed to be of the same type. + + Interesting cases: + 1. The Vala compiler outputs C code to be compiled by whatever + C compiler we're using, so all objects will still be created by the + MSVC compiler. + 2. If the target contains only objects, process_compilers guesses and + picks the first compiler that smells right. + ''' + # Rustc can use msvc style linkers + if self.uses_rust(): + compiler = self.all_compilers['rust'] + else: + compiler, _ = self.get_clink_dynamic_linker_and_stdlibs() + # Mixing many languages with MSVC is not supported yet so ignore stdlibs. + return compiler and compiler.get_linker_id() in {'link', 'lld-link', 'xilink', 'optlink'} + + def check_module_linking(self): + ''' + Warn if shared modules are linked with target: (link_with) #2865 + ''' + for link_target in self.link_targets: + if isinstance(link_target, SharedModule) and not link_target.force_soname: + if self.environment.machines[self.for_machine].is_darwin(): + raise MesonException( + f'target {self.name} links against shared module {link_target.name}. 
This is not permitted on OSX')
+ elif self.environment.machines[self.for_machine].is_android() and isinstance(self, SharedModule):
+ # Android requires shared modules that use symbols from other shared modules to
+ # be linked before they can be dlopen()ed in the correct order. Not doing so
+ # leads to a missing symbol error: https://github.com/android/ndk/issues/201
+ link_target.force_soname = True
+ else:
+ mlog.deprecation(f'target {self.name} links against shared module {link_target.name}, which is incorrect.'
+ '\n '
+ f'This will be an error in the future, so please use shared_library() for {link_target.name} instead.'
+ '\n '
+ f'If shared_module() was used for {link_target.name} because it has references to undefined symbols,'
+ '\n '
+ 'use shared_library() with `override_options: [\'b_lundef=false\']` instead.')
+ link_target.force_soname = True
+
+class FileInTargetPrivateDir:
+ """Represents a file with the path '/path/to/build/target_private_dir/fname'.
+ target_private_dir is the return value of get_target_private_dir which is e.g. 'subdir/target.p'.
+ """
+
+ def __init__(self, fname: str):
+ self.fname = fname
+
+ def __str__(self) -> str:
+ return self.fname
+
+class FileMaybeInTargetPrivateDir:
+ """Union between 'File' and 'FileInTargetPrivateDir'"""
+
+ def __init__(self, inner: T.Union[File, FileInTargetPrivateDir]):
+ self.inner = inner
+
+ @property
+ def fname(self) -> str:
+ return self.inner.fname
+
+ def rel_to_builddir(self, build_to_src: str, target_private_dir: str) -> str:
+ if isinstance(self.inner, FileInTargetPrivateDir):
+ return os.path.join(target_private_dir, self.inner.fname)
+ return self.inner.rel_to_builddir(build_to_src)
+
+ def absolute_path(self, srcdir: str, builddir: str) -> str:
+ if isinstance(self.inner, FileInTargetPrivateDir):
+ raise RuntimeError('Unreachable code')
+ return self.inner.absolute_path(srcdir, builddir)
+
+ def __str__(self) -> str:
+ return self.fname
+
+class Generator(HoldableObject):
+ def __init__(self, exe: T.Union['Executable', programs.ExternalProgram],
+ arguments: T.List[str],
+ output: T.List[str],
+ # how2dataclass
+ *,
+ depfile: T.Optional[str] = None,
+ capture: bool = False,
+ depends: T.Optional[T.List[T.Union[BuildTarget, 'CustomTarget']]] = None,
+ name: str = 'Generator'):
+ self.exe = exe
+ self.depfile = depfile
+ self.capture = capture
+ self.depends: T.List[T.Union[BuildTarget, 'CustomTarget']] = depends or []
+ self.arglist = arguments
+ self.outputs = output
+ self.name = name
+
+ def __repr__(self) -> str:
+ repr_str = "<{0}: {1}>"
+ return repr_str.format(self.__class__.__name__, self.exe)
+
+ def get_exe(self) -> T.Union['Executable', programs.ExternalProgram]:
+ return self.exe
+
+ def get_base_outnames(self, inname: str) -> T.List[str]:
+ plainname = os.path.basename(inname)
+ basename = os.path.splitext(plainname)[0]
+ bases = [x.replace('@BASENAME@', basename).replace('@PLAINNAME@', plainname) for x in self.outputs]
+ return bases
+
+ def get_dep_outname(self, inname: str) -> str:
+ if self.depfile is None:
+ raise InvalidArguments('Tried to get dep name for rule that does not have dependency file defined.')
+ plainname = os.path.basename(inname)
+ basename = os.path.splitext(plainname)[0]
+ return self.depfile.replace('@BASENAME@', basename).replace('@PLAINNAME@', plainname)
+
+ def get_arglist(self, inname: str) -> T.List[str]:
+ plainname = os.path.basename(inname)
+ basename = os.path.splitext(plainname)[0]
+ return [x.replace('@BASENAME@', basename).replace('@PLAINNAME@',
plainname) for x in self.arglist] + + @staticmethod + def is_parent_path(parent: str, trial: str) -> bool: + relpath = pathlib.PurePath(trial).relative_to(parent) + return relpath.parts[0] != '..' # For subdirs we can only go "down". + + def process_files(self, files: T.Iterable[T.Union[str, File, 'CustomTarget', 'CustomTargetIndex', 'GeneratedList']], + state: T.Union['Interpreter', 'ModuleState'], + preserve_path_from: T.Optional[str] = None, + extra_args: T.Optional[T.List[str]] = None) -> 'GeneratedList': + output = GeneratedList(self, state.subdir, preserve_path_from, extra_args=extra_args if extra_args is not None else []) + + for e in files: + if isinstance(e, CustomTarget): + output.depends.add(e) + if isinstance(e, CustomTargetIndex): + output.depends.add(e.target) + if isinstance(e, (CustomTarget, CustomTargetIndex)): + output.depends.add(e) + fs = [File.from_built_file(state.subdir, f) for f in e.get_outputs()] + elif isinstance(e, GeneratedList): + if preserve_path_from: + raise InvalidArguments("generator.process: 'preserve_path_from' is not allowed if one input is a 'generated_list'.") + output.depends.add(e) + fs = [FileInTargetPrivateDir(f) for f in e.get_outputs()] + elif isinstance(e, str): + fs = [File.from_source_file(state.environment.source_dir, state.subdir, e)] + else: + fs = [e] + + for f in fs: + if preserve_path_from: + abs_f = f.absolute_path(state.environment.source_dir, state.environment.build_dir) + if not self.is_parent_path(preserve_path_from, abs_f): + raise InvalidArguments('generator.process: When using preserve_path_from, all input files must be in a subdirectory of the given dir.') + f = FileMaybeInTargetPrivateDir(f) + output.add_file(f, state) + return output + + +@dataclass(eq=False) +class GeneratedList(HoldableObject): + + """The output of generator.process.""" + + generator: Generator + subdir: str + preserve_path_from: T.Optional[str] + extra_args: T.List[str] + + def __post_init__(self) -> None: + self.name = self.generator.exe + self.depends: T.Set[GeneratedTypes] = set() + self.infilelist: T.List[FileMaybeInTargetPrivateDir] = [] + self.outfilelist: T.List[str] = [] + self.outmap: T.Dict[FileMaybeInTargetPrivateDir, T.List[str]] = {} + self.extra_depends = [] # XXX: Doesn't seem to be used? 
+ self.depend_files: T.List[File] = []
+
+ if self.extra_args is None:
+ self.extra_args: T.List[str] = []
+
+ if isinstance(self.generator.exe, programs.ExternalProgram):
+ if not self.generator.exe.found():
+ raise InvalidArguments('Tried to use not-found external program as generator')
+ path = self.generator.exe.get_path()
+ if os.path.isabs(path):
+ # Can only add a dependency on an external program which we
+ # know the absolute path of
+ self.depend_files.append(File.from_absolute_file(path))
+
+ def add_preserved_path_segment(self, infile: FileMaybeInTargetPrivateDir, outfiles: T.List[str], state: T.Union['Interpreter', 'ModuleState']) -> T.List[str]:
+ result: T.List[str] = []
+ in_abs = infile.absolute_path(state.environment.source_dir, state.environment.build_dir)
+ assert os.path.isabs(self.preserve_path_from)
+ rel = os.path.relpath(in_abs, self.preserve_path_from)
+ path_segment = os.path.dirname(rel)
+ for of in outfiles:
+ result.append(os.path.join(path_segment, of))
+ return result
+
+ def add_file(self, newfile: FileMaybeInTargetPrivateDir, state: T.Union['Interpreter', 'ModuleState']) -> None:
+ self.infilelist.append(newfile)
+ outfiles = self.generator.get_base_outnames(newfile.fname)
+ if self.preserve_path_from:
+ outfiles = self.add_preserved_path_segment(newfile, outfiles, state)
+ self.outfilelist += outfiles
+ self.outmap[newfile] = outfiles
+
+ def get_inputs(self) -> T.List[FileMaybeInTargetPrivateDir]:
+ return self.infilelist
+
+ def get_outputs(self) -> T.List[str]:
+ return self.outfilelist
+
+ def get_outputs_for(self, filename: FileMaybeInTargetPrivateDir) -> T.List[str]:
+ return self.outmap[filename]
+
+ def get_generator(self) -> 'Generator':
+ return self.generator
+
+ def get_extra_args(self) -> T.List[str]:
+ return self.extra_args
+
+ def get_subdir(self) -> str:
+ return self.subdir
+
+
+class Executable(BuildTarget):
+ known_kwargs = known_exe_kwargs
+
+ typename = 'executable'
+
+ def __init__(
+ self,
+ name: str,
+ subdir: str,
+ subproject: SubProject,
+ for_machine: MachineChoice,
+ sources: T.List['SourceOutputs'],
+ structured_sources: T.Optional[StructuredSources],
+ objects: T.List[ObjectTypes],
+ environment: environment.Environment,
+ compilers: T.Dict[str, 'Compiler'],
+ kwargs):
+ key = OptionKey('b_pie')
+ if 'pie' not in kwargs and key in environment.coredata.options:
+ kwargs['pie'] = environment.coredata.options[key].value
+ super().__init__(name, subdir, subproject, for_machine, sources, structured_sources, objects,
+ environment, compilers, kwargs)
+ # Check for export_dynamic
+ self.export_dynamic = kwargs.get('export_dynamic', False)
+ if not isinstance(self.export_dynamic, bool):
+ raise InvalidArguments('"export_dynamic" keyword argument must be a boolean')
+ self.implib = kwargs.get('implib')
+ if not isinstance(self.implib, (bool, str, type(None))):
+ raise InvalidArguments('"implib" keyword argument must be a boolean or string')
+ if self.implib:
+ self.export_dynamic = True
+ if self.export_dynamic and self.implib is False:
+ raise InvalidArguments('"implib" keyword argument must not be false if "export_dynamic" is true')
+ # Only linkwithable if using export_dynamic
+ self.is_linkwithable = self.export_dynamic
+ # Remember that this exe was returned by `find_program()` through an override
+ self.was_returned_by_find_program = False
+
+ def post_init(self) -> None:
+ super().post_init()
+ machine = self.environment.machines[self.for_machine]
+ # Unless overridden, executables have no suffix or prefix.
Except on + # Windows and with C#/Mono executables where the suffix is 'exe' + if not hasattr(self, 'prefix'): + self.prefix = '' + if not hasattr(self, 'suffix'): + # Executable for Windows or C#/Mono + if machine.is_windows() or machine.is_cygwin() or 'cs' in self.compilers: + self.suffix = 'exe' + elif machine.system.startswith('wasm') or machine.system == 'emscripten': + self.suffix = 'js' + elif ('c' in self.compilers and self.compilers['c'].get_id().startswith('armclang') or + 'cpp' in self.compilers and self.compilers['cpp'].get_id().startswith('armclang')): + self.suffix = 'axf' + elif ('c' in self.compilers and self.compilers['c'].get_id().startswith('ccrx') or + 'cpp' in self.compilers and self.compilers['cpp'].get_id().startswith('ccrx')): + self.suffix = 'abs' + elif ('c' in self.compilers and self.compilers['c'].get_id().startswith('xc16')): + self.suffix = 'elf' + elif ('c' in self.compilers and self.compilers['c'].get_id() in {'ti', 'c2000'} or + 'cpp' in self.compilers and self.compilers['cpp'].get_id() in {'ti', 'c2000'}): + self.suffix = 'out' + elif ('c' in self.compilers and self.compilers['c'].get_id() in {'mwccarm', 'mwcceppc'} or + 'cpp' in self.compilers and self.compilers['cpp'].get_id() in {'mwccarm', 'mwcceppc'}): + self.suffix = 'nef' + else: + self.suffix = machine.get_exe_suffix() + self.filename = self.name + if self.suffix: + self.filename += '.' + self.suffix + self.outputs = [self.filename] + + # The import library this target will generate + self.import_filename = None + # The import library that Visual Studio would generate (and accept) + self.vs_import_filename = None + # The import library that GCC would generate (and prefer) + self.gcc_import_filename = None + # The debugging information file this target will generate + self.debug_filename = None + + # If using export_dynamic, set the import library name + if self.export_dynamic: + implib_basename = self.name + '.exe' + if isinstance(self.implib, str): + implib_basename = self.implib + if machine.is_windows() or machine.is_cygwin(): + self.vs_import_filename = f'{implib_basename}.lib' + self.gcc_import_filename = f'lib{implib_basename}.a' + if self.get_using_msvc(): + self.import_filename = self.vs_import_filename + else: + self.import_filename = self.gcc_import_filename + + create_debug_file = ( + machine.is_windows() + and ('cs' in self.compilers or self.uses_rust() or self.get_using_msvc()) + # .pdb file is created only when debug symbols are enabled + and self.environment.coredata.get_option(OptionKey("debug")) + ) + if create_debug_file: + self.debug_filename = self.name + '.pdb' + + def get_default_install_dir(self) -> T.Tuple[str, str]: + return self.environment.get_bindir(), '{bindir}' + + def description(self): + '''Human friendly description of the executable''' + return self.name + + def type_suffix(self): + return "@exe" + + def get_import_filename(self) -> T.Optional[str]: + """ + The name of the import library that will be outputted by the compiler + + Returns None if there is no import library required for this platform + """ + return self.import_filename + + def get_import_filenameslist(self): + if self.import_filename: + return [self.vs_import_filename, self.gcc_import_filename] + return [] + + def get_debug_filename(self) -> T.Optional[str]: + """ + The name of debuginfo file that will be created by the compiler + + Returns None if the build won't create any debuginfo file + """ + return self.debug_filename + + def is_linkable_target(self): + return self.is_linkwithable + + def 
get_command(self) -> 'ImmutableListProtocol[str]': + """Provides compatibility with ExternalProgram. + + Since you can override ExternalProgram instances with Executables. + """ + return self.outputs + + def get_path(self) -> str: + """Provides compatibility with ExternalProgram.""" + return os.path.join(self.subdir, self.filename) + + def found(self) -> bool: + """Provides compatibility with ExternalProgram.""" + return True + + +class StaticLibrary(BuildTarget): + known_kwargs = known_stlib_kwargs + + typename = 'static library' + + def __init__( + self, + name: str, + subdir: str, + subproject: SubProject, + for_machine: MachineChoice, + sources: T.List['SourceOutputs'], + structured_sources: T.Optional[StructuredSources], + objects: T.List[ObjectTypes], + environment: environment.Environment, + compilers: T.Dict[str, 'Compiler'], + kwargs): + self.prelink = kwargs.get('prelink', False) + if not isinstance(self.prelink, bool): + raise InvalidArguments('Prelink keyword argument must be a boolean.') + super().__init__(name, subdir, subproject, for_machine, sources, structured_sources, objects, + environment, compilers, kwargs) + + def post_init(self) -> None: + super().post_init() + if 'cs' in self.compilers: + raise InvalidArguments('Static libraries not supported for C#.') + if 'rust' in self.compilers: + # If no crate type is specified, or it's the generic lib type, use rlib + if not hasattr(self, 'rust_crate_type') or self.rust_crate_type == 'lib': + mlog.debug('Defaulting Rust static library target crate type to rlib') + self.rust_crate_type = 'rlib' + # Don't let configuration proceed with a non-static crate type + elif self.rust_crate_type not in ['rlib', 'staticlib']: + raise InvalidArguments(f'Crate type "{self.rust_crate_type}" invalid for static libraries; must be "rlib" or "staticlib"') + # See https://github.com/rust-lang/rust/issues/110460 + if self.rust_crate_type == 'rlib' and any(c in self.name for c in ['-', ' ', '.']): + raise InvalidArguments('Rust crate type "rlib" does not allow spaces, periods or dashes in the library name ' + 'due to a limitation of rustc. Replace them with underscores, for example') + if self.rust_crate_type == 'staticlib': + # FIXME: In the case of no-std we should not add those libraries, + # but we have no way to know currently. + rustc = self.compilers['rust'] + d = dependencies.InternalDependency('undefined', [], [], + rustc.native_static_libs, + [], [], [], [], [], {}, [], [], []) + self.external_deps.append(d) + # By default a static library is named libfoo.a even on Windows because + # MSVC does not have a consistent convention for what static libraries + # are called. The MSVC CRT uses libfoo.lib syntax but nothing else uses + # it and GCC only looks for static libraries called foo.lib and + # libfoo.a. However, we cannot use foo.lib because that's the same as + # the import library. Using libfoo.a is ok because people using MSVC + # always pass the library filename while linking anyway. + if not hasattr(self, 'prefix'): + self.prefix = 'lib' + if not hasattr(self, 'suffix'): + if 'rust' in self.compilers: + if not hasattr(self, 'rust_crate_type') or self.rust_crate_type == 'rlib': + # default Rust static library suffix + self.suffix = 'rlib' + elif self.rust_crate_type == 'staticlib': + self.suffix = 'a' + else: + self.suffix = 'a' + self.filename = self.prefix + self.name + '.' 
+ self.suffix + self.outputs = [self.filename] + + def get_link_deps_mapping(self, prefix: str) -> T.Mapping[str, str]: + return {} + + def get_default_install_dir(self) -> T.Tuple[str, str]: + return self.environment.get_static_lib_dir(), '{libdir_static}' + + def type_suffix(self): + return "@sta" + + def process_kwargs(self, kwargs): + super().process_kwargs(kwargs) + if 'rust_crate_type' in kwargs: + rust_crate_type = kwargs['rust_crate_type'] + if isinstance(rust_crate_type, str): + self.rust_crate_type = rust_crate_type + else: + raise InvalidArguments(f'Invalid rust_crate_type "{rust_crate_type}": must be a string.') + + def is_linkable_target(self): + return True + + def is_internal(self) -> bool: + return not self.need_install + +class SharedLibrary(BuildTarget): + known_kwargs = known_shlib_kwargs + + typename = 'shared library' + + def __init__( + self, + name: str, + subdir: str, + subproject: SubProject, + for_machine: MachineChoice, + sources: T.List['SourceOutputs'], + structured_sources: T.Optional[StructuredSources], + objects: T.List[ObjectTypes], + environment: environment.Environment, + compilers: T.Dict[str, 'Compiler'], + kwargs): + self.soversion = None + self.ltversion = None + # Max length 2, first element is compatibility_version, second is current_version + self.darwin_versions = [] + self.vs_module_defs = None + # The import library this target will generate + self.import_filename = None + # The import library that Visual Studio would generate (and accept) + self.vs_import_filename = None + # The import library that GCC would generate (and prefer) + self.gcc_import_filename = None + # The debugging information file this target will generate + self.debug_filename = None + # Use by the pkgconfig module + self.shared_library_only = False + super().__init__(name, subdir, subproject, for_machine, sources, structured_sources, objects, + environment, compilers, kwargs) + + def post_init(self) -> None: + super().post_init() + if 'rust' in self.compilers: + # If no crate type is specified, or it's the generic lib type, use dylib + if not hasattr(self, 'rust_crate_type') or self.rust_crate_type == 'lib': + mlog.debug('Defaulting Rust dynamic library target crate type to "dylib"') + self.rust_crate_type = 'dylib' + # Don't let configuration proceed with a non-dynamic crate type + elif self.rust_crate_type not in ['dylib', 'cdylib', 'proc-macro']: + raise InvalidArguments(f'Crate type "{self.rust_crate_type}" invalid for dynamic libraries; must be "dylib", "cdylib", or "proc-macro"') + # See https://github.com/rust-lang/rust/issues/110460 + if self.rust_crate_type != 'cdylib' and any(c in self.name for c in ['-', ' ', '.']): + raise InvalidArguments('Rust crate types "dylib" and "proc-macro" do not allow spaces, periods or dashes in the library name ' + 'due to a limitation of rustc. 
Replace them with underscores, for example') + + if not hasattr(self, 'prefix'): + self.prefix = None + if not hasattr(self, 'suffix'): + self.suffix = None + self.basic_filename_tpl = '{0.prefix}{0.name}.{0.suffix}' + self.determine_filenames() + + def get_link_deps_mapping(self, prefix: str) -> T.Mapping[str, str]: + result: T.Dict[str, str] = {} + mappings = self.get_transitive_link_deps_mapping(prefix) + old = get_target_macos_dylib_install_name(self) + if old not in mappings: + fname = self.get_filename() + outdirs, _, _ = self.get_install_dir() + new = os.path.join(prefix, outdirs[0], fname) + result.update({old: new}) + mappings.update(result) + return mappings + + def get_default_install_dir(self) -> T.Tuple[str, str]: + return self.environment.get_shared_lib_dir(), '{libdir_shared}' + + def determine_filenames(self): + """ + See https://github.com/mesonbuild/meson/pull/417 for details. + + First we determine the filename template (self.filename_tpl), then we + set the output filename (self.filename). + + The template is needed while creating aliases (self.get_aliases), + which are needed while generating .so shared libraries for Linux. + + Besides this, there's also the import library name, which is only used + on Windows since on that platform the linker uses a separate library + called the "import library" during linking instead of the shared + library (DLL). The toolchain will output an import library in one of + two formats: GCC or Visual Studio. + + When we're building with Visual Studio, the import library that will be + generated by the toolchain is self.vs_import_filename, and with + MinGW/GCC, it's self.gcc_import_filename. self.import_filename will + always contain the import library name this target will generate. + """ + prefix = '' + suffix = '' + create_debug_file = False + self.filename_tpl = self.basic_filename_tpl + # NOTE: manual prefix/suffix override is currently only tested for C/C++ + # C# and Mono + if 'cs' in self.compilers: + prefix = '' + suffix = 'dll' + self.filename_tpl = '{0.prefix}{0.name}.{0.suffix}' + create_debug_file = True + # C, C++, Swift, Vala + # Only Windows uses a separate import library for linking + # For all other targets/platforms import_filename stays None + elif self.environment.machines[self.for_machine].is_windows(): + suffix = 'dll' + self.vs_import_filename = '{}{}.lib'.format(self.prefix if self.prefix is not None else '', self.name) + self.gcc_import_filename = '{}{}.dll.a'.format(self.prefix if self.prefix is not None else 'lib', self.name) + if self.uses_rust(): + # Shared library is of the form foo.dll + prefix = '' + # Import library is called foo.dll.lib + self.import_filename = f'{self.name}.dll.lib' + # .pdb file is only created when debug symbols are enabled + create_debug_file = self.environment.coredata.get_option(OptionKey("debug")) + elif self.get_using_msvc(): + # Shared library is of the form foo.dll + prefix = '' + # Import library is called foo.lib + self.import_filename = self.vs_import_filename + # .pdb file is only created when debug symbols are enabled + create_debug_file = self.environment.coredata.get_option(OptionKey("debug")) + # Assume GCC-compatible naming + else: + # Shared library is of the form libfoo.dll + prefix = 'lib' + # Import library is called libfoo.dll.a + self.import_filename = self.gcc_import_filename + # Shared library has the soversion if it is defined + if self.soversion: + self.filename_tpl = '{0.prefix}{0.name}-{0.soversion}.{0.suffix}' + else: + self.filename_tpl = 
'{0.prefix}{0.name}.{0.suffix}'
+ elif self.environment.machines[self.for_machine].is_cygwin():
+ suffix = 'dll'
+ self.gcc_import_filename = '{}{}.dll.a'.format(self.prefix if self.prefix is not None else 'lib', self.name)
+ # Shared library is of the form cygfoo.dll
+ # (ld --dll-search-prefix=cyg is the default)
+ prefix = 'cyg'
+ # Import library is called libfoo.dll.a
+ self.import_filename = self.gcc_import_filename
+ if self.soversion:
+ self.filename_tpl = '{0.prefix}{0.name}-{0.soversion}.{0.suffix}'
+ else:
+ self.filename_tpl = '{0.prefix}{0.name}.{0.suffix}'
+ elif self.environment.machines[self.for_machine].is_darwin():
+ prefix = 'lib'
+ suffix = 'dylib'
+ # On macOS, the filename can only contain the major version
+ if self.soversion:
+ # libfoo.X.dylib
+ self.filename_tpl = '{0.prefix}{0.name}.{0.soversion}.{0.suffix}'
+ else:
+ # libfoo.dylib
+ self.filename_tpl = '{0.prefix}{0.name}.{0.suffix}'
+ elif self.environment.machines[self.for_machine].is_android():
+ prefix = 'lib'
+ suffix = 'so'
+ # Android doesn't support shared_library versioning
+ self.filename_tpl = '{0.prefix}{0.name}.{0.suffix}'
+ else:
+ prefix = 'lib'
+ suffix = 'so'
+ if self.ltversion:
+ # libfoo.so.X[.Y[.Z]] (.Y and .Z are optional)
+ self.filename_tpl = '{0.prefix}{0.name}.{0.suffix}.{0.ltversion}'
+ elif self.soversion:
+ # libfoo.so.X
+ self.filename_tpl = '{0.prefix}{0.name}.{0.suffix}.{0.soversion}'
+ else:
+ # No versioning, libfoo.so
+ self.filename_tpl = '{0.prefix}{0.name}.{0.suffix}'
+ if self.prefix is None:
+ self.prefix = prefix
+ if self.suffix is None:
+ self.suffix = suffix
+ self.filename = self.filename_tpl.format(self)
+ # There may have been more outputs added by the time we get here, so
+ # only replace the first entry
+ self.outputs[0] = self.filename
+ if create_debug_file:
+ self.debug_filename = os.path.splitext(self.filename)[0] + '.pdb'
+
+ @staticmethod
+ def _validate_darwin_versions(darwin_versions):
+ try:
+ if isinstance(darwin_versions, int):
+ darwin_versions = str(darwin_versions)
+ if isinstance(darwin_versions, str):
+ darwin_versions = 2 * [darwin_versions]
+ if not isinstance(darwin_versions, list):
+ raise InvalidArguments('Shared library darwin_versions: must be a string, integer, '
+ f'or a list, not {darwin_versions!r}')
+ if len(darwin_versions) > 2:
+ raise InvalidArguments('Shared library darwin_versions: list must contain 2 or fewer elements')
+ if len(darwin_versions) == 1:
+ darwin_versions = 2 * darwin_versions
+ for i, v in enumerate(darwin_versions[:]):
+ if isinstance(v, int):
+ v = str(v)
+ if not isinstance(v, str):
+ raise InvalidArguments('Shared library darwin_versions: list elements '
+ f'must be strings or integers, not {v!r}')
+ if not re.fullmatch(r'[0-9]+(\.[0-9]+){0,2}', v):
+ raise InvalidArguments('Shared library darwin_versions: must be X.Y.Z where '
+ 'X, Y, Z are numbers, and Y and Z are optional')
+ parts = v.split('.')
+ if len(parts) in {1, 2, 3} and int(parts[0]) > 65535:
+ raise InvalidArguments('Shared library darwin_versions: must be X.Y.Z '
+ 'where X is [0, 65535] and Y, Z are optional')
+ if len(parts) in {2, 3} and int(parts[1]) > 255:
+ raise InvalidArguments('Shared library darwin_versions: must be X.Y.Z '
+ 'where Y is [0, 255] and Y, Z are optional')
+ if len(parts) == 3 and int(parts[2]) > 255:
+ raise InvalidArguments('Shared library darwin_versions: must be X.Y.Z '
+ 'where Z is [0, 255] and Y, Z are optional')
+ darwin_versions[i] = v
+ except ValueError:
+ raise InvalidArguments('Shared library darwin_versions:
value is invalid') + return darwin_versions + + def process_kwargs(self, kwargs): + super().process_kwargs(kwargs) + + if not self.environment.machines[self.for_machine].is_android(): + # Shared library version + if 'version' in kwargs: + self.ltversion = kwargs['version'] + if not isinstance(self.ltversion, str): + raise InvalidArguments('Shared library version needs to be a string, not ' + type(self.ltversion).__name__) + if not re.fullmatch(r'[0-9]+(\.[0-9]+){0,2}', self.ltversion): + raise InvalidArguments(f'Invalid Shared library version "{self.ltversion}". Must be of the form X.Y.Z where all three are numbers. Y and Z are optional.') + # Try to extract/deduce the soversion + if 'soversion' in kwargs: + self.soversion = kwargs['soversion'] + if isinstance(self.soversion, int): + self.soversion = str(self.soversion) + if not isinstance(self.soversion, str): + raise InvalidArguments('Shared library soversion is not a string or integer.') + elif self.ltversion: + # library version is defined, get the soversion from that + # We replicate what Autotools does here and take the first + # number of the version by default. + self.soversion = self.ltversion.split('.')[0] + # macOS, iOS and tvOS dylib compatibility_version and current_version + if 'darwin_versions' in kwargs: + self.darwin_versions = self._validate_darwin_versions(kwargs['darwin_versions']) + elif self.soversion: + # If unspecified, pick the soversion + self.darwin_versions = 2 * [self.soversion] + + # Visual Studio module-definitions file + if 'vs_module_defs' in kwargs: + path = kwargs['vs_module_defs'] + if isinstance(path, str): + if os.path.isabs(path): + self.vs_module_defs = File.from_absolute_file(path) + else: + self.vs_module_defs = File.from_source_file(self.environment.source_dir, self.subdir, path) + elif isinstance(path, File): + # When passing a generated file. 
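+ # (e.g., in a hypothetical setup, a File produced by
+ # configure_file() from a .def template)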
+ self.vs_module_defs = path + elif hasattr(path, 'get_filename'): + # When passing output of a Custom Target + self.vs_module_defs = File.from_built_file(path.subdir, path.get_filename()) + else: + raise InvalidArguments( + 'Shared library vs_module_defs must be either a string, ' + 'a file object or a Custom Target') + self.process_link_depends(path) + + if 'rust_crate_type' in kwargs: + rust_crate_type = kwargs['rust_crate_type'] + if isinstance(rust_crate_type, str): + self.rust_crate_type = rust_crate_type + else: + raise InvalidArguments(f'Invalid rust_crate_type "{rust_crate_type}": must be a string.') + if rust_crate_type == 'proc-macro': + FeatureNew.single_use('Rust crate type "proc-macro"', '0.62.0', self.subproject) + + def get_import_filename(self) -> T.Optional[str]: + """ + The name of the import library that will be outputted by the compiler + + Returns None if there is no import library required for this platform + """ + return self.import_filename + + def get_debug_filename(self) -> T.Optional[str]: + """ + The name of debuginfo file that will be created by the compiler + + Returns None if the build won't create any debuginfo file + """ + return self.debug_filename + + def get_import_filenameslist(self): + if self.import_filename: + return [self.vs_import_filename, self.gcc_import_filename] + return [] + + def get_all_link_deps(self): + return [self] + self.get_transitive_link_deps() + + def get_aliases(self) -> T.List[T.Tuple[str, str, str]]: + """ + If the versioned library name is libfoo.so.0.100.0, aliases are: + * libfoo.so.0 (soversion) -> libfoo.so.0.100.0 + * libfoo.so (unversioned; for linking) -> libfoo.so.0 + Same for dylib: + * libfoo.dylib (unversioned; for linking) -> libfoo.0.dylib + """ + aliases: T.List[T.Tuple[str, str, str]] = [] + # Aliases are only useful with .so and .dylib libraries. Also if + # there's no self.soversion (no versioning), we don't need aliases. + if self.suffix not in ('so', 'dylib') or not self.soversion: + return aliases + # With .so libraries, the minor and micro versions are also in the + # filename. If ltversion != soversion we create an soversion alias: + # libfoo.so.0 -> libfoo.so.0.100.0 + # Where libfoo.so.0.100.0 is the actual library + if self.suffix == 'so' and self.ltversion and self.ltversion != self.soversion: + alias_tpl = self.filename_tpl.replace('ltversion', 'soversion') + ltversion_filename = alias_tpl.format(self) + tag = self.install_tag[0] or 'runtime' + aliases.append((ltversion_filename, self.filename, tag)) + # libfoo.so.0/libfoo.0.dylib is the actual library + else: + ltversion_filename = self.filename + # Unversioned alias: + # libfoo.so -> libfoo.so.0 + # libfoo.dylib -> libfoo.0.dylib + tag = self.install_tag[0] or 'devel' + aliases.append((self.basic_filename_tpl.format(self), ltversion_filename, tag)) + return aliases + + def type_suffix(self): + return "@sha" + + def is_linkable_target(self): + return True + +# A shared library that is meant to be used with dlopen rather than linking +# into something else. 
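+# Typical (illustrative) examples are runtime-loaded plugins such as
+# Python extension modules: they are opened via dlopen()/LoadLibrary()
+# at run time instead of appearing on a link line.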
+class SharedModule(SharedLibrary):
+ known_kwargs = known_shmod_kwargs
+
+ typename = 'shared module'
+
+ def __init__(
+ self,
+ name: str,
+ subdir: str,
+ subproject: SubProject,
+ for_machine: MachineChoice,
+ sources: T.List['SourceOutputs'],
+ structured_sources: T.Optional[StructuredSources],
+ objects: T.List[ObjectTypes],
+ environment: environment.Environment,
+ compilers: T.Dict[str, 'Compiler'],
+ kwargs):
+ if 'version' in kwargs:
+ raise MesonException('Shared modules must not specify the version kwarg.')
+ if 'soversion' in kwargs:
+ raise MesonException('Shared modules must not specify the soversion kwarg.')
+ super().__init__(name, subdir, subproject, for_machine, sources,
+ structured_sources, objects, environment, compilers, kwargs)
+ # We need to set the soname in cases where build files link the module
+ # to build targets, see: https://github.com/mesonbuild/meson/issues/9492
+ self.force_soname = False
+
+ def get_default_install_dir(self) -> T.Tuple[str, str]:
+ return self.environment.get_shared_module_dir(), '{moduledir_shared}'
+
+class BothLibraries(SecondLevelHolder):
+ def __init__(self, shared: SharedLibrary, static: StaticLibrary) -> None:
+ self._preferred_library = 'shared'
+ self.shared = shared
+ self.static = static
+ self.subproject = self.shared.subproject
+
+ def __repr__(self) -> str:
+ return f'<BothLibraries: static={repr(self.static)}; shared={repr(self.shared)}>'
+
+ def get_default_object(self) -> BuildTarget:
+ if self._preferred_library == 'shared':
+ return self.shared
+ elif self._preferred_library == 'static':
+ return self.static
+ raise MesonBugException(f'self._preferred_library == "{self._preferred_library}" is neither "shared" nor "static".')
+
+class CommandBase:
+
+ depend_files: T.List[File]
+ dependencies: T.List[T.Union[BuildTarget, 'CustomTarget']]
+ subproject: str
+
+ def flatten_command(self, cmd: T.Sequence[T.Union[str, File, programs.ExternalProgram, BuildTargetTypes]]) -> \
+ T.List[T.Union[str, File, BuildTarget, 'CustomTarget']]:
+ cmd = listify(cmd)
+ final_cmd: T.List[T.Union[str, File, BuildTarget, 'CustomTarget']] = []
+ for c in cmd:
+ if isinstance(c, str):
+ final_cmd.append(c)
+ elif isinstance(c, File):
+ self.depend_files.append(c)
+ final_cmd.append(c)
+ elif isinstance(c, programs.ExternalProgram):
+ if not c.found():
+ raise InvalidArguments('Tried to use not-found external program in "command"')
+ path = c.get_path()
+ if os.path.isabs(path):
+ # Can only add a dependency on an external program which we
+ # know the absolute path of
+ self.depend_files.append(File.from_absolute_file(path))
+ final_cmd += c.get_command()
+ elif isinstance(c, (BuildTarget, CustomTarget)):
+ self.dependencies.append(c)
+ final_cmd.append(c)
+ elif isinstance(c, CustomTargetIndex):
+ FeatureNew.single_use('CustomTargetIndex for command argument', '0.60', self.subproject)
+ self.dependencies.append(c.target)
+ final_cmd += self.flatten_command(File.from_built_file(c.get_subdir(), c.get_filename()))
+ elif isinstance(c, list):
+ final_cmd += self.flatten_command(c)
+ else:
+ raise InvalidArguments(f'Argument {c!r} in "command" is invalid')
+ return final_cmd
+
+class CustomTarget(Target, CommandBase):
+
+ typename = 'custom'
+
+ def __init__(self,
+ name: T.Optional[str],
+ subdir: str,
+ subproject: str,
+ environment: environment.Environment,
+ command: T.Sequence[T.Union[
+ str, BuildTargetTypes, GeneratedList,
+ programs.ExternalProgram, File]],
+ sources: T.Sequence[T.Union[
+ str, File, BuildTargetTypes, ExtractedObjects,
+ GeneratedList, programs.ExternalProgram]],
+ outputs: T.List[str],
*, + build_always_stale: bool = False, + build_by_default: T.Optional[bool] = None, + capture: bool = False, + console: bool = False, + depend_files: T.Optional[T.Sequence[FileOrString]] = None, + extra_depends: T.Optional[T.Sequence[T.Union[str, SourceOutputs]]] = None, + depfile: T.Optional[str] = None, + env: T.Optional[EnvironmentVariables] = None, + feed: bool = False, + install: bool = False, + install_dir: T.Optional[T.List[T.Union[str, Literal[False]]]] = None, + install_mode: T.Optional[FileMode] = None, + install_tag: T.Optional[T.List[T.Optional[str]]] = None, + absolute_paths: bool = False, + backend: T.Optional['Backend'] = None, + ): + # TODO expose keyword arg to make MachineChoice.HOST configurable + super().__init__(name, subdir, subproject, False, MachineChoice.HOST, environment, + install, build_always_stale) + self.sources = list(sources) + self.outputs = substitute_values( + outputs, get_filenames_templates_dict( + get_sources_string_names(sources, backend), + [])) + self.build_by_default = build_by_default if build_by_default is not None else install + self.capture = capture + self.console = console + self.depend_files = list(depend_files or []) + self.dependencies: T.List[T.Union[CustomTarget, BuildTarget]] = [] + # must be after depend_files and dependencies + self.command = self.flatten_command(command) + self.depfile = depfile + self.env = env or EnvironmentVariables() + self.extra_depends = list(extra_depends or []) + self.feed = feed + self.install_dir = list(install_dir or []) + self.install_mode = install_mode + self.install_tag = _process_install_tag(install_tag, len(self.outputs)) + self.name = name if name else self.outputs[0] + + # Whether to use absolute paths for all files on the commandline + self.absolute_paths = absolute_paths + + def get_default_install_dir(self) -> T.Tuple[str, str]: + return None, None + + def __repr__(self): + repr_str = "<{0} {1}: {2}>" + return repr_str.format(self.__class__.__name__, self.get_id(), self.command) + + def get_target_dependencies(self) -> T.List[T.Union[SourceOutputs, str]]: + deps: T.List[T.Union[SourceOutputs, str]] = [] + deps.extend(self.dependencies) + deps.extend(self.extra_depends) + for c in self.sources: + if isinstance(c, CustomTargetIndex): + deps.append(c.target) + elif not isinstance(c, programs.ExternalProgram): + deps.append(c) + return deps + + def get_transitive_build_target_deps(self) -> T.Set[T.Union[BuildTarget, 'CustomTarget']]: + ''' + Recursively fetch the build targets that this custom target depends on, + whether through `command:`, `depends:`, or `sources:` The recursion is + only performed on custom targets. + This is useful for setting PATH on Windows for finding required DLLs. + F.ex, if you have a python script that loads a C module that links to + other DLLs in your project. 
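+ Only BuildTarget and CustomTarget dependencies are collected below;
+ plain files and external programs in the command are skipped.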
+ '''
+ bdeps: T.Set[T.Union[BuildTarget, 'CustomTarget']] = set()
+ deps = self.get_target_dependencies()
+ for d in deps:
+ if isinstance(d, BuildTarget):
+ bdeps.add(d)
+ elif isinstance(d, CustomTarget):
+ bdeps.update(d.get_transitive_build_target_deps())
+ return bdeps
+
+ def get_dependencies(self):
+ return self.dependencies
+
+ def should_install(self) -> bool:
+ return self.install
+
+ def get_custom_install_dir(self) -> T.List[T.Union[str, Literal[False]]]:
+ return self.install_dir
+
+ def get_custom_install_mode(self) -> T.Optional['FileMode']:
+ return self.install_mode
+
+ def get_outputs(self) -> T.List[str]:
+ return self.outputs
+
+ def get_filename(self) -> str:
+ return self.outputs[0]
+
+ def get_sources(self) -> T.List[T.Union[str, File, BuildTarget, GeneratedTypes, ExtractedObjects, programs.ExternalProgram]]:
+ return self.sources
+
+ def get_generated_lists(self) -> T.List[GeneratedList]:
+ genlists: T.List[GeneratedList] = []
+ for c in self.sources:
+ if isinstance(c, GeneratedList):
+ genlists.append(c)
+ return genlists
+
+ def get_generated_sources(self) -> T.List[GeneratedList]:
+ return self.get_generated_lists()
+
+ def get_dep_outname(self, infilenames):
+ if self.depfile is None:
+ raise InvalidArguments('Tried to get depfile name for custom_target that does not have depfile defined.')
+ if infilenames:
+ plainname = os.path.basename(infilenames[0])
+ basename = os.path.splitext(plainname)[0]
+ return self.depfile.replace('@BASENAME@', basename).replace('@PLAINNAME@', plainname)
+ else:
+ if '@BASENAME@' in self.depfile or '@PLAINNAME@' in self.depfile:
+ raise InvalidArguments('Substitution in depfile for custom_target that does not have an input file.')
+ return self.depfile
+
+ def is_linkable_output(self, output: str) -> bool:
+ if output.endswith(('.a', '.dll', '.lib', '.so', '.dylib')):
+ return True
+ # libfoo.so.X soname
+ if re.search(r'\.so(\.\d+)*$', output):
+ return True
+ return False
+
+ def is_linkable_target(self) -> bool:
+ if len(self.outputs) != 1:
+ return False
+ return self.is_linkable_output(self.outputs[0])
+
+ def links_dynamically(self) -> bool:
+ """Whether this target links dynamically or statically
+
+ Does not assert the target is linkable, just that it is not shared
+
+ :return: True if is dynamically linked, otherwise False
+ """
+ suf = os.path.splitext(self.outputs[0])[-1]
+ return suf not in {'.a', '.lib'}
+
+ def get_link_deps_mapping(self, prefix: str) -> T.Mapping[str, str]:
+ return {}
+
+ def get_link_dep_subdirs(self) -> T.AbstractSet[str]:
+ return OrderedSet()
+
+ def get_all_link_deps(self):
+ return []
+
+ def is_internal(self) -> bool:
+ '''
+ Returns True if this is an uninstalled static library.
+ '''
+ if len(self.outputs) != 1:
+ return False
+ return CustomTargetIndex(self, self.outputs[0]).is_internal()
+
+ def extract_all_objects(self) -> T.List[T.Union[str, 'ExtractedObjects']]:
+ return self.get_outputs()
+
+ def type_suffix(self):
+ return "@cus"
+
+ def __getitem__(self, index: int) -> 'CustomTargetIndex':
+ return CustomTargetIndex(self, self.outputs[index])
+
+ def __setitem__(self, index, value):
+ raise NotImplementedError
+
+ def __delitem__(self, index):
+ raise NotImplementedError
+
+ def __iter__(self):
+ for i in self.outputs:
+ yield CustomTargetIndex(self, i)
+
+ def __len__(self) -> int:
+ return len(self.outputs)
+
+class CompileTarget(BuildTarget):
+ '''
+ Target that only compiles sources without linking them together.
+ It can be used as a preprocessor or a transpiler.
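+ For example (hypothetical usage), running only the preprocessor over
+ each source with an output_templ of '@BASENAME@.i' yields one
+ preprocessed file per input.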
+ ''' + + typename = 'compile' + + def __init__(self, + name: str, + subdir: str, + subproject: str, + environment: environment.Environment, + sources: T.List['SourceOutputs'], + output_templ: str, + compiler: Compiler, + backend: Backend, + compile_args: T.List[str], + include_directories: T.List[IncludeDirs], + dependencies: T.List[dependencies.Dependency]): + compilers = {compiler.get_language(): compiler} + kwargs = { + 'build_by_default': False, + f'{compiler.language}_args': compile_args, + 'include_directories': include_directories, + 'dependencies': dependencies, + } + super().__init__(name, subdir, subproject, compiler.for_machine, + sources, None, [], environment, compilers, kwargs) + self.filename = name + self.compiler = compiler + self.output_templ = output_templ + self.outputs = [] + self.sources_map: T.Dict[File, str] = {} + for f in self.sources: + self._add_output(f) + for gensrc in self.generated: + for s in gensrc.get_outputs(): + rel_src = backend.get_target_generated_dir(self, gensrc, s) + self._add_output(File.from_built_relative(rel_src)) + + def type_suffix(self) -> str: + return "@compile" + + @property + def is_unity(self) -> bool: + return False + + def _add_output(self, f: File) -> None: + plainname = os.path.basename(f.fname) + basename = os.path.splitext(plainname)[0] + o = self.output_templ.replace('@BASENAME@', basename).replace('@PLAINNAME@', plainname) + self.outputs.append(o) + self.sources_map[f] = o + + +class RunTarget(Target, CommandBase): + + typename = 'run' + + def __init__(self, name: str, + command: T.Sequence[T.Union[str, File, BuildTargetTypes, programs.ExternalProgram]], + dependencies: T.Sequence[Target], + subdir: str, + subproject: str, + environment: environment.Environment, + env: T.Optional['EnvironmentVariables'] = None, + default_env: bool = True): + # These don't produce output artifacts + super().__init__(name, subdir, subproject, False, MachineChoice.BUILD, environment) + self.dependencies = dependencies + self.depend_files = [] + self.command = self.flatten_command(command) + self.absolute_paths = False + self.env = env + self.default_env = default_env + + def __repr__(self) -> str: + repr_str = "<{0} {1}: {2}>" + return repr_str.format(self.__class__.__name__, self.get_id(), self.command[0]) + + def get_dependencies(self) -> T.List[T.Union[BuildTarget, 'CustomTarget']]: + return self.dependencies + + def get_generated_sources(self) -> T.List['GeneratedTypes']: + return [] + + def get_sources(self) -> T.List[File]: + return [] + + def should_install(self) -> bool: + return False + + def get_filename(self) -> str: + return self.name + + def get_outputs(self) -> T.List[str]: + if isinstance(self.name, str): + return [self.name] + elif isinstance(self.name, list): + return self.name + else: + raise RuntimeError('RunTarget: self.name is neither a list nor a string. 
This is a bug') + + def type_suffix(self) -> str: + return "@run" + +class AliasTarget(RunTarget): + + typename = 'alias' + + def __init__(self, name: str, dependencies: T.Sequence['Target'], + subdir: str, subproject: str, environment: environment.Environment): + super().__init__(name, [], dependencies, subdir, subproject, environment) + + def __repr__(self): + repr_str = "<{0} {1}>" + return repr_str.format(self.__class__.__name__, self.get_id()) + +class Jar(BuildTarget): + known_kwargs = known_jar_kwargs + + typename = 'jar' + + def __init__(self, name: str, subdir: str, subproject: str, for_machine: MachineChoice, + sources: T.List[SourceOutputs], structured_sources: T.Optional['StructuredSources'], + objects, environment: environment.Environment, compilers: T.Dict[str, 'Compiler'], + kwargs): + super().__init__(name, subdir, subproject, for_machine, sources, structured_sources, objects, + environment, compilers, kwargs) + for s in self.sources: + if not s.endswith('.java'): + raise InvalidArguments(f'Jar source {s} is not a java file.') + for t in self.link_targets: + if not isinstance(t, Jar): + raise InvalidArguments(f'Link target {t} is not a jar target.') + if self.structured_sources: + raise InvalidArguments('structured sources are not supported in Java targets.') + self.filename = self.name + '.jar' + self.outputs = [self.filename] + self.java_args = kwargs.get('java_args', []) + self.java_resources: T.Optional[StructuredSources] = kwargs.get('java_resources', None) + + def get_main_class(self): + return self.main_class + + def type_suffix(self): + return "@jar" + + def get_java_args(self): + return self.java_args + + def get_java_resources(self) -> T.Optional[StructuredSources]: + return self.java_resources + + def validate_install(self): + # All jar targets are installable. + pass + + def is_linkable_target(self): + return True + + def get_classpath_args(self): + cp_paths = [os.path.join(l.get_subdir(), l.get_filename()) for l in self.link_targets] + cp_string = os.pathsep.join(cp_paths) + if cp_string: + return ['-cp', os.pathsep.join(cp_paths)] + return [] + + def get_default_install_dir(self) -> T.Tuple[str, str]: + return self.environment.get_jar_dir(), '{jardir}' + +@dataclass(eq=False) +class CustomTargetIndex(HoldableObject): + + """A special opaque object returned by indexing a CustomTarget. This object + exists in Meson, but acts as a proxy in the backends, making targets depend + on the CustomTarget it's derived from, but only adding one source file to + the sources. 
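+
+    For example (illustrative): indexing a CustomTarget, as in ct[0] via
+    CustomTarget.__getitem__ above, yields a CustomTargetIndex that exposes
+    only the first output of ct while still depending on all of ct.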
+ """ + + typename: T.ClassVar[str] = 'custom' + + target: T.Union[CustomTarget, CompileTarget] + output: str + + def __post_init__(self) -> None: + self.for_machine = self.target.for_machine + + @property + def name(self) -> str: + return f'{self.target.name}[{self.output}]' + + def __repr__(self): + return ''.format(self.target, self.output) + + def get_outputs(self) -> T.List[str]: + return [self.output] + + def get_subdir(self) -> str: + return self.target.get_subdir() + + def get_filename(self) -> str: + return self.output + + def get_id(self) -> str: + return self.target.get_id() + + def get_all_link_deps(self): + return self.target.get_all_link_deps() + + def get_link_deps_mapping(self, prefix: str) -> T.Mapping[str, str]: + return self.target.get_link_deps_mapping(prefix) + + def get_link_dep_subdirs(self) -> T.AbstractSet[str]: + return self.target.get_link_dep_subdirs() + + def is_linkable_target(self) -> bool: + return self.target.is_linkable_output(self.output) + + def links_dynamically(self) -> bool: + """Whether this target links dynamically or statically + + Does not assert the target is linkable, just that it is not shared + + :return: True if is dynamically linked, otherwise False + """ + suf = os.path.splitext(self.output)[-1] + return suf not in {'.a', '.lib'} + + def should_install(self) -> bool: + return self.target.should_install() + + def is_internal(self) -> bool: + ''' + Returns True if this is a not installed static library + ''' + suf = os.path.splitext(self.output)[-1] + return suf in {'.a', '.lib'} and not self.should_install() + + def extract_all_objects(self) -> T.List[T.Union[str, 'ExtractedObjects']]: + return self.target.extract_all_objects() + + def get_custom_install_dir(self) -> T.List[T.Union[str, Literal[False]]]: + return self.target.get_custom_install_dir() + +class ConfigurationData(HoldableObject): + def __init__(self, initial_values: T.Optional[T.Union[ + T.Dict[str, T.Tuple[T.Union[str, int, bool], T.Optional[str]]], + T.Dict[str, T.Union[str, int, bool]]] + ] = None): + super().__init__() + self.values: T.Dict[str, T.Tuple[T.Union[str, int, bool], T.Optional[str]]] = \ + {k: v if isinstance(v, tuple) else (v, None) for k, v in initial_values.items()} if initial_values else {} + self.used: bool = False + + def __repr__(self) -> str: + return repr(self.values) + + def __contains__(self, value: str) -> bool: + return value in self.values + + def __bool__(self) -> bool: + return bool(self.values) + + def get(self, name: str) -> T.Tuple[T.Union[str, int, bool], T.Optional[str]]: + return self.values[name] # (val, desc) + + def keys(self) -> T.Iterator[str]: + return self.values.keys() + +# A bit poorly named, but this represents plain data files to copy +# during install. +@dataclass(eq=False) +class Data(HoldableObject): + sources: T.List[File] + install_dir: str + install_dir_name: str + install_mode: 'FileMode' + subproject: str + rename: T.List[str] = None + install_tag: T.Optional[str] = None + data_type: str = None + + def __post_init__(self) -> None: + if self.rename is None: + self.rename = [os.path.basename(f.fname) for f in self.sources] + +@dataclass(eq=False) +class SymlinkData(HoldableObject): + target: str + name: str + install_dir: str + subproject: str + install_tag: T.Optional[str] = None + + def __post_init__(self) -> None: + if self.name != os.path.basename(self.name): + raise InvalidArguments(f'Link name is "{self.name}", but link names cannot contain path separators. 
' + 'The dir part should be in install_dir.') + +@dataclass(eq=False) +class TestSetup: + exe_wrapper: T.List[str] + gdb: bool + timeout_multiplier: int + env: EnvironmentVariables + exclude_suites: T.List[str] + +def get_sources_string_names(sources, backend): + ''' + For the specified list of @sources which can be strings, Files, or targets, + get all the output basenames. + ''' + names = [] + for s in sources: + if isinstance(s, str): + names.append(s) + elif isinstance(s, (BuildTarget, CustomTarget, CustomTargetIndex, GeneratedList)): + names += s.get_outputs() + elif isinstance(s, ExtractedObjects): + names += backend.determine_ext_objs(s) + elif isinstance(s, File): + names.append(s.fname) + else: + raise AssertionError(f'Unknown source type: {s!r}') + return names + +def load(build_dir: str) -> Build: + filename = os.path.join(build_dir, 'meson-private', 'build.dat') + try: + b = pickle_load(filename, 'Build data', Build) + # We excluded coredata when saving Build object, load it separately + b.environment.coredata = coredata.load(build_dir) + return b + except FileNotFoundError: + raise MesonException(f'No such build data file as {filename!r}.') + + +def save(obj: Build, filename: str) -> None: + # Exclude coredata because we pickle it separately already + cdata = obj.environment.coredata + obj.environment.coredata = None + try: + with open(filename, 'wb') as f: + pickle.dump(obj, f) + finally: + obj.environment.coredata = cdata diff --git a/vendored-meson/meson/mesonbuild/cargo/__init__.py b/vendored-meson/meson/mesonbuild/cargo/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/vendored-meson/meson/mesonbuild/cargo/builder.py b/vendored-meson/meson/mesonbuild/cargo/builder.py new file mode 100644 index 000000000000..49bc65db5e86 --- /dev/null +++ b/vendored-meson/meson/mesonbuild/cargo/builder.py @@ -0,0 +1,284 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright © 2022-2023 Intel Corporation + +"""Provides helpers for building AST + +This is meant to make building Meson AST from foreign (largely declarative) +build descriptions easier. +""" + +from __future__ import annotations +import builtins +import dataclasses +import typing as T + +from .. import mparser + + +def _token(tid: str, filename: str, value: mparser.TV_TokenTypes) -> mparser.Token[mparser.TV_TokenTypes]: + """Create a Token object, but with the line numbers stubbed out. 
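+
+    For example (illustrative): _token('string', fname, 'hello') yields a
+    Token suitable for wrapping in mparser.StringNode, with every location
+    field stubbed to -1.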
+
+    :param tid: the token id (such as string, number, etc)
+    :param filename: the filename that the token was generated from
+    :param value: the value of the token
+    :return: A Token object
+    """
+    return mparser.Token(tid, filename, -1, -1, -1, (-1, -1), value)
+
+
+def string(value: str, filename: str) -> mparser.StringNode:
+    """Build a StringNode
+
+    :param value: the value of the string
+    :param filename: the file that the value came from
+    :return: A StringNode
+    """
+    return mparser.StringNode(_token('string', filename, value))
+
+
+def number(value: int, filename: str) -> mparser.NumberNode:
+    """Build a NumberNode
+
+    :param value: the value of the number
+    :param filename: the file that the value came from
+    :return: A NumberNode
+    """
+    return mparser.NumberNode(_token('number', filename, value))
+
+
+def bool(value: builtins.bool, filename: str) -> mparser.BooleanNode:
+    """Build a BooleanNode
+
+    :param value: the value of the boolean
+    :param filename: the file that the value came from
+    :return: A BooleanNode
+    """
+    return mparser.BooleanNode(_token('bool', filename, value))
+
+
+def array(value: T.List[mparser.BaseNode], filename: str) -> mparser.ArrayNode:
+    """Build an ArrayNode
+
+    :param value: A list of nodes to insert into the array
+    :param filename: The file the array is from
+    :return: An ArrayNode built from the arguments
+    """
+    args = mparser.ArgumentNode(_token('array', filename, 'unused'))
+    args.arguments = value
+    return mparser.ArrayNode(args, -1, -1, -1, -1)
+
+
+def identifier(value: str, filename: str) -> mparser.IdNode:
+    """Build an IdNode
+
+    :param value: the value of the identifier
+    :param filename: the file that the value came from
+    :return: An IdNode
+    """
+    return mparser.IdNode(_token('id', filename, value))
+
+
+def method(name: str, id_: mparser.IdNode,
+           pos: T.Optional[T.List[mparser.BaseNode]] = None,
+           kw: T.Optional[T.Mapping[str, mparser.BaseNode]] = None,
+           ) -> mparser.MethodNode:
+    """Create a method call.
+
+    :param name: the name of the method
+    :param id_: the object to call the method of
+    :param pos: a list of positional arguments, defaults to None
+    :param kw: a dictionary of keyword arguments, defaults to None
+    :return: a method call object
+    """
+    args = mparser.ArgumentNode(_token('array', id_.filename, 'unused'))
+    if pos is not None:
+        args.arguments = pos
+    if kw is not None:
+        args.kwargs = {identifier(k, id_.filename): v for k, v in kw.items()}
+    return mparser.MethodNode(id_.filename, -1, -1, id_, name, args)
+
+
+def function(name: str, filename: str,
+             pos: T.Optional[T.List[mparser.BaseNode]] = None,
+             kw: T.Optional[T.Mapping[str, mparser.BaseNode]] = None,
+             ) -> mparser.FunctionNode:
+    """Create a function call.
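+
+    For example (illustrative): function('project', fname,
+    pos=[string('foo', fname), string('rust', fname)]) builds the AST for
+    project('foo', 'rust'), the same pattern _create_project() uses in
+    cargo/interpreter.py.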
+
+    :param name: the name of the function
+    :param filename: The name of the current file being evaluated
+    :param pos: a list of positional arguments, defaults to None
+    :param kw: a dictionary of keyword arguments, defaults to None
+    :return: a function call object
+    """
+    args = mparser.ArgumentNode(_token('array', filename, 'unused'))
+    if pos is not None:
+        args.arguments = pos
+    if kw is not None:
+        args.kwargs = {identifier(k, filename): v for k, v in kw.items()}
+    return mparser.FunctionNode(filename, -1, -1, -1, -1, name, args)
+
+
+def equal(lhs: mparser.BaseNode, rhs: mparser.BaseNode) -> mparser.ComparisonNode:
+    """Create an equality operation
+
+    :param lhs: The left hand side of the equal
+    :param rhs: the right hand side of the equal
+    :return: A comparison node
+    """
+    return mparser.ComparisonNode('==', lhs, rhs)
+
+
+def or_(lhs: mparser.BaseNode, rhs: mparser.BaseNode) -> mparser.OrNode:
+    """Create an OrNode
+
+    :param lhs: The Left of the Node
+    :param rhs: The Right of the Node
+    :return: The OrNode
+    """
+    return mparser.OrNode(lhs, rhs)
+
+
+def and_(lhs: mparser.BaseNode, rhs: mparser.BaseNode) -> mparser.AndNode:
+    """Create an AndNode
+
+    :param lhs: The left of the And
+    :param rhs: The right of the And
+    :return: The AndNode
+    """
+    return mparser.AndNode(lhs, rhs)
+
+
+def not_(value: mparser.BaseNode, filename: str) -> mparser.NotNode:
+    """Create a not node
+
+    :param value: The value to negate
+    :param filename: the string filename
+    :return: The NotNode
+    """
+    return mparser.NotNode(_token('not', filename, ''), value)
+
+
+def assign(value: mparser.BaseNode, varname: str, filename: str) -> mparser.AssignmentNode:
+    """Create an AssignmentNode
+
+    :param value: The rvalue
+    :param varname: The lvalue
+    :param filename: The filename
+    :return: An AssignmentNode
+    """
+    return mparser.AssignmentNode(filename, -1, -1, varname, value)
+
+
+def block(filename: str) -> mparser.CodeBlockNode:
+    return mparser.CodeBlockNode(_token('node', filename, ''))
+
+
+@dataclasses.dataclass
+class Builder:
+
+    filename: str
+
+    def assign(self, value: mparser.BaseNode, varname: str) -> mparser.AssignmentNode:
+        return assign(value, varname, self.filename)
+
+    def string(self, value: str) -> mparser.StringNode:
+        """Build a StringNode
+
+        :param value: the value of the string
+        :return: A StringNode
+        """
+        return string(value, self.filename)
+
+    def number(self, value: int) -> mparser.NumberNode:
+        """Build a NumberNode
+
+        :param value: the value of the number
+        :return: A NumberNode
+        """
+        return number(value, self.filename)
+
+    def bool(self, value: builtins.bool) -> mparser.BooleanNode:
+        """Build a BooleanNode
+
+        :param value: the value of the boolean
+        :return: A BooleanNode
+        """
+        return bool(value, self.filename)
+
+    def array(self, value: T.List[mparser.BaseNode]) -> mparser.ArrayNode:
+        """Build an ArrayNode
+
+        :param value: A list of nodes to insert into the array
+        :return: An ArrayNode built from the arguments
+        """
+        return array(value, self.filename)
+
+    def identifier(self, value: str) -> mparser.IdNode:
+        """Build an IdNode
+
+        :param value: the value of the identifier
+        :return: An IdNode
+        """
+        return identifier(value, self.filename)
+
+    def method(self, name: str, id_: mparser.IdNode,
+               pos: T.Optional[T.List[mparser.BaseNode]] = None,
+               kw: T.Optional[T.Mapping[str, mparser.BaseNode]] = None,
+               ) -> mparser.MethodNode:
+        """Create a method call.
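+
+        For example (illustrative): given b = Builder(fname),
+        b.method('get_variable', b.identifier('dep_foo'), pos=[b.string('dep')])
+        builds the AST for dep_foo.get_variable('dep'), mirroring how
+        _create_lib() wires up dependencies in cargo/interpreter.py.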
+
+        :param name: the name of the method
+        :param id_: the object to call the method of
+        :param pos: a list of positional arguments, defaults to None
+        :param kw: a dictionary of keyword arguments, defaults to None
+        :return: a method call object
+        """
+        return method(name, id_, pos or [], kw or {})
+
+    def function(self, name: str,
+                 pos: T.Optional[T.List[mparser.BaseNode]] = None,
+                 kw: T.Optional[T.Mapping[str, mparser.BaseNode]] = None,
+                 ) -> mparser.FunctionNode:
+        """Create a function call.
+
+        :param name: the name of the function
+        :param pos: a list of positional arguments, defaults to None
+        :param kw: a dictionary of keyword arguments, defaults to None
+        :return: a function call object
+        """
+        return function(name, self.filename, pos or [], kw or {})
+
+    def equal(self, lhs: mparser.BaseNode, rhs: mparser.BaseNode) -> mparser.ComparisonNode:
+        """Create an equality operation
+
+        :param lhs: The left hand side of the equal
+        :param rhs: the right hand side of the equal
+        :return: A comparison node
+        """
+        return equal(lhs, rhs)
+
+    def or_(self, lhs: mparser.BaseNode, rhs: mparser.BaseNode) -> mparser.OrNode:
+        """Create an OrNode
+
+        :param lhs: The Left of the Node
+        :param rhs: The Right of the Node
+        :return: The OrNode
+        """
+        return or_(lhs, rhs)
+
+    def and_(self, lhs: mparser.BaseNode, rhs: mparser.BaseNode) -> mparser.AndNode:
+        """Create an AndNode
+
+        :param lhs: The left of the And
+        :param rhs: The right of the And
+        :return: The AndNode
+        """
+        return and_(lhs, rhs)
+
+    def not_(self, value: mparser.BaseNode) -> mparser.NotNode:
+        """Create a not node
+
+        :param value: The value to negate
+        :return: The NotNode
+        """
+        return not_(value, self.filename)
diff --git a/vendored-meson/meson/mesonbuild/cargo/cfg.py b/vendored-meson/meson/mesonbuild/cargo/cfg.py
new file mode 100644
index 000000000000..ed6fd53d6843
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/cargo/cfg.py
@@ -0,0 +1,276 @@
+# SPDX-License-Identifier: Apache-2.0
+# Copyright © 2022-2023 Intel Corporation
+
+"""Rust CFG parser.
+
+Rust uses its `cfg()` format in cargo.
+
+This may have the following functions:
+ - all()
+ - any()
+ - not()
+
+Additionally, it is made up of `identifier [ = str]`, where the str part is
+optional, so you could have examples like:
+```
+[target.'cfg(unix)'.dependencies]
+[target.'cfg(target_arch = "x86_64")'.dependencies]
+[target.'cfg(all(target_arch = "x86_64", target_arch = "x86"))'.dependencies]
+```
+"""
+
+from __future__ import annotations
+import dataclasses
+import enum
+import functools
+import typing as T
+
+
+from . import builder
+from .. import mparser
+from ..mesonlib import MesonBugException
+
+if T.TYPE_CHECKING:
+    _T = T.TypeVar('_T')
+    _LEX_TOKEN = T.Tuple['TokenType', T.Optional[str]]
+    _LEX_STREAM = T.Iterable[_LEX_TOKEN]
+    _LEX_STREAM_AH = T.Iterator[T.Tuple[_LEX_TOKEN, T.Optional[_LEX_TOKEN]]]
+
+
+class TokenType(enum.Enum):
+
+    LPAREN = enum.auto()
+    RPAREN = enum.auto()
+    STRING = enum.auto()
+    IDENTIFIER = enum.auto()
+    ALL = enum.auto()
+    ANY = enum.auto()
+    NOT = enum.auto()
+    COMMA = enum.auto()
+    EQUAL = enum.auto()
+
+
+def lexer(raw: str) -> _LEX_STREAM:
+    """Lex a cfg() expression.
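+
+    As a rough illustration (not a doctest):
+    lexer('any(unix, target_arch = "x86_64")') yields ANY, LPAREN,
+    IDENTIFIER 'unix', COMMA, IDENTIFIER 'target_arch', EQUAL,
+    STRING 'x86_64', RPAREN.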
+ + :param raw: The raw cfg() expression + :return: An iterable of tokens + """ + buffer: T.List[str] = [] + is_string: bool = False + for s in raw: + if s.isspace() or s in {')', '(', ',', '='} or (s == '"' and buffer): + val = ''.join(buffer) + buffer.clear() + if is_string: + yield (TokenType.STRING, val) + elif val == 'any': + yield (TokenType.ANY, None) + elif val == 'all': + yield (TokenType.ALL, None) + elif val == 'not': + yield (TokenType.NOT, None) + elif val: + yield (TokenType.IDENTIFIER, val) + + if s == '(': + yield (TokenType.LPAREN, None) + continue + elif s == ')': + yield (TokenType.RPAREN, None) + continue + elif s == ',': + yield (TokenType.COMMA, None) + continue + elif s == '=': + yield (TokenType.EQUAL, None) + continue + elif s.isspace(): + continue + + if s == '"': + is_string = not is_string + else: + buffer.append(s) + if buffer: + # This should always be an identifier + yield (TokenType.IDENTIFIER, ''.join(buffer)) + + +def lookahead(iter: T.Iterator[_T]) -> T.Iterator[T.Tuple[_T, T.Optional[_T]]]: + """Get the current value of the iterable, and the next if possible. + + :param iter: The iterable to look into + :yield: A tuple of the current value, and, if possible, the next + :return: nothing + """ + current: _T + next_: T.Optional[_T] + try: + next_ = next(iter) + except StopIteration: + # This is an empty iterator, there's nothing to look ahead to + return + + while True: + current = next_ + try: + next_ = next(iter) + except StopIteration: + next_ = None + + yield current, next_ + + if next_ is None: + break + + +@dataclasses.dataclass +class IR: + + """Base IR node for Cargo CFG.""" + + filename: str + +@dataclasses.dataclass +class String(IR): + + value: str + + +@dataclasses.dataclass +class Identifier(IR): + + value: str + + +@dataclasses.dataclass +class Equal(IR): + + lhs: IR + rhs: IR + + +@dataclasses.dataclass +class Any(IR): + + args: T.List[IR] + + +@dataclasses.dataclass +class All(IR): + + args: T.List[IR] + + +@dataclasses.dataclass +class Not(IR): + + value: IR + + +def _parse(ast: _LEX_STREAM_AH, filename: str) -> IR: + (token, value), n_stream = next(ast) + if n_stream is not None: + ntoken, _ = n_stream + else: + ntoken, _ = (None, None) + + stream: T.List[_LEX_TOKEN] + if token is TokenType.IDENTIFIER: + if ntoken is TokenType.EQUAL: + return Equal(filename, Identifier(filename, value), _parse(ast, filename)) + if token is TokenType.STRING: + return String(filename, value) + if token is TokenType.EQUAL: + # In this case the previous caller already has handled the equal + return _parse(ast, filename) + if token in {TokenType.ANY, TokenType.ALL}: + type_ = All if token is TokenType.ALL else Any + assert ntoken is TokenType.LPAREN + next(ast) # advance the iterator to get rid of the LPAREN + stream = [] + args: T.List[IR] = [] + while token is not TokenType.RPAREN: + (token, value), _ = next(ast) + if token is TokenType.COMMA: + args.append(_parse(lookahead(iter(stream)), filename)) + stream.clear() + else: + stream.append((token, value)) + if stream: + args.append(_parse(lookahead(iter(stream)), filename)) + return type_(filename, args) + if token is TokenType.NOT: + next(ast) # advance the iterator to get rid of the LPAREN + stream = [] + # Mypy can't figure out that token is overridden inside the while loop + while token is not TokenType.RPAREN: # type: ignore + (token, value), _ = next(ast) + stream.append((token, value)) + return Not(filename, _parse(lookahead(iter(stream)), filename)) + + raise MesonBugException(f'Unhandled Cargo token: 
{token}')
+
+
+def parse(ast: _LEX_STREAM, filename: str) -> IR:
+    """Parse the tokenized list into Meson AST.
+
+    :param ast: An iterable of Tokens
+    :param filename: The name of the file being parsed
+    :return: An mparser Node to be used as a conditional
+    """
+    ast_i: _LEX_STREAM_AH = lookahead(iter(ast))
+    return _parse(ast_i, filename)
+
+
+@functools.singledispatch
+def ir_to_meson(ir: T.Any) -> mparser.BaseNode:
+    raise NotImplementedError
+
+
+@ir_to_meson.register
+def _(ir: String) -> mparser.BaseNode:
+    return builder.string(ir.value, ir.filename)
+
+
+@ir_to_meson.register
+def _(ir: Identifier) -> mparser.BaseNode:
+    host_machine = builder.identifier('host_machine', ir.filename)
+    if ir.value == "target_arch":
+        return builder.method('cpu_family', host_machine)
+    elif ir.value in {"target_os", "target_family"}:
+        return builder.method('system', host_machine)
+    elif ir.value == "target_endian":
+        return builder.method('endian', host_machine)
+    raise MesonBugException(f"Unhandled Cargo identifier: {ir.value}")
+
+
+@ir_to_meson.register
+def _(ir: Equal) -> mparser.BaseNode:
+    return builder.equal(ir_to_meson(ir.lhs), ir_to_meson(ir.rhs))
+
+
+@ir_to_meson.register
+def _(ir: Not) -> mparser.BaseNode:
+    return builder.not_(ir_to_meson(ir.value), ir.filename)
+
+
+@ir_to_meson.register
+def _(ir: Any) -> mparser.BaseNode:
+    args = iter(reversed(ir.args))
+    last = next(args)
+    cur = builder.or_(ir_to_meson(next(args)), ir_to_meson(last))
+    for a in args:
+        cur = builder.or_(ir_to_meson(a), cur)
+    return cur
+
+
+@ir_to_meson.register
+def _(ir: All) -> mparser.BaseNode:
+    args = iter(reversed(ir.args))
+    last = next(args)
+    cur = builder.and_(ir_to_meson(next(args)), ir_to_meson(last))
+    for a in args:
+        cur = builder.and_(ir_to_meson(a), cur)
+    return cur
diff --git a/vendored-meson/meson/mesonbuild/cargo/interpreter.py b/vendored-meson/meson/mesonbuild/cargo/interpreter.py
new file mode 100644
index 000000000000..59e1a1f068d1
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/cargo/interpreter.py
@@ -0,0 +1,451 @@
+# SPDX-License-Identifier: Apache-2.0
+# Copyright © 2022-2023 Intel Corporation
+
+"""Interpreter for converting Cargo Toml definitions to Meson AST
+
+There are some notable limits here. We don't even try to convert something
+that has a build.rs: there are so few limits on what a build.rs is allowed to
+do (basically none), and no good way for us to convert its effects. In that
+case, an actual meson port will be required.
+"""
+
+from __future__ import annotations
+import dataclasses
+import glob
+import importlib
+import itertools
+import json
+import os
+import shutil
+import typing as T
+
+from . import builder
+from . import version
+from .. import mparser
+from .._pathlib import Path
+from ..mesonlib import MesonException, Popen_safe
+
+if T.TYPE_CHECKING:
+    from types import ModuleType
+
+    from . import manifest
+    from ..environment import Environment
+
+# tomllib is present in Python 3.11; before that, it is a PyPI module called
+# tomli. We try to import tomllib first, then fall back to tomli.
+# TODO: add a fallback to toml2json?
+tomllib: T.Optional[ModuleType] = None
+toml2json: T.Optional[str] = None
+for t in ['tomllib', 'tomli']:
+    try:
+        tomllib = importlib.import_module(t)
+        break
+    except ImportError:
+        pass
+else:
+    # TODO: it would be better to use an Executable here, which could be
+    # looked up in the cross file or provided by a wrap. However, that will
+    # have to be passed in externally, since we don't (and I don't think we
+    # should) have access to the `Environment` for that in this module.
+    toml2json = shutil.which('toml2json')
+
+
+def load_toml(filename: str) -> T.Dict[object, object]:
+    if tomllib:
+        with open(filename, 'rb') as f:
+            raw = tomllib.load(f)
+    else:
+        if toml2json is None:
+            raise MesonException('Could not find an implementation of tomllib, nor toml2json')
+
+        p, out, err = Popen_safe([toml2json, filename])
+        if p.returncode != 0:
+            raise MesonException('toml2json failed to decode output\n', err)
+
+        raw = json.loads(out)
+
+    if not isinstance(raw, dict):
+        raise MesonException("Cargo.toml isn't a dictionary? How did that happen?")
+
+    return raw
+
+
+def fixup_meson_varname(name: str) -> str:
+    """Fixup a meson variable name
+
+    :param name: The name to fix
+    :return: the fixed name
+    """
+    return name.replace('-', '_')
+
+# Pylance can figure out that these do not, in fact, overlap, but mypy can't
+@T.overload
+def _fixup_raw_mappings(d: manifest.BuildTarget) -> manifest.FixedBuildTarget: ...  # type: ignore
+
+@T.overload
+def _fixup_raw_mappings(d: manifest.LibTarget) -> manifest.FixedLibTarget: ...  # type: ignore
+
+@T.overload
+def _fixup_raw_mappings(d: manifest.Dependency) -> manifest.FixedDependency: ...
+
+def _fixup_raw_mappings(d: T.Union[manifest.BuildTarget, manifest.LibTarget, manifest.Dependency]
+                        ) -> T.Union[manifest.FixedBuildTarget, manifest.FixedLibTarget,
+                                     manifest.FixedDependency]:
+    """Fixup raw cargo mappings to ones more suitable for python to consume.
+
+    This does the following:
+    * replaces any `-` with `_`: cargo likes the former, but python dicts make
+      keys with `-` in them awkward to work with
+    * converts Dependency versions from the cargo format to something meson
+      understands
+
+    :param d: The mapping to fix
+    :return: the fixed mapping
+    """
+    raw = {fixup_meson_varname(k): v for k, v in d.items()}
+    if 'version' in raw:
+        assert isinstance(raw['version'], str), 'for mypy'
+        raw['version'] = version.convert(raw['version'])
+    return T.cast('T.Union[manifest.FixedBuildTarget, manifest.FixedLibTarget, manifest.FixedDependency]', raw)
+
+
+@dataclasses.dataclass
+class Package:
+
+    """Representation of a Cargo Package entry, with defaults filled in."""
+
+    name: str
+    version: str
+    description: str
+    resolver: T.Optional[str] = None
+    authors: T.List[str] = dataclasses.field(default_factory=list)
+    edition: manifest.EDITION = '2015'
+    rust_version: T.Optional[str] = None
+    documentation: T.Optional[str] = None
+    readme: T.Optional[str] = None
+    homepage: T.Optional[str] = None
+    repository: T.Optional[str] = None
+    license: T.Optional[str] = None
+    license_file: T.Optional[str] = None
+    keywords: T.List[str] = dataclasses.field(default_factory=list)
+    categories: T.List[str] = dataclasses.field(default_factory=list)
+    workspace: T.Optional[str] = None
+    build: T.Optional[str] = None
+    links: T.Optional[str] = None
+    exclude: T.List[str] = dataclasses.field(default_factory=list)
+    include: T.List[str] = dataclasses.field(default_factory=list)
+    publish: bool = True
+    metadata: T.Dict[str, T.Dict[str, str]] = dataclasses.field(default_factory=dict)
+    default_run: T.Optional[str] = None
+    autobins: bool = True
+    autoexamples: bool = True
+    autotests: bool = True
+    autobenches: bool = True
+
+
+@dataclasses.dataclass
+class Dependency:
+
+    """Representation of a Cargo Dependency Entry."""
+
+    version: T.List[str]
+    registry: T.Optional[str] = None
+    git: 
T.Optional[str] = None + branch: T.Optional[str] = None + rev: T.Optional[str] = None + path: T.Optional[str] = None + optional: bool = False + package: T.Optional[str] = None + default_features: bool = False + features: T.List[str] = dataclasses.field(default_factory=list) + + @classmethod + def from_raw(cls, raw: manifest.DependencyV) -> Dependency: + """Create a dependency from a raw cargo dictionary""" + if isinstance(raw, str): + return cls(version.convert(raw)) + return cls(**_fixup_raw_mappings(raw)) + + +@dataclasses.dataclass +class BuildTarget: + + name: str + crate_type: T.List[manifest.CRATE_TYPE] = dataclasses.field(default_factory=lambda: ['lib']) + path: dataclasses.InitVar[T.Optional[str]] = None + + # https://doc.rust-lang.org/cargo/reference/cargo-targets.html#the-test-field + # True for lib, bin, test + test: bool = True + + # https://doc.rust-lang.org/cargo/reference/cargo-targets.html#the-doctest-field + # True for lib + doctest: bool = False + + # https://doc.rust-lang.org/cargo/reference/cargo-targets.html#the-bench-field + # True for lib, bin, benchmark + bench: bool = True + + # https://doc.rust-lang.org/cargo/reference/cargo-targets.html#the-doc-field + # True for libraries and binaries + doc: bool = False + + harness: bool = True + edition: manifest.EDITION = '2015' + required_features: T.List[str] = dataclasses.field(default_factory=list) + plugin: bool = False + + +@dataclasses.dataclass +class Library(BuildTarget): + + """Representation of a Cargo Library Entry.""" + + doctest: bool = True + doc: bool = True + proc_macro: bool = False + crate_type: T.List[manifest.CRATE_TYPE] = dataclasses.field(default_factory=lambda: ['lib']) + doc_scrape_examples: bool = True + + +@dataclasses.dataclass +class Binary(BuildTarget): + + """Representation of a Cargo Bin Entry.""" + + doc: bool = True + + +@dataclasses.dataclass +class Test(BuildTarget): + + """Representation of a Cargo Test Entry.""" + + bench: bool = True + + +@dataclasses.dataclass +class Benchmark(BuildTarget): + + """Representation of a Cargo Benchmark Entry.""" + + test: bool = True + + +@dataclasses.dataclass +class Example(BuildTarget): + + """Representation of a Cargo Example Entry.""" + + crate_type: T.List[manifest.CRATE_TYPE] = dataclasses.field(default_factory=lambda: ['bin']) + + +@dataclasses.dataclass +class Manifest: + + """Cargo Manifest definition. + + Most of these values map up to the Cargo Manifest, but with default values + if not provided. + + Cargo subprojects can contain what Meson wants to treat as multiple, + interdependent, subprojects. + + :param subdir: the subdirectory that this cargo project is in + :param path: the path within the cargo subproject. 
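+
+    For example (illustrative): a workspace member loaded from
+    subprojects/foo-1.0/crates/bar would have subdir='subprojects/foo-1.0'
+    and path='crates/bar', matching how _load_manifests() fills these in.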
+ """ + + package: Package + dependencies: T.Dict[str, Dependency] + dev_dependencies: T.Dict[str, Dependency] + build_dependencies: T.Dict[str, Dependency] + lib: Library + bin: T.List[Binary] + test: T.List[Test] + bench: T.List[Benchmark] + example: T.List[Example] + features: T.Dict[str, T.List[str]] + target: T.Dict[str, T.Dict[str, Dependency]] + subdir: str + path: str = '' + + +def _create_project(package: Package, build: builder.Builder, env: Environment) -> mparser.FunctionNode: + """Create a function call + + :param package: The Cargo package to generate from + :param filename: The full path to the file + :param meson_version: The generating meson version + :return: a FunctionNode + """ + args: T.List[mparser.BaseNode] = [] + args.extend([ + build.string(package.name), + build.string('rust'), + ]) + kwargs: T.Dict[str, mparser.BaseNode] = { + 'version': build.string(package.version), + # Always assume that the generated meson is using the latest features + # This will warn when when we generate deprecated code, which is helpful + # for the upkeep of the module + 'meson_version': build.string(f'>= {env.coredata.version}'), + 'default_options': build.array([build.string(f'rust_std={package.edition}')]), + } + if package.license: + kwargs['license'] = build.string(package.license) + elif package.license_file: + kwargs['license_files'] = build.string(package.license_file) + + return build.function('project', args, kwargs) + + +def _convert_manifest(raw_manifest: manifest.Manifest, subdir: str, path: str = '') -> Manifest: + # This cast is a bit of a hack to deal with proc-macro + lib = _fixup_raw_mappings(raw_manifest.get('lib', {})) + + # We need to set the name field if it's not set manually, + # including if other fields are set in the lib section + lib.setdefault('name', raw_manifest['package']['name']) + + pkg = T.cast('manifest.FixedPackage', + {fixup_meson_varname(k): v for k, v in raw_manifest['package'].items()}) + + return Manifest( + Package(**pkg), + {k: Dependency.from_raw(v) for k, v in raw_manifest.get('dependencies', {}).items()}, + {k: Dependency.from_raw(v) for k, v in raw_manifest.get('dev-dependencies', {}).items()}, + {k: Dependency.from_raw(v) for k, v in raw_manifest.get('build-dependencies', {}).items()}, + Library(**lib), + [Binary(**_fixup_raw_mappings(b)) for b in raw_manifest.get('bin', {})], + [Test(**_fixup_raw_mappings(b)) for b in raw_manifest.get('test', {})], + [Benchmark(**_fixup_raw_mappings(b)) for b in raw_manifest.get('bench', {})], + [Example(**_fixup_raw_mappings(b)) for b in raw_manifest.get('example', {})], + raw_manifest.get('features', {}), + {k: {k2: Dependency.from_raw(v2) for k2, v2 in v['dependencies'].items()} + for k, v in raw_manifest.get('target', {}).items()}, + subdir, + path, + ) + + +def _load_manifests(subdir: str) -> T.Dict[str, Manifest]: + filename = os.path.join(subdir, 'Cargo.toml') + raw = load_toml(filename) + + manifests: T.Dict[str, Manifest] = {} + + raw_manifest: T.Union[manifest.Manifest, manifest.VirtualManifest] + if 'package' in raw: + raw_manifest = T.cast('manifest.Manifest', raw) + manifest_ = _convert_manifest(raw_manifest, subdir) + manifests[manifest_.package.name] = manifest_ + else: + raw_manifest = T.cast('manifest.VirtualManifest', raw) + + if 'workspace' in raw_manifest: + # XXX: need to verify that python glob and cargo globbing are the + # same and probably write a glob implementation. 
Blarg + + # We need to chdir here to make the glob work correctly + pwd = os.getcwd() + os.chdir(subdir) + members: T.Iterable[str] + try: + members = itertools.chain.from_iterable( + glob.glob(m) for m in raw_manifest['workspace']['members']) + finally: + os.chdir(pwd) + if 'exclude' in raw_manifest['workspace']: + members = (x for x in members if x not in raw_manifest['workspace']['exclude']) + + for m in members: + filename = os.path.join(subdir, m, 'Cargo.toml') + raw = load_toml(filename) + + raw_manifest = T.cast('manifest.Manifest', raw) + man = _convert_manifest(raw_manifest, subdir, m) + manifests[man.package.name] = man + + return manifests + + +def load_all_manifests(subproject_dir: str) -> T.Dict[str, Manifest]: + """Find all cargo subprojects, and load them + + :param subproject_dir: Directory to look for subprojects in + :return: A dictionary of rust project names to Manifests + """ + manifests: T.Dict[str, Manifest] = {} + for p in Path(subproject_dir).iterdir(): + if p.is_dir() and (p / 'Cargo.toml').exists(): + manifests.update(_load_manifests(str(p))) + return manifests + + +def _create_lib(cargo: Manifest, build: builder.Builder) -> T.List[mparser.BaseNode]: + kw: T.Dict[str, mparser.BaseNode] = {} + if cargo.dependencies: + ids = [build.identifier(f'dep_{n}') for n in cargo.dependencies] + kw['dependencies'] = build.array( + [build.method('get_variable', i, [build.string('dep')]) for i in ids]) + + # FIXME: currently assuming that an rlib is being generated, which is + # the most common. + return [ + build.assign( + build.function( + 'static_library', + [ + build.string(fixup_meson_varname(cargo.package.name)), + build.string(os.path.join('src', 'lib.rs')), + ], + kw, + ), + 'lib' + ), + + build.assign( + build.function( + 'declare_dependency', + kw={'link_with': build.identifier('lib'), **kw}, + ), + 'dep' + ) + ] + + +def interpret(cargo: Manifest, env: Environment) -> mparser.CodeBlockNode: + filename = os.path.join(cargo.subdir, cargo.path, 'Cargo.toml') + build = builder.Builder(filename) + + ast: T.List[mparser.BaseNode] = [ + _create_project(cargo.package, build, env), + build.assign(build.function('import', [build.string('rust')]), 'rust'), + ] + + if cargo.dependencies: + for name, dep in cargo.dependencies.items(): + kw = { + 'version': build.array([build.string(s) for s in dep.version]), + } + ast.extend([ + build.assign( + build.method( + 'cargo', + build.identifier('rust'), + [build.string(name)], + kw, + ), + f'dep_{fixup_meson_varname(name)}', + ), + ]) + + # Libs are always auto-discovered and there's no other way to handle them, + # which is unfortunate for reproducability + if os.path.exists(os.path.join(env.source_dir, cargo.subdir, cargo.path, 'src', 'lib.rs')): + ast.extend(_create_lib(cargo, build)) + + # XXX: make this not awful + block = builder.block(filename) + block.lines = ast + return block diff --git a/vendored-meson/meson/mesonbuild/cargo/manifest.py b/vendored-meson/meson/mesonbuild/cargo/manifest.py new file mode 100644 index 000000000000..e6192d03cd98 --- /dev/null +++ b/vendored-meson/meson/mesonbuild/cargo/manifest.py @@ -0,0 +1,227 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright © 2022-2023 Intel Corporation + +"""Type definitions for cargo manifest files.""" + +from __future__ import annotations +import typing as T + +from typing_extensions import Literal, TypedDict, Required + +EDITION = Literal['2015', '2018', '2021'] +CRATE_TYPE = Literal['bin', 'lib', 'dylib', 'staticlib', 'cdylib', 'rlib', 'proc-macro'] + +Package = 
+    'Package',
+    {
+        'name': Required[str],
+        'version': Required[str],
+        'authors': T.List[str],
+        'edition': EDITION,
+        'rust-version': str,
+        'description': str,
+        'readme': str,
+        'license': str,
+        'license-file': str,
+        'keywords': T.List[str],
+        'categories': T.List[str],
+        'workspace': str,
+        'build': str,
+        'links': str,
+        'include': T.List[str],
+        'exclude': T.List[str],
+        'publish': bool,
+        'metadata': T.Dict[str, T.Dict[str, str]],
+        'default-run': str,
+        'autobins': bool,
+        'autoexamples': bool,
+        'autotests': bool,
+        'autobenches': bool,
+    },
+    total=False,
+)
+"""A description of the Package Dictionary."""
+
+class FixedPackage(TypedDict, total=False):
+
+    """A description of the Package Dictionary, fixed up."""
+
+    name: Required[str]
+    version: Required[str]
+    authors: T.List[str]
+    edition: EDITION
+    rust_version: str
+    description: str
+    readme: str
+    license: str
+    license_file: str
+    keywords: T.List[str]
+    categories: T.List[str]
+    workspace: str
+    build: str
+    links: str
+    include: T.List[str]
+    exclude: T.List[str]
+    publish: bool
+    metadata: T.Dict[str, T.Dict[str, str]]
+    default_run: str
+    autobins: bool
+    autoexamples: bool
+    autotests: bool
+    autobenches: bool
+
+
+class Badge(TypedDict):
+
+    """An entry in the badge section."""
+
+    status: Literal['actively-developed', 'passively-developed', 'as-is', 'experimental', 'deprecated', 'none']
+
+
+Dependency = TypedDict(
+    'Dependency',
+    {
+        'version': str,
+        'registry': str,
+        'git': str,
+        'branch': str,
+        'rev': str,
+        'path': str,
+        'optional': bool,
+        'package': str,
+        'default-features': bool,
+        'features': T.List[str],
+    },
+    total=False,
+)
+"""An entry in the *dependencies sections."""
+
+
+class FixedDependency(TypedDict, total=False):
+
+    """An entry in the *dependencies sections, fixed up."""
+
+    version: T.List[str]
+    registry: str
+    git: str
+    branch: str
+    rev: str
+    path: str
+    optional: bool
+    package: str
+    default_features: bool
+    features: T.List[str]
+
+
+DependencyV = T.Union[Dependency, str]
+"""A Dependency entry, either a string or a Dependency Dict."""
+
+
+_BaseBuildTarget = TypedDict(
+    '_BaseBuildTarget',
+    {
+        'path': str,
+        'test': bool,
+        'doctest': bool,
+        'bench': bool,
+        'doc': bool,
+        'plugin': bool,
+        'proc-macro': bool,
+        'harness': bool,
+        'edition': EDITION,
+        'crate-type': T.List[CRATE_TYPE],
+        'required-features': T.List[str],
+    },
+    total=False,
+)
+
+
+class BuildTarget(_BaseBuildTarget, total=False):
+
+    name: Required[str]
+
+class LibTarget(_BaseBuildTarget, total=False):
+
+    name: str
+
+
+class _BaseFixedBuildTarget(TypedDict, total=False):
+    path: str
+    test: bool
+    doctest: bool
+    bench: bool
+    doc: bool
+    plugin: bool
+    harness: bool
+    edition: EDITION
+    crate_type: T.List[CRATE_TYPE]
+    required_features: T.List[str]
+
+
+class FixedBuildTarget(_BaseFixedBuildTarget, total=False):
+
+    name: str
+
+class FixedLibTarget(_BaseFixedBuildTarget, total=False):
+
+    name: Required[str]
+    proc_macro: bool
+
+
+class Target(TypedDict):
+
+    """Target entry in the Manifest File."""
+
+    dependencies: T.Dict[str, DependencyV]
+
+
+class Workspace(TypedDict):
+
+    """The representation of a workspace.
+
+    In a virtual manifest, the :attribute:`members` key is always present,
+    but in a project manifest an empty workspace may be provided, in which
+    case the workspace is implicitly filled in by values from the path-based
+    dependencies.
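+
+    An illustrative TOML fragment (hypothetical paths):
+
+        [workspace]
+        members = ["crates/*"]
+        exclude = ["crates/experimental"]
+
+    This mirrors the `members`/`exclude` handling in _load_manifests().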
+
+    The :attribute:`exclude` key is always optional.
+    """
+
+    members: T.List[str]
+    exclude: T.List[str]
+
+
+Manifest = TypedDict(
+    'Manifest',
+    {
+        'package': Package,
+        'badges': T.Dict[str, Badge],
+        'dependencies': T.Dict[str, DependencyV],
+        'dev-dependencies': T.Dict[str, DependencyV],
+        'build-dependencies': T.Dict[str, DependencyV],
+        'lib': LibTarget,
+        'bin': T.List[BuildTarget],
+        'test': T.List[BuildTarget],
+        'bench': T.List[BuildTarget],
+        'example': T.List[BuildTarget],
+        'features': T.Dict[str, T.List[str]],
+        'target': T.Dict[str, Target],
+        'workspace': Workspace,
+
+        # TODO: patch?
+        # TODO: replace?
+    },
+    total=False,
+)
+"""The Cargo Manifest format."""
+
+
+class VirtualManifest(TypedDict):
+
+    """The Representation of a virtual manifest.
+
+    Cargo allows a root manifest that contains only a workspace; this is
+    called a virtual manifest. This doesn't really map 1:1 with any meson
+    concept, except perhaps the proposed "meta project".
+    """
+
+    workspace: Workspace
diff --git a/vendored-meson/meson/mesonbuild/cargo/version.py b/vendored-meson/meson/mesonbuild/cargo/version.py
new file mode 100644
index 000000000000..cb09a004b8d7
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/cargo/version.py
@@ -0,0 +1,96 @@
+# SPDX-License-Identifier: Apache-2.0
+# Copyright © 2022-2023 Intel Corporation
+
+"""Convert Cargo versions into Meson compatible ones."""
+
+from __future__ import annotations
+import typing as T
+
+
+def convert(cargo_ver: str) -> T.List[str]:
+    """Convert a Cargo compatible version into a Meson compatible one.
+
+    :param cargo_ver: The version, as Cargo specifies
+    :return: A list of version constraints, as Meson understands them
+    """
+    # Cleanup, just for safety
+    cargo_ver = cargo_ver.strip()
+    cargo_vers = [c.strip() for c in cargo_ver.split(',')]
+
+    out: T.List[str] = []
+
+    for ver in cargo_vers:
+        # This covers >= and <= as well
+        # https://doc.rust-lang.org/cargo/reference/specifying-dependencies.html#comparison-requirements
+        if ver.startswith(('>', '<', '=')):
+            out.append(ver)
+
+        elif ver.startswith('~'):
+            # Rust has tilde requirements, which mean "at least this version,
+            # but less than the next significant version"
+            # https://doc.rust-lang.org/cargo/reference/specifying-dependencies.html#tilde-requirements
+            # we convert those into a pair of constraints
+            v = ver[1:].split('.')
+            out.append(f'>= {".".join(v)}')
+            if len(v) == 3:
+                out.append(f'< {v[0]}.{int(v[1]) + 1}.0')
+            elif len(v) == 2:
+                out.append(f'< {v[0]}.{int(v[1]) + 1}')
+            else:
+                out.append(f'< {int(v[0]) + 1}')
+
+        elif '*' in ver:
+            # Rust has asterisk requirements, which are like 1.* == ~1
+            # https://doc.rust-lang.org/cargo/reference/specifying-dependencies.html#wildcard-requirements
+            v = ver.split('.')[:-1]
+            if v:
+                out.append(f'>= {".".join(v)}')
+            if len(v) == 2:
+                out.append(f'< {v[0]}.{int(v[1]) + 1}')
+            elif len(v) == 1:
+                out.append(f'< {int(v[0]) + 1}')
+
+        else:
+            # a Caret version is equivalent to the default strategy
+            # https://doc.rust-lang.org/cargo/reference/specifying-dependencies.html#caret-requirements
+            if ver.startswith('^'):
+                ver = ver[1:]
+
+            # If there is no qualifier, then it means this or the next non-zero version
+            # That means that if this is `1.1.0`, then we need `>= 1.1.0` && `< 2.0.0`
+            # Or if we have `0.1.0`, then we need `>= 0.1.0` && `< 0.2.0`
+            # Or if we have `0.1`, then we need `>= 0.1.0` && `< 0.2.0`
+            # Or if we have `0.0.0`, then we need `< 1.0.0`
+            # Or if we have `0.0`, then we need `< 1.0.0`
+            # Or if we have `0`, then we 
need `< 1.0.0` + # Or if we have `0.0.3`, then we need `>= 0.0.3` && `< 0.0.4` + # https://doc.rust-lang.org/cargo/reference/specifying-dependencies.html#specifying-dependencies-from-cratesio + # + # this works much like the ~ versions, but in reverse. Tilde starts + # at the patch version and works up, to the major version, while + # bare numbers start at the major version and work down to the patch + # version + vers = ver.split('.') + min_: T.List[str] = [] + max_: T.List[str] = [] + bumped = False + for v_ in vers: + if v_ != '0' and not bumped: + min_.append(v_) + max_.append(str(int(v_) + 1)) + bumped = True + else: + if not (bumped and v_ == '0'): + min_.append(v_) + if not bumped: + max_.append('0') + + # If there is no minimum, don't emit one + if set(min_) != {'0'}: + out.append('>= {}'.format('.'.join(min_))) + if set(max_) != {'0'}: + out.append('< {}'.format('.'.join(max_))) + else: + out.append('< 1') + + return out diff --git a/vendored-meson/meson/mesonbuild/cmake/__init__.py b/vendored-meson/meson/mesonbuild/cmake/__init__.py new file mode 100644 index 000000000000..e9d7f2a8b7d4 --- /dev/null +++ b/vendored-meson/meson/mesonbuild/cmake/__init__.py @@ -0,0 +1,39 @@ +# Copyright 2019 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# This class contains the basic functionality needed to run any interpreter +# or an interpreter-based tool. + +__all__ = [ + 'CMakeExecutor', + 'CMakeExecScope', + 'CMakeException', + 'CMakeInterpreter', + 'CMakeTarget', + 'CMakeToolchain', + 'CMakeTraceParser', + 'TargetOptions', + 'language_map', + 'cmake_defines_to_args', + 'check_cmake_args', + 'cmake_is_debug', + 'resolve_cmake_trace_targets', +] + +from .common import CMakeException, TargetOptions, cmake_defines_to_args, language_map, check_cmake_args, cmake_is_debug +from .executor import CMakeExecutor +from .interpreter import CMakeInterpreter +from .toolchain import CMakeToolchain, CMakeExecScope +from .traceparser import CMakeTarget, CMakeTraceParser +from .tracetargets import resolve_cmake_trace_targets diff --git a/vendored-meson/meson/mesonbuild/cmake/common.py b/vendored-meson/meson/mesonbuild/cmake/common.py new file mode 100644 index 000000000000..3de6c16ace3c --- /dev/null +++ b/vendored-meson/meson/mesonbuild/cmake/common.py @@ -0,0 +1,344 @@ +# Copyright 2019 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# This class contains the basic functionality needed to run any interpreter +# or an interpreter-based tool. 
+from __future__ import annotations + +from ..mesonlib import MesonException, OptionKey +from .. import mlog +from pathlib import Path +import typing as T + +if T.TYPE_CHECKING: + from ..environment import Environment + from ..interpreterbase import TYPE_var + +language_map = { + 'c': 'C', + 'cpp': 'CXX', + 'cuda': 'CUDA', + 'objc': 'OBJC', + 'objcpp': 'OBJCXX', + 'cs': 'CSharp', + 'java': 'Java', + 'fortran': 'Fortran', + 'swift': 'Swift', +} + +backend_generator_map = { + 'ninja': 'Ninja', + 'xcode': 'Xcode', + 'vs2010': 'Visual Studio 10 2010', + 'vs2012': 'Visual Studio 11 2012', + 'vs2013': 'Visual Studio 12 2013', + 'vs2015': 'Visual Studio 14 2015', + 'vs2017': 'Visual Studio 15 2017', + 'vs2019': 'Visual Studio 16 2019', + 'vs2022': 'Visual Studio 17 2022', +} + +blacklist_cmake_defs = [ + 'CMAKE_TOOLCHAIN_FILE', + 'CMAKE_PROJECT_INCLUDE', + 'MESON_PRELOAD_FILE', + 'MESON_PS_CMAKE_CURRENT_BINARY_DIR', + 'MESON_PS_CMAKE_CURRENT_SOURCE_DIR', + 'MESON_PS_DELAYED_CALLS', + 'MESON_PS_LOADED', + 'MESON_FIND_ROOT_PATH', + 'MESON_CMAKE_SYSROOT', + 'MESON_PATHS_LIST', + 'MESON_CMAKE_ROOT', +] + +def cmake_is_debug(env: 'Environment') -> bool: + if OptionKey('b_vscrt') in env.coredata.options: + is_debug = env.coredata.get_option(OptionKey('buildtype')) == 'debug' + if env.coredata.options[OptionKey('b_vscrt')].value in {'mdd', 'mtd'}: + is_debug = True + return is_debug + else: + # Don't directly assign to is_debug to make mypy happy + debug_opt = env.coredata.get_option(OptionKey('debug')) + assert isinstance(debug_opt, bool) + return debug_opt + +class CMakeException(MesonException): + pass + +class CMakeBuildFile: + def __init__(self, file: Path, is_cmake: bool, is_temp: bool) -> None: + self.file = file + self.is_cmake = is_cmake + self.is_temp = is_temp + + def __repr__(self) -> str: + return f'<{self.__class__.__name__}: {self.file}; cmake={self.is_cmake}; temp={self.is_temp}>' + +def _flags_to_list(raw: str) -> T.List[str]: + # Convert a raw commandline string into a list of strings + res = [] + curr = '' + escape = False + in_string = False + for i in raw: + if escape: + # If the current char is not a quote, the '\' is probably important + if i not in ['"', "'"]: + curr += '\\' + curr += i + escape = False + elif i == '\\': + escape = True + elif i in {'"', "'"}: + in_string = not in_string + elif i in {' ', '\n'}: + if in_string: + curr += i + else: + res += [curr] + curr = '' + else: + curr += i + res += [curr] + res = [r for r in res if len(r) > 0] + return res + +def cmake_get_generator_args(env: 'Environment') -> T.List[str]: + backend_name = env.coredata.get_option(OptionKey('backend')) + assert isinstance(backend_name, str) + assert backend_name in backend_generator_map + return ['-G', backend_generator_map[backend_name]] + +def cmake_defines_to_args(raw: T.List[T.Dict[str, TYPE_var]], permissive: bool = False) -> T.List[str]: + res = [] # type: T.List[str] + + for i in raw: + for key, val in i.items(): + if key in blacklist_cmake_defs: + mlog.warning('Setting', mlog.bold(key), 'is not supported. 
See the meson docs for cross compilation support:')
+                mlog.warning('  - URL: https://mesonbuild.com/CMake-module.html#cross-compilation')
+                mlog.warning('  --> Ignoring this option')
+                continue
+            if isinstance(val, (str, int, float)):
+                res += [f'-D{key}={val}']
+            elif isinstance(val, bool):
+                val_str = 'ON' if val else 'OFF'
+                res += [f'-D{key}={val_str}']
+            else:
+                raise MesonException('Type "{}" of "{}" is not supported as a CMake define value'.format(type(val).__name__, key))
+
+    return res
+
+# TODO: this function will become obsolete once the `cmake_args` kwarg is dropped
+def check_cmake_args(args: T.List[str]) -> T.List[str]:
+    res = []  # type: T.List[str]
+    dis = ['-D' + x for x in blacklist_cmake_defs]
+    assert dis  # Ensure that dis is not empty.
+    for i in args:
+        if any(i.startswith(x) for x in dis):
+            mlog.warning('Setting', mlog.bold(i), 'is not supported. See the meson docs for cross compilation support:')
+            mlog.warning('  - URL: https://mesonbuild.com/CMake-module.html#cross-compilation')
+            mlog.warning('  --> Ignoring this option')
+            continue
+        res += [i]
+    return res
+
+class CMakeInclude:
+    def __init__(self, path: Path, isSystem: bool = False):
+        self.path = path
+        self.isSystem = isSystem
+
+    def __repr__(self) -> str:
+        return f'<CMakeInclude: {self.path} -- isSystem: {self.isSystem}>'
+
+class CMakeFileGroup:
+    def __init__(self, data: T.Dict[str, T.Any]) -> None:
+        self.defines = data.get('defines', '')  # type: str
+        self.flags = _flags_to_list(data.get('compileFlags', ''))  # type: T.List[str]
+        self.is_generated = data.get('isGenerated', False)  # type: bool
+        self.language = data.get('language', 'C')  # type: str
+        self.sources = [Path(x) for x in data.get('sources', [])]  # type: T.List[Path]
+
+        # Fix the include directories
+        self.includes = []  # type: T.List[CMakeInclude]
+        for i in data.get('includePath', []):
+            if isinstance(i, dict) and 'path' in i:
+                isSystem = i.get('isSystem', False)
+                assert isinstance(isSystem, bool)
+                assert isinstance(i['path'], str)
+                self.includes += [CMakeInclude(Path(i['path']), isSystem)]
+            elif isinstance(i, str):
+                self.includes += [CMakeInclude(Path(i))]
+
+    def log(self) -> None:
+        mlog.log('flags =', mlog.bold(', '.join(self.flags)))
+        mlog.log('defines =', mlog.bold(self.defines))
+        mlog.log('includes =', mlog.bold(', '.join([str(x) for x in self.includes])))
+        mlog.log('is_generated =', mlog.bold('true' if self.is_generated else 'false'))
+        mlog.log('language =', mlog.bold(self.language))
+        mlog.log('sources:')
+        for i in self.sources:
+            with mlog.nested():
+                mlog.log(i.as_posix())
+
+class CMakeTarget:
+    def __init__(self, data: T.Dict[str, T.Any]) -> None:
+        self.artifacts = [Path(x) for x in data.get('artifacts', [])]  # type: T.List[Path]
+        self.src_dir = Path(data.get('sourceDirectory', ''))  # type: Path
+        self.build_dir = Path(data.get('buildDirectory', ''))  # type: Path
+        self.name = data.get('name', '')  # type: str
+        self.full_name = data.get('fullName', '')  # type: str
+        self.install = data.get('hasInstallRule', False)  # type: bool
+        self.install_paths = [Path(x) for x in set(data.get('installPaths', []))]  # type: T.List[Path]
+        self.link_lang = data.get('linkerLanguage', '')  # type: str
+        self.link_libraries = _flags_to_list(data.get('linkLibraries', ''))  # type: T.List[str]
+        self.link_flags = _flags_to_list(data.get('linkFlags', ''))  # type: T.List[str]
+        self.link_lang_flags = _flags_to_list(data.get('linkLanguageFlags', ''))  # type: T.List[str]
+        # self.link_path = Path(data.get('linkPath', ''))  # type: Path
+        self.type = data.get('type', 'EXECUTABLE')  # 
type: str + # self.is_generator_provided = data.get('isGeneratorProvided', False) # type: bool + self.files = [] # type: T.List[CMakeFileGroup] + + for i in data.get('fileGroups', []): + self.files += [CMakeFileGroup(i)] + + def log(self) -> None: + mlog.log('artifacts =', mlog.bold(', '.join([x.as_posix() for x in self.artifacts]))) + mlog.log('src_dir =', mlog.bold(self.src_dir.as_posix())) + mlog.log('build_dir =', mlog.bold(self.build_dir.as_posix())) + mlog.log('name =', mlog.bold(self.name)) + mlog.log('full_name =', mlog.bold(self.full_name)) + mlog.log('install =', mlog.bold('true' if self.install else 'false')) + mlog.log('install_paths =', mlog.bold(', '.join([x.as_posix() for x in self.install_paths]))) + mlog.log('link_lang =', mlog.bold(self.link_lang)) + mlog.log('link_libraries =', mlog.bold(', '.join(self.link_libraries))) + mlog.log('link_flags =', mlog.bold(', '.join(self.link_flags))) + mlog.log('link_lang_flags =', mlog.bold(', '.join(self.link_lang_flags))) + # mlog.log('link_path =', mlog.bold(self.link_path)) + mlog.log('type =', mlog.bold(self.type)) + # mlog.log('is_generator_provided =', mlog.bold('true' if self.is_generator_provided else 'false')) + for idx, i in enumerate(self.files): + mlog.log(f'Files {idx}:') + with mlog.nested(): + i.log() + +class CMakeProject: + def __init__(self, data: T.Dict[str, T.Any]) -> None: + self.src_dir = Path(data.get('sourceDirectory', '')) # type: Path + self.build_dir = Path(data.get('buildDirectory', '')) # type: Path + self.name = data.get('name', '') # type: str + self.targets = [] # type: T.List[CMakeTarget] + + for i in data.get('targets', []): + self.targets += [CMakeTarget(i)] + + def log(self) -> None: + mlog.log('src_dir =', mlog.bold(self.src_dir.as_posix())) + mlog.log('build_dir =', mlog.bold(self.build_dir.as_posix())) + mlog.log('name =', mlog.bold(self.name)) + for idx, i in enumerate(self.targets): + mlog.log(f'Target {idx}:') + with mlog.nested(): + i.log() + +class CMakeConfiguration: + def __init__(self, data: T.Dict[str, T.Any]) -> None: + self.name = data.get('name', '') # type: str + self.projects = [] # type: T.List[CMakeProject] + for i in data.get('projects', []): + self.projects += [CMakeProject(i)] + + def log(self) -> None: + mlog.log('name =', mlog.bold(self.name)) + for idx, i in enumerate(self.projects): + mlog.log(f'Project {idx}:') + with mlog.nested(): + i.log() + +class SingleTargetOptions: + def __init__(self) -> None: + self.opts = {} # type: T.Dict[str, str] + self.lang_args = {} # type: T.Dict[str, T.List[str]] + self.link_args = [] # type: T.List[str] + self.install = 'preserve' + + def set_opt(self, opt: str, val: str) -> None: + self.opts[opt] = val + + def append_args(self, lang: str, args: T.List[str]) -> None: + if lang not in self.lang_args: + self.lang_args[lang] = [] + self.lang_args[lang] += args + + def append_link_args(self, args: T.List[str]) -> None: + self.link_args += args + + def set_install(self, install: bool) -> None: + self.install = 'true' if install else 'false' + + def get_override_options(self, initial: T.List[str]) -> T.List[str]: + res = [] # type: T.List[str] + for i in initial: + opt = i[:i.find('=')] + if opt not in self.opts: + res += [i] + res += [f'{k}={v}' for k, v in self.opts.items()] + return res + + def get_compile_args(self, lang: str, initial: T.List[str]) -> T.List[str]: + if lang in self.lang_args: + return initial + self.lang_args[lang] + return initial + + def get_link_args(self, initial: T.List[str]) -> T.List[str]: + return initial + 
self.link_args + + def get_install(self, initial: bool) -> bool: + return {'preserve': initial, 'true': True, 'false': False}[self.install] + +class TargetOptions: + def __init__(self) -> None: + self.global_options = SingleTargetOptions() + self.target_options = {} # type: T.Dict[str, SingleTargetOptions] + + def __getitem__(self, tgt: str) -> SingleTargetOptions: + if tgt not in self.target_options: + self.target_options[tgt] = SingleTargetOptions() + return self.target_options[tgt] + + def get_override_options(self, tgt: str, initial: T.List[str]) -> T.List[str]: + initial = self.global_options.get_override_options(initial) + if tgt in self.target_options: + initial = self.target_options[tgt].get_override_options(initial) + return initial + + def get_compile_args(self, tgt: str, lang: str, initial: T.List[str]) -> T.List[str]: + initial = self.global_options.get_compile_args(lang, initial) + if tgt in self.target_options: + initial = self.target_options[tgt].get_compile_args(lang, initial) + return initial + + def get_link_args(self, tgt: str, initial: T.List[str]) -> T.List[str]: + initial = self.global_options.get_link_args(initial) + if tgt in self.target_options: + initial = self.target_options[tgt].get_link_args(initial) + return initial + + def get_install(self, tgt: str, initial: bool) -> bool: + initial = self.global_options.get_install(initial) + if tgt in self.target_options: + initial = self.target_options[tgt].get_install(initial) + return initial diff --git a/vendored-meson/meson/mesonbuild/cmake/data/__init__.py b/vendored-meson/meson/mesonbuild/cmake/data/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/vendored-meson/meson/mesonbuild/cmake/data/preload.cmake b/vendored-meson/meson/mesonbuild/cmake/data/preload.cmake new file mode 100644 index 000000000000..234860b75c7b --- /dev/null +++ b/vendored-meson/meson/mesonbuild/cmake/data/preload.cmake @@ -0,0 +1,82 @@ +if(MESON_PS_LOADED) + return() +endif() + +set(MESON_PS_LOADED ON) + +cmake_policy(PUSH) +cmake_policy(SET CMP0054 NEW) # https://cmake.org/cmake/help/latest/policy/CMP0054.html + +# Dummy macros that have a special meaning in the meson code +macro(meson_ps_execute_delayed_calls) +endmacro() + +macro(meson_ps_reload_vars) +endmacro() + +macro(meson_ps_disabled_function) + message(WARNING "The function '${ARGV0}' is disabled in the context of CMake subprojects.\n" + "This should not be an issue but may lead to compilation errors.") +endmacro() + +# Helper macro to inspect the current CMake state +macro(meson_ps_inspect_vars) + set(MESON_PS_CMAKE_CURRENT_BINARY_DIR "${CMAKE_CURRENT_BINARY_DIR}") + set(MESON_PS_CMAKE_CURRENT_SOURCE_DIR "${CMAKE_CURRENT_SOURCE_DIR}") + meson_ps_execute_delayed_calls() +endmacro() + + +# Override some system functions with custom code and forward the args +# to the original function +macro(add_custom_command) + meson_ps_inspect_vars() + _add_custom_command(${ARGV}) +endmacro() + +macro(add_custom_target) + meson_ps_inspect_vars() + _add_custom_target(${ARGV}) +endmacro() + +macro(set_property) + meson_ps_inspect_vars() + _set_property(${ARGV}) +endmacro() + +function(set_source_files_properties) + set(FILES) + set(I 0) + set(PROPERTIES OFF) + + while(I LESS ARGC) + if(NOT PROPERTIES) + if("${ARGV${I}}" STREQUAL "PROPERTIES") + set(PROPERTIES ON) + else() + list(APPEND FILES "${ARGV${I}}") + endif() + + math(EXPR I "${I} + 1") + else() + set(ID_IDX ${I}) + math(EXPR PROP_IDX "${ID_IDX} + 1") + + set(ID "${ARGV${ID_IDX}}") + set(PROP "${ARGV${PROP_IDX}}") + 
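+ # Past the PROPERTIES keyword the remaining arguments are consumed + # pairwise: ARGV[I] is the property name and ARGV[I+1] its value, + # applied to all collected files in one set_property() call below.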
+ set_property(SOURCE ${FILES} PROPERTY "${ID}" "${PROP}") + math(EXPR I "${I} + 2") + endif() + endwhile() +endfunction() + +# Disable some functions that would mess up the CMake meson integration +macro(target_precompile_headers) + meson_ps_disabled_function(target_precompile_headers) +endmacro() + +set(MESON_PS_DELAYED_CALLS add_custom_command;add_custom_target;set_property) +meson_ps_reload_vars() + +cmake_policy(POP) diff --git a/vendored-meson/meson/mesonbuild/cmake/executor.py b/vendored-meson/meson/mesonbuild/cmake/executor.py new file mode 100644 index 000000000000..c22c0ca99893 --- /dev/null +++ b/vendored-meson/meson/mesonbuild/cmake/executor.py @@ -0,0 +1,254 @@ +# Copyright 2019 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# This class contains the basic functionality needed to run any interpreter +# or an interpreter-based tool. +from __future__ import annotations + +import subprocess as S +from threading import Thread +import typing as T +import re +import os + +from .. import mlog +from ..mesonlib import PerMachine, Popen_safe, version_compare, is_windows, OptionKey +from ..programs import find_external_program, NonExistingExternalProgram + +if T.TYPE_CHECKING: + from pathlib import Path + + from ..environment import Environment + from ..mesonlib import MachineChoice + from ..programs import ExternalProgram + + TYPE_result = T.Tuple[int, T.Optional[str], T.Optional[str]] + TYPE_cache_key = T.Tuple[str, T.Tuple[str, ...], str, T.FrozenSet[T.Tuple[str, str]]] + +class CMakeExecutor: + # The class's copy of the CMake path. Avoids having to search for it + # multiple times in the same Meson invocation. 
+ class_cmakebin = PerMachine(None, None) # type: PerMachine[T.Optional[ExternalProgram]] + class_cmakevers = PerMachine(None, None) # type: PerMachine[T.Optional[str]] + class_cmake_cache = {} # type: T.Dict[T.Any, TYPE_result] + + def __init__(self, environment: 'Environment', version: str, for_machine: MachineChoice, silent: bool = False): + self.min_version = version + self.environment = environment + self.for_machine = for_machine + self.cmakebin, self.cmakevers = self.find_cmake_binary(self.environment, silent=silent) + self.always_capture_stderr = True + self.print_cmout = False + self.prefix_paths = [] # type: T.List[str] + self.extra_cmake_args = [] # type: T.List[str] + + if self.cmakebin is None: + return + + if not version_compare(self.cmakevers, self.min_version): + mlog.warning( + 'The version of CMake', mlog.bold(self.cmakebin.get_path()), + 'is', mlog.bold(self.cmakevers), 'but version', mlog.bold(self.min_version), + 'is required') + self.cmakebin = None + return + + self.prefix_paths = self.environment.coredata.options[OptionKey('cmake_prefix_path', machine=self.for_machine)].value + if self.prefix_paths: + self.extra_cmake_args += ['-DCMAKE_PREFIX_PATH={}'.format(';'.join(self.prefix_paths))] + + def find_cmake_binary(self, environment: 'Environment', silent: bool = False) -> T.Tuple[T.Optional['ExternalProgram'], T.Optional[str]]: + # Only search for CMake the first time and store the result in the class + # definition + if isinstance(CMakeExecutor.class_cmakebin[self.for_machine], NonExistingExternalProgram): + mlog.debug(f'CMake binary for {self.for_machine} is cached as not found') + return None, None + elif CMakeExecutor.class_cmakebin[self.for_machine] is not None: + mlog.debug(f'CMake binary for {self.for_machine} is cached.') + else: + assert CMakeExecutor.class_cmakebin[self.for_machine] is None + + mlog.debug(f'CMake binary for {self.for_machine} is not cached') + for potential_cmakebin in find_external_program( + environment, self.for_machine, 'cmake', 'CMake', + environment.default_cmake, allow_default_for_cross=False): + version_if_ok = self.check_cmake(potential_cmakebin) + if not version_if_ok: + continue + if not silent: + mlog.log('Found CMake:', mlog.bold(potential_cmakebin.get_path()), + f'({version_if_ok})') + CMakeExecutor.class_cmakebin[self.for_machine] = potential_cmakebin + CMakeExecutor.class_cmakevers[self.for_machine] = version_if_ok + break + else: + if not silent: + mlog.log('Found CMake:', mlog.red('NO')) + # Set to False instead of None to signify that we've already + # searched for it and not found it + CMakeExecutor.class_cmakebin[self.for_machine] = NonExistingExternalProgram() + CMakeExecutor.class_cmakevers[self.for_machine] = None + return None, None + + return CMakeExecutor.class_cmakebin[self.for_machine], CMakeExecutor.class_cmakevers[self.for_machine] + + def check_cmake(self, cmakebin: 'ExternalProgram') -> T.Optional[str]: + if not cmakebin.found(): + mlog.log(f'Did not find CMake {cmakebin.name!r}') + return None + try: + cmd = cmakebin.get_command() + p, out = Popen_safe(cmd + ['--version'])[0:2] + if p.returncode != 0: + mlog.warning('Found CMake {!r} but couldn\'t run it' + ''.format(' '.join(cmd))) + return None + except FileNotFoundError: + mlog.warning('We thought we found CMake {!r} but now it\'s not there. How odd!' 
+ ''.format(' '.join(cmd))) + return None + except PermissionError: + msg = 'Found CMake {!r} but didn\'t have permissions to run it.'.format(' '.join(cmd)) + if not is_windows(): + msg += '\n\nOn Unix-like systems this is often caused by scripts that are not executable.' + mlog.warning(msg) + return None + + cmvers = re.search(r'(cmake|cmake3)\s*version\s*([\d.]+)', out) + if cmvers is not None: + return cmvers.group(2) + mlog.warning(f'We thought we found CMake {cmd!r}, but it was missing the expected ' + 'version string in its output.') + return None + + def set_exec_mode(self, print_cmout: T.Optional[bool] = None, always_capture_stderr: T.Optional[bool] = None) -> None: + if print_cmout is not None: + self.print_cmout = print_cmout + if always_capture_stderr is not None: + self.always_capture_stderr = always_capture_stderr + + def _cache_key(self, args: T.List[str], build_dir: Path, env: T.Optional[T.Dict[str, str]]) -> TYPE_cache_key: + fenv = frozenset(env.items()) if env is not None else frozenset() + targs = tuple(args) + return (self.cmakebin.get_path(), targs, build_dir.as_posix(), fenv) + + def _call_cmout_stderr(self, args: T.List[str], build_dir: Path, env: T.Optional[T.Dict[str, str]]) -> TYPE_result: + cmd = self.cmakebin.get_command() + args + proc = S.Popen(cmd, stdout=S.PIPE, stderr=S.PIPE, cwd=str(build_dir), env=env) # TODO [PYTHON_37]: drop Path conversion + + # stdout and stderr MUST be read at the same time to avoid pipe + # blocking issues. The easiest way to do this is with a separate + # thread for one of the pipes. + def print_stdout() -> None: + while True: + line = proc.stdout.readline() + if not line: + break + mlog.log(line.decode(errors='ignore').strip('\n')) + proc.stdout.close() + + t = Thread(target=print_stdout) + t.start() + + try: + # Read stderr line by line and log non trace lines + raw_trace = '' + tline_start_reg = re.compile(r'^\s*(.*\.(cmake|txt))\(([0-9]+)\):\s*(\w+)\(.*$') + inside_multiline_trace = False + while True: + line_raw = proc.stderr.readline() + if not line_raw: + break + line = line_raw.decode(errors='ignore') + if tline_start_reg.match(line): + raw_trace += line + inside_multiline_trace = not line.endswith(' )\n') + elif inside_multiline_trace: + raw_trace += line + else: + mlog.warning(line.strip('\n')) + + finally: + proc.stderr.close() + t.join() + proc.wait() + + return proc.returncode, None, raw_trace + + def _call_cmout(self, args: T.List[str], build_dir: Path, env: T.Optional[T.Dict[str, str]]) -> TYPE_result: + cmd = self.cmakebin.get_command() + args + proc = S.Popen(cmd, stdout=S.PIPE, stderr=S.STDOUT, cwd=str(build_dir), env=env) # TODO [PYTHON_37]: drop Path conversion + while True: + line = proc.stdout.readline() + if not line: + break + mlog.log(line.decode(errors='ignore').strip('\n')) + proc.stdout.close() + proc.wait() + return proc.returncode, None, None + + def _call_quiet(self, args: T.List[str], build_dir: Path, env: T.Optional[T.Dict[str, str]]) -> TYPE_result: + build_dir.mkdir(parents=True, exist_ok=True) + cmd = self.cmakebin.get_command() + args + ret = S.run(cmd, env=env, cwd=str(build_dir), close_fds=False, + stdout=S.PIPE, stderr=S.PIPE, universal_newlines=False) # TODO [PYTHON_37]: drop Path conversion + rc = ret.returncode + out = ret.stdout.decode(errors='ignore') + err = ret.stderr.decode(errors='ignore') + return rc, out, err + + def _call_impl(self, args: T.List[str], build_dir: Path, env: T.Optional[T.Dict[str, str]]) -> TYPE_result: + mlog.debug(f'Calling CMake ({self.cmakebin.get_command()}) in 
{build_dir} with:') + for i in args: + mlog.debug(f' - "{i}"') + if not self.print_cmout: + return self._call_quiet(args, build_dir, env) + else: + if self.always_capture_stderr: + return self._call_cmout_stderr(args, build_dir, env) + else: + return self._call_cmout(args, build_dir, env) + + def call(self, args: T.List[str], build_dir: Path, env: T.Optional[T.Dict[str, str]] = None, disable_cache: bool = False) -> TYPE_result: + if env is None: + env = os.environ.copy() + + args = args + self.extra_cmake_args + if disable_cache: + return self._call_impl(args, build_dir, env) + + # First check if cached, if not call the real cmake function + cache = CMakeExecutor.class_cmake_cache + key = self._cache_key(args, build_dir, env) + if key not in cache: + cache[key] = self._call_impl(args, build_dir, env) + return cache[key] + + def found(self) -> bool: + return self.cmakebin is not None + + def version(self) -> str: + return self.cmakevers + + def executable_path(self) -> str: + return self.cmakebin.get_path() + + def get_command(self) -> T.List[str]: + return self.cmakebin.get_command() + + def get_cmake_prefix_paths(self) -> T.List[str]: + return self.prefix_paths + + def machine_choice(self) -> MachineChoice: + return self.for_machine diff --git a/vendored-meson/meson/mesonbuild/cmake/fileapi.py b/vendored-meson/meson/mesonbuild/cmake/fileapi.py new file mode 100644 index 000000000000..9605f920da4a --- /dev/null +++ b/vendored-meson/meson/mesonbuild/cmake/fileapi.py @@ -0,0 +1,321 @@ +# Copyright 2019 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from __future__ import annotations + +from .common import CMakeException, CMakeBuildFile, CMakeConfiguration +import typing as T +from .. 
import mlog +from pathlib import Path +import json +import re + +STRIP_KEYS = ['cmake', 'reply', 'backtrace', 'backtraceGraph', 'version'] + +class CMakeFileAPI: + def __init__(self, build_dir: Path): + self.build_dir = build_dir + self.api_base_dir = self.build_dir / '.cmake' / 'api' / 'v1' + self.request_dir = self.api_base_dir / 'query' / 'client-meson' + self.reply_dir = self.api_base_dir / 'reply' + self.cmake_sources = [] # type: T.List[CMakeBuildFile] + self.cmake_configurations = [] # type: T.List[CMakeConfiguration] + self.kind_resolver_map = { + 'codemodel': self._parse_codemodel, + 'cmakeFiles': self._parse_cmakeFiles, + } + + def get_cmake_sources(self) -> T.List[CMakeBuildFile]: + return self.cmake_sources + + def get_cmake_configurations(self) -> T.List[CMakeConfiguration]: + return self.cmake_configurations + + def setup_request(self) -> None: + self.request_dir.mkdir(parents=True, exist_ok=True) + + query = { + 'requests': [ + {'kind': 'codemodel', 'version': {'major': 2, 'minor': 0}}, + {'kind': 'cmakeFiles', 'version': {'major': 1, 'minor': 0}}, + ] + } + + query_file = self.request_dir / 'query.json' + query_file.write_text(json.dumps(query, indent=2), encoding='utf-8') + + def load_reply(self) -> None: + if not self.reply_dir.is_dir(): + raise CMakeException('No response from the CMake file API') + + root = None + reg_index = re.compile(r'^index-.*\.json$') + for i in self.reply_dir.iterdir(): + if reg_index.match(i.name): + root = i + break + + if not root: + raise CMakeException('Failed to find the CMake file API index') + + index = self._reply_file_content(root) # Load the root index + index = self._strip_data(index) # Avoid loading duplicate files + index = self._resolve_references(index) # Load everything + index = self._strip_data(index) # Strip unused data (again for loaded files) + + # Debug output + debug_json = self.build_dir / '..' / 'fileAPI.json' + debug_json = debug_json.resolve() + debug_json.write_text(json.dumps(index, indent=2), encoding='utf-8') + mlog.cmd_ci_include(debug_json.as_posix()) + + # parse the JSON + for i in index['objects']: + assert isinstance(i, dict) + assert 'kind' in i + assert i['kind'] in self.kind_resolver_map + + self.kind_resolver_map[i['kind']](i) + + def _parse_codemodel(self, data: T.Dict[str, T.Any]) -> None: + assert 'configurations' in data + assert 'paths' in data + + source_dir = data['paths']['source'] + build_dir = data['paths']['build'] + + # The file API output differs quite a bit from the server + # output. It is more flat than the server output and makes + # heavy use of references. Here these references are + # resolved and the resulting data structure is identical + # to the CMake server output.
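+ # The nested helpers below (helper_parse_dir, parse_sources, + # parse_target, parse_project) rebuild that structure while walking + # configurations -> projects -> targets.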
+ + def helper_parse_dir(dir_entry: T.Dict[str, T.Any]) -> T.Tuple[Path, Path]: + src_dir = Path(dir_entry.get('source', '.')) + bld_dir = Path(dir_entry.get('build', '.')) + src_dir = src_dir if src_dir.is_absolute() else source_dir / src_dir + bld_dir = bld_dir if bld_dir.is_absolute() else build_dir / bld_dir + src_dir = src_dir.resolve() + bld_dir = bld_dir.resolve() + + return src_dir, bld_dir + + def parse_sources(comp_group: T.Dict[str, T.Any], tgt: T.Dict[str, T.Any]) -> T.Tuple[T.List[Path], T.List[Path], T.List[int]]: + gen = [] + src = [] + idx = [] + + src_list_raw = tgt.get('sources', []) + for i in comp_group.get('sourceIndexes', []): + if i >= len(src_list_raw) or 'path' not in src_list_raw[i]: + continue + if src_list_raw[i].get('isGenerated', False): + gen += [Path(src_list_raw[i]['path'])] + else: + src += [Path(src_list_raw[i]['path'])] + idx += [i] + + return src, gen, idx + + def parse_target(tgt: T.Dict[str, T.Any]) -> T.Dict[str, T.Any]: + src_dir, bld_dir = helper_parse_dir(cnf.get('paths', {})) + + # Parse install paths (if present) + install_paths = [] + if 'install' in tgt: + prefix = Path(tgt['install']['prefix']['path']) + install_paths = [prefix / x['path'] for x in tgt['install']['destinations']] + install_paths = list(set(install_paths)) + + # At first glance, it looks really nice that the CMake devs have + # decided to use arrays for the linker flags. However, this feeling + # soon turns into despair when you realize that there is only one entry + # per type in most cases, and we still have to do manual string splitting. + link_flags = [] + link_libs = [] + for i in tgt.get('link', {}).get('commandFragments', []): + if i['role'] == 'flags': + link_flags += [i['fragment']] + elif i['role'] == 'libraries': + link_libs += [i['fragment']] + elif i['role'] == 'libraryPath': + link_flags += ['-L{}'.format(i['fragment'])] + elif i['role'] == 'frameworkPath': + link_flags += ['-F{}'.format(i['fragment'])] + for i in tgt.get('archive', {}).get('commandFragments', []): + if i['role'] == 'flags': + link_flags += [i['fragment']] + + # TODO The `dependencies` entry is new in the file API. + # Maybe we can make use of that in addition to the + # implicit dependency detection + tgt_data = { + 'artifacts': [Path(x.get('path', '')) for x in tgt.get('artifacts', [])], + 'sourceDirectory': src_dir, + 'buildDirectory': bld_dir, + 'name': tgt.get('name', ''), + 'fullName': tgt.get('nameOnDisk', ''), + 'hasInstallRule': 'install' in tgt, + 'installPaths': install_paths, + 'linkerLanguage': tgt.get('link', {}).get('language', 'CXX'), + 'linkLibraries': ' '.join(link_libs), # See previous comment block for why we join the array + 'linkFlags': ' '.join(link_flags), # See previous comment block for why we join the array + 'type': tgt.get('type', 'EXECUTABLE'), + 'fileGroups': [], + } + + processed_src_idx = [] + for cg in tgt.get('compileGroups', []): + # Again, why an array, when there is usually only one element + # and arguments are separated with spaces...
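+ # The joined flag string is later split again by _flags_to_list() + # when CMakeFileGroup consumes the 'compileFlags' entry.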
+ flags = [] + for i in cg.get('compileCommandFragments', []): + flags += [i['fragment']] + + cg_data = { + 'defines': [x.get('define', '') for x in cg.get('defines', [])], + 'compileFlags': ' '.join(flags), + 'language': cg.get('language', 'C'), + 'isGenerated': None, # Set later, flag is stored per source file + 'sources': [], + 'includePath': cg.get('includes', []), + } + + normal_src, generated_src, src_idx = parse_sources(cg, tgt) + if normal_src: + cg_data = dict(cg_data) + cg_data['isGenerated'] = False + cg_data['sources'] = normal_src + tgt_data['fileGroups'] += [cg_data] + if generated_src: + cg_data = dict(cg_data) + cg_data['isGenerated'] = True + cg_data['sources'] = generated_src + tgt_data['fileGroups'] += [cg_data] + processed_src_idx += src_idx + + # Object libraries have no compile groups, only source groups. + # So we add all the source files that were not found in the + # previous loop to a dummy source group + normal_src = [] + generated_src = [] + for idx, src in enumerate(tgt.get('sources', [])): + if idx in processed_src_idx: + continue + + if src.get('isGenerated', False): + generated_src += [src['path']] + else: + normal_src += [src['path']] + + if normal_src: + tgt_data['fileGroups'] += [{ + 'isGenerated': False, + 'sources': normal_src, + }] + if generated_src: + tgt_data['fileGroups'] += [{ + 'isGenerated': True, + 'sources': generated_src, + }] + return tgt_data + + def parse_project(pro: T.Dict[str, T.Any]) -> T.Dict[str, T.Any]: + # Only look at the first directory specified in directoryIndexes + # TODO Figure out what the other indexes are there for + p_src_dir = source_dir + p_bld_dir = build_dir + try: + p_src_dir, p_bld_dir = helper_parse_dir(cnf['directories'][pro['directoryIndexes'][0]]) + except (IndexError, KeyError): + pass + + pro_data = { + 'name': pro.get('name', ''), + 'sourceDirectory': p_src_dir, + 'buildDirectory': p_bld_dir, + 'targets': [], + } + + for ref in pro.get('targetIndexes', []): + tgt = {} + try: + tgt = cnf['targets'][ref] + except (IndexError, KeyError): + pass + pro_data['targets'] += [parse_target(tgt)] + + return pro_data + + for cnf in data.get('configurations', []): + cnf_data = { + 'name': cnf.get('name', ''), + 'projects': [], + } + + for pro in cnf.get('projects', []): + cnf_data['projects'] += [parse_project(pro)] + + self.cmake_configurations += [CMakeConfiguration(cnf_data)] + + def _parse_cmakeFiles(self, data: T.Dict[str, T.Any]) -> None: + assert 'inputs' in data + assert 'paths' in data + + src_dir = Path(data['paths']['source']) + + for i in data['inputs']: + path = Path(i['path']) + path = path if path.is_absolute() else src_dir / path + self.cmake_sources += [CMakeBuildFile(path, i.get('isCMake', False), i.get('isGenerated', False))] + + def _strip_data(self, data: T.Any) -> T.Any: + if isinstance(data, list): + for idx, i in enumerate(data): + data[idx] = self._strip_data(i) + + elif isinstance(data, dict): + new = {} + for key, val in data.items(): + if key not in STRIP_KEYS: + new[key] = self._strip_data(val) + data = new + + return data + + def _resolve_references(self, data: T.Any) -> T.Any: + if isinstance(data, list): + for idx, i in enumerate(data): + data[idx] = self._resolve_references(i) + + elif isinstance(data, dict): + # Check for the "magic" reference entry and insert + # it into the root data dict + if 'jsonFile' in data: + data.update(self._reply_file_content(data['jsonFile'])) + + for key, val in data.items(): + data[key] = self._resolve_references(val) + + return data + + def
_reply_file_content(self, filename: Path) -> T.Dict[str, T.Any]: + real_path = self.reply_dir / filename + if not real_path.exists(): + raise CMakeException(f'File "{real_path}" does not exist') + + data = json.loads(real_path.read_text(encoding='utf-8')) + assert isinstance(data, dict) + for i in data.keys(): + assert isinstance(i, str) + return data diff --git a/vendored-meson/meson/mesonbuild/cmake/generator.py b/vendored-meson/meson/mesonbuild/cmake/generator.py new file mode 100644 index 000000000000..7903dd49f530 --- /dev/null +++ b/vendored-meson/meson/mesonbuild/cmake/generator.py @@ -0,0 +1,196 @@ +# Copyright 2019 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from __future__ import annotations + +from .. import mesonlib +from .. import mlog +from .common import cmake_is_debug +import typing as T + +if T.TYPE_CHECKING: + from .traceparser import CMakeTraceParser, CMakeTarget + +def parse_generator_expressions( + raw: str, + trace: 'CMakeTraceParser', + *, + context_tgt: T.Optional['CMakeTarget'] = None, + ) -> str: + '''Parse CMake generator expressions + + Most generator expressions are simply ignored for + simplicity; however, some are required for some common + use cases. + ''' + + # Early abort if no generator expression present + if '$<' not in raw: + return raw + + out = '' # type: str + i = 0 # type: int + + def equal(arg: str) -> str: + col_pos = arg.find(',') + if col_pos < 0: + return '0' + else: + return '1' if arg[:col_pos] == arg[col_pos + 1:] else '0' + + def vers_comp(op: str, arg: str) -> str: + col_pos = arg.find(',') + if col_pos < 0: + return '0' + else: + return '1' if mesonlib.version_compare(arg[:col_pos], '{}{}'.format(op, arg[col_pos + 1:])) else '0' + + def target_property(arg: str) -> str: + # We can't really support this since we don't have any context + if ',' not in arg: + if context_tgt is None: + return '' + return ';'.join(context_tgt.properties.get(arg, [])) + + args = arg.split(',') + props = trace.targets[args[0]].properties.get(args[1], []) if args[0] in trace.targets else [] + return ';'.join(props) + + def target_file(arg: str) -> str: + if arg not in trace.targets: + mlog.warning(f"Unable to evaluate the cmake variable '$<TARGET_FILE:{arg}>'.") + return '' + tgt = trace.targets[arg] + + cfgs = [] + cfg = '' + + if 'IMPORTED_CONFIGURATIONS' in tgt.properties: + cfgs = [x for x in tgt.properties['IMPORTED_CONFIGURATIONS'] if x] + cfg = cfgs[0] + + if cmake_is_debug(trace.env): + if 'DEBUG' in cfgs: + cfg = 'DEBUG' + elif 'RELEASE' in cfgs: + cfg = 'RELEASE' + else: + if 'RELEASE' in cfgs: + cfg = 'RELEASE' + + if f'IMPORTED_IMPLIB_{cfg}' in tgt.properties: + return ';'.join([x for x in tgt.properties[f'IMPORTED_IMPLIB_{cfg}'] if x]) + elif 'IMPORTED_IMPLIB' in tgt.properties: + return ';'.join([x for x in tgt.properties['IMPORTED_IMPLIB'] if x]) + elif f'IMPORTED_LOCATION_{cfg}' in tgt.properties: + return ';'.join([x for x in tgt.properties[f'IMPORTED_LOCATION_{cfg}'] if x]) + elif 'IMPORTED_LOCATION' in tgt.properties: + return ';'.join([x
for x in tgt.properties['IMPORTED_LOCATION'] if x]) + return '' + + supported = { + # Boolean functions + 'BOOL': lambda x: '0' if x.upper() in {'0', 'FALSE', 'OFF', 'N', 'NO', 'IGNORE', 'NOTFOUND'} or x.endswith('-NOTFOUND') else '1', + 'AND': lambda x: '1' if all(y == '1' for y in x.split(',')) else '0', + 'OR': lambda x: '1' if any(y == '1' for y in x.split(',')) else '0', + 'NOT': lambda x: '0' if x == '1' else '1', + + 'IF': lambda x: x.split(',')[1] if x.split(',')[0] == '1' else x.split(',')[2], + + '0': lambda x: '', + '1': lambda x: x, + + # String operations + 'STREQUAL': equal, + 'EQUAL': equal, + 'VERSION_LESS': lambda x: vers_comp('<', x), + 'VERSION_GREATER': lambda x: vers_comp('>', x), + 'VERSION_EQUAL': lambda x: vers_comp('=', x), + 'VERSION_LESS_EQUAL': lambda x: vers_comp('<=', x), + 'VERSION_GREATER_EQUAL': lambda x: vers_comp('>=', x), + + # String modification + 'LOWER_CASE': lambda x: x.lower(), + 'UPPER_CASE': lambda x: x.upper(), + + # Always assume the BUILD_INTERFACE is valid. + # INSTALL_INTERFACE is always invalid for subprojects and + # it should also never appear in CMake config files, used + # for dependencies + 'INSTALL_INTERFACE': lambda x: '', + 'BUILD_INTERFACE': lambda x: x, + + # Constants + 'ANGLE-R': lambda x: '>', + 'COMMA': lambda x: ',', + 'SEMICOLON': lambda x: ';', + + # Target related expressions + 'TARGET_EXISTS': lambda x: '1' if x in trace.targets else '0', + 'TARGET_NAME_IF_EXISTS': lambda x: x if x in trace.targets else '', + 'TARGET_PROPERTY': target_property, + 'TARGET_FILE': target_file, + } # type: T.Dict[str, T.Callable[[str], str]] + + # Recursively evaluate generator expressions + def eval_generator_expressions() -> str: + nonlocal i + i += 2 + + func = '' # type: str + args = '' # type: str + res = '' # type: str + exp = '' # type: str + + # Determine the body of the expression + while i < len(raw): + if raw[i] == '>': + # End of the generator expression + break + elif i < len(raw) - 1 and raw[i] == '$' and raw[i + 1] == '<': + # Nested generator expression + exp += eval_generator_expressions() + else: + # Generator expression body + exp += raw[i] + + i += 1 + + # Split the expression into a function and arguments part + col_pos = exp.find(':') + if col_pos < 0: + func = exp + else: + func = exp[:col_pos] + args = exp[col_pos + 1:] + + func = func.strip() + args = args.strip() + + # Evaluate the function + if func in supported: + res = supported[func](args) + + return res + + while i < len(raw): + if i < len(raw) - 1 and raw[i] == '$' and raw[i + 1] == '<': + # Generator expression detected --> try resolving it + out += eval_generator_expressions() + else: + # Normal string, leave unchanged + out += raw[i] + + i += 1 + + return out diff --git a/vendored-meson/meson/mesonbuild/cmake/interpreter.py b/vendored-meson/meson/mesonbuild/cmake/interpreter.py new file mode 100644 index 000000000000..f88d091ab15b --- /dev/null +++ b/vendored-meson/meson/mesonbuild/cmake/interpreter.py @@ -0,0 +1,1266 @@ +# Copyright 2019 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +# This class contains the basic functionality needed to run any interpreter +# or an interpreter-based tool. +from __future__ import annotations + +from functools import lru_cache +from os import environ +from pathlib import Path +import re +import typing as T + +from .common import CMakeException, CMakeTarget, language_map, cmake_get_generator_args, check_cmake_args +from .fileapi import CMakeFileAPI +from .executor import CMakeExecutor +from .toolchain import CMakeToolchain, CMakeExecScope +from .traceparser import CMakeTraceParser +from .tracetargets import resolve_cmake_trace_targets +from .. import mlog, mesonlib +from ..mesonlib import MachineChoice, OrderedSet, path_is_in_root, relative_to_if_possible, OptionKey +from ..mesondata import DataFile +from ..compilers.compilers import assembler_suffixes, lang_suffixes, header_suffixes, obj_suffixes, lib_suffixes, is_header +from ..programs import ExternalProgram +from ..coredata import FORBIDDEN_TARGET_NAMES +from ..mparser import ( + Token, + BaseNode, + CodeBlockNode, + FunctionNode, + ArrayNode, + ArgumentNode, + AssignmentNode, + BooleanNode, + StringNode, + IdNode, + IndexNode, + MethodNode, + NumberNode, +) + + +if T.TYPE_CHECKING: + from .common import CMakeConfiguration, TargetOptions + from .traceparser import CMakeGeneratorTarget + from .._typing import ImmutableListProtocol + from ..build import Build + from ..backend.backends import Backend + from ..environment import Environment + + TYPE_mixed = T.Union[str, int, bool, Path, BaseNode] + TYPE_mixed_list = T.Union[TYPE_mixed, T.Sequence[TYPE_mixed]] + TYPE_mixed_kwargs = T.Dict[str, TYPE_mixed_list] + +# Disable all warnings automatically enabled with --trace and friends +# See https://cmake.org/cmake/help/latest/variable/CMAKE_POLICY_WARNING_CMPNNNN.html +disable_policy_warnings = [ + 'CMP0025', + 'CMP0047', + 'CMP0056', + 'CMP0060', + 'CMP0065', + 'CMP0066', + 'CMP0067', + 'CMP0082', + 'CMP0089', + 'CMP0102', +] + +target_type_map = { + 'STATIC_LIBRARY': 'static_library', + 'MODULE_LIBRARY': 'shared_module', + 'SHARED_LIBRARY': 'shared_library', + 'EXECUTABLE': 'executable', + 'OBJECT_LIBRARY': 'static_library', + 'INTERFACE_LIBRARY': 'header_only' +} + +skip_targets = ['UTILITY'] + +blacklist_compiler_flags = [ + '-Wall', '-Wextra', '-Weverything', '-Werror', '-Wpedantic', '-pedantic', '-w', + '/W1', '/W2', '/W3', '/W4', '/Wall', '/WX', '/w', + '/O1', '/O2', '/Ob', '/Od', '/Og', '/Oi', '/Os', '/Ot', '/Ox', '/Oy', '/Ob0', + '/RTC1', '/RTCc', '/RTCs', '/RTCu', + '/Z7', '/Zi', '/ZI', +] + +blacklist_link_flags = [ + '/machine:x64', '/machine:x86', '/machine:arm', '/machine:ebc', + '/debug', '/debug:fastlink', '/debug:full', '/debug:none', + '/incremental', +] + +blacklist_clang_cl_link_flags = ['/GR', '/EHsc', '/MDd', '/Zi', '/RTC1'] + +blacklist_link_libs = [ + 'kernel32.lib', + 'user32.lib', + 'gdi32.lib', + 'winspool.lib', + 'shell32.lib', + 'ole32.lib', + 'oleaut32.lib', + 'uuid.lib', + 'comdlg32.lib', + 'advapi32.lib' +] + +transfer_dependencies_from = ['header_only'] + +_cmake_name_regex = re.compile(r'[^_a-zA-Z0-9]') +def _sanitize_cmake_name(name: str) -> str: + name = _cmake_name_regex.sub('_', name) + if name in FORBIDDEN_TARGET_NAMES or name.startswith('meson'): + name = 'cm_' + name + return name + +class OutputTargetMap: + rm_so_version = re.compile(r'(\.[0-9]+)+$') + + def __init__(self, build_dir: Path): + self.tgt_map: T.Dict[str, 
T.Union['ConverterTarget', 'ConverterCustomTarget']] = {} + self.build_dir = build_dir + + def add(self, tgt: T.Union['ConverterTarget', 'ConverterCustomTarget']) -> None: + def assign_keys(keys: T.List[str]) -> None: + for i in [x for x in keys if x]: + self.tgt_map[i] = tgt + keys = [self._target_key(tgt.cmake_name)] + if isinstance(tgt, ConverterTarget): + keys += [tgt.full_name] + keys += [self._rel_artifact_key(x) for x in tgt.artifacts] + keys += [self._base_artifact_key(x) for x in tgt.artifacts] + if isinstance(tgt, ConverterCustomTarget): + keys += [self._rel_generated_file_key(x) for x in tgt.original_outputs] + keys += [self._base_generated_file_key(x) for x in tgt.original_outputs] + assign_keys(keys) + + def _return_first_valid_key(self, keys: T.List[str]) -> T.Optional[T.Union['ConverterTarget', 'ConverterCustomTarget']]: + for i in keys: + if i and i in self.tgt_map: + return self.tgt_map[i] + return None + + def target(self, name: str) -> T.Optional[T.Union['ConverterTarget', 'ConverterCustomTarget']]: + return self._return_first_valid_key([self._target_key(name)]) + + def executable(self, name: str) -> T.Optional['ConverterTarget']: + tgt = self.target(name) + if tgt is None or not isinstance(tgt, ConverterTarget): + return None + if tgt.meson_func() != 'executable': + return None + return tgt + + def artifact(self, name: str) -> T.Optional[T.Union['ConverterTarget', 'ConverterCustomTarget']]: + keys = [] + candidates = [name, OutputTargetMap.rm_so_version.sub('', name)] + for i in lib_suffixes: + if not name.endswith('.' + i): + continue + new_name = name[:-len(i) - 1] + new_name = OutputTargetMap.rm_so_version.sub('', new_name) + candidates += [f'{new_name}.{i}'] + for i in candidates: + keys += [self._rel_artifact_key(Path(i)), Path(i).name, self._base_artifact_key(Path(i))] + return self._return_first_valid_key(keys) + + def generated(self, name: Path) -> T.Optional['ConverterCustomTarget']: + res = self._return_first_valid_key([self._rel_generated_file_key(name), self._base_generated_file_key(name)]) + assert res is None or isinstance(res, ConverterCustomTarget) + return res + + # Utility functions to generate local keys + def _rel_path(self, fname: Path) -> T.Optional[Path]: + try: + return fname.resolve().relative_to(self.build_dir) + except ValueError: + pass + return None + + def _target_key(self, tgt_name: str) -> str: + return f'__tgt_{tgt_name}__' + + def _rel_generated_file_key(self, fname: Path) -> T.Optional[str]: + path = self._rel_path(fname) + return f'__relgen_{path.as_posix()}__' if path else None + + def _base_generated_file_key(self, fname: Path) -> str: + return f'__gen_{fname.name}__' + + def _rel_artifact_key(self, fname: Path) -> T.Optional[str]: + path = self._rel_path(fname) + return f'__relart_{path.as_posix()}__' if path else None + + def _base_artifact_key(self, fname: Path) -> str: + return f'__art_{fname.name}__' + +class ConverterTarget: + def __init__(self, target: CMakeTarget, env: 'Environment', for_machine: MachineChoice) -> None: + self.env = env + self.for_machine = for_machine + self.artifacts = target.artifacts + self.src_dir = target.src_dir + self.build_dir = target.build_dir + self.name = target.name + self.cmake_name = target.name + self.full_name = target.full_name + self.type = target.type + self.install = target.install + self.install_dir: T.Optional[Path] = None + self.link_libraries = target.link_libraries + self.link_flags = target.link_flags + target.link_lang_flags + self.depends_raw: T.List[str] = [] + self.depends: 
T.List[T.Union[ConverterTarget, ConverterCustomTarget]] = [] + + if target.install_paths: + self.install_dir = target.install_paths[0] + + self.languages: T.Set[str] = set() + self.sources: T.List[Path] = [] + self.generated: T.List[Path] = [] + self.generated_ctgt: T.List[CustomTargetReference] = [] + self.includes: T.List[Path] = [] + self.sys_includes: T.List[Path] = [] + self.link_with: T.List[T.Union[ConverterTarget, ConverterCustomTarget]] = [] + self.object_libs: T.List[ConverterTarget] = [] + self.compile_opts: T.Dict[str, T.List[str]] = {} + self.public_compile_opts: T.List[str] = [] + self.pie = False + + # Project default override options (c_std, cpp_std, etc.) + self.override_options: T.List[str] = [] + + # Convert the target name to a valid meson target name + self.name = _sanitize_cmake_name(self.name) + + self.generated_raw: T.List[Path] = [] + + for i in target.files: + languages: T.Set[str] = set() + src_suffixes: T.Set[str] = set() + + # Insert suffixes + for j in i.sources: + if not j.suffix: + continue + src_suffixes.add(j.suffix[1:]) + + # Determine the meson language(s) + # Extract the default language from the explicit CMake field + lang_cmake_to_meson = {val.lower(): key for key, val in language_map.items()} + languages.add(lang_cmake_to_meson.get(i.language.lower(), 'c')) + + # Determine missing languages from the source suffixes + for sfx in src_suffixes: + for key, val in lang_suffixes.items(): + if sfx in val: + languages.add(key) + break + + # Register the new languages and initialize the compile opts array + for lang in languages: + self.languages.add(lang) + if lang not in self.compile_opts: + self.compile_opts[lang] = [] + + # Add arguments, but avoid duplicates + args = i.flags + args += [f'-D{x}' for x in i.defines] + for lang in languages: + self.compile_opts[lang] += [x for x in args if x not in self.compile_opts[lang]] + + # Handle include directories + self.includes += [x.path for x in i.includes if x.path not in self.includes and not x.isSystem] + self.sys_includes += [x.path for x in i.includes if x.path not in self.sys_includes and x.isSystem] + + # Add sources to the right array + if i.is_generated: + self.generated_raw += i.sources + else: + self.sources += i.sources + + def __repr__(self) -> str: + return f'<{self.__class__.__name__}: {self.name}>' + + std_regex = re.compile(r'([-]{1,2}std=|/std:v?|[-]{1,2}std:)(.*)') + + def postprocess(self, output_target_map: OutputTargetMap, root_src_dir: Path, subdir: Path, install_prefix: Path, trace: CMakeTraceParser) -> None: + # Detect setting the C and C++ standard and do additional compiler args manipulation + for i in ['c', 'cpp']: + if i not in self.compile_opts: + continue + + temp = [] + for j in self.compile_opts[i]: + m = ConverterTarget.std_regex.match(j) + ctgt = output_target_map.generated(Path(j)) + if m: + std = m.group(2) + supported = self._all_lang_stds(i) + if std not in supported: + mlog.warning( + 'Unknown {0}_std "{1}" -> Ignoring. Try setting the project-' + 'level {0}_std if build errors occur. Known ' + '{0}_stds are: {2}'.format(i, std, ' '.join(supported)), + once=True + ) + continue + self.override_options += [f'{i}_std={std}'] + elif j in {'-fPIC', '-fpic', '-fPIE', '-fpie'}: + self.pie = True + elif isinstance(ctgt, ConverterCustomTarget): + # Sometimes projects pass generated source files as compiler + # flags. 
Add these as generated sources to ensure that the + # corresponding custom target is run. + self.generated_raw += [Path(j)] + temp += [j] + elif j in blacklist_compiler_flags: + pass + else: + temp += [j] + + self.compile_opts[i] = temp + + # Make sure to force enable -fPIC for OBJECT libraries + if self.type.upper() == 'OBJECT_LIBRARY': + self.pie = True + + # Use the CMake trace, if required + tgt = trace.targets.get(self.cmake_name) + if tgt: + self.depends_raw = trace.targets[self.cmake_name].depends + + rtgt = resolve_cmake_trace_targets(self.cmake_name, trace, self.env) + self.includes += [Path(x) for x in rtgt.include_directories] + self.link_flags += rtgt.link_flags + self.public_compile_opts += rtgt.public_compile_opts + self.link_libraries += rtgt.libraries + + elif self.type.upper() not in ['EXECUTABLE', 'OBJECT_LIBRARY']: + mlog.warning('CMake: Target', mlog.bold(self.cmake_name), 'not found in CMake trace. This can lead to build errors') + + temp = [] + for i in self.link_libraries: + # Let meson handle this arcane magic + if ',-rpath,' in i: + continue + if not Path(i).is_absolute(): + link_with = output_target_map.artifact(i) + if link_with: + self.link_with += [link_with] + continue + + temp += [i] + self.link_libraries = temp + + # Filter out files that are not supported by the language + supported = list(assembler_suffixes) + list(header_suffixes) + list(obj_suffixes) + for i in self.languages: + supported += list(lang_suffixes[i]) + supported = [f'.{x}' for x in supported] + self.sources = [x for x in self.sources if any(x.name.endswith(y) for y in supported)] + self.generated_raw = [x for x in self.generated_raw if any(x.name.endswith(y) for y in supported)] + + # Make paths relative + def rel_path(x: Path, is_header: bool, is_generated: bool) -> T.Optional[Path]: + if not x.is_absolute(): + x = self.src_dir / x + x = x.resolve() + assert x.is_absolute() + if not x.exists() and not any(x.name.endswith(y) for y in obj_suffixes) and not is_generated: + if path_is_in_root(x, Path(self.env.get_build_dir()), resolve=True): + x.mkdir(parents=True, exist_ok=True) + return x.relative_to(Path(self.env.get_build_dir()) / subdir) + else: + mlog.warning('CMake: path', mlog.bold(x.as_posix()), 'does not exist.') + mlog.warning(' --> Ignoring. This can lead to build errors.') + return None + if x in trace.explicit_headers: + return None + if ( + path_is_in_root(x, Path(self.env.get_source_dir())) + and not ( + path_is_in_root(x, root_src_dir) or + path_is_in_root(x, Path(self.env.get_build_dir())) + ) + ): + mlog.warning('CMake: path', mlog.bold(x.as_posix()), 'is inside the root project but', mlog.bold('not'), 'inside the subproject.') + mlog.warning(' --> Ignoring. This can lead to build errors.') + return None
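+ # From here on: headers under the build directory are mapped relative + # to the subproject build dir, paths under the subproject source root + # relative to it, and everything else is returned unchanged.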
+ if path_is_in_root(x, Path(self.env.get_build_dir())) and is_header: + return x.relative_to(Path(self.env.get_build_dir()) / subdir) + if path_is_in_root(x, root_src_dir): + return x.relative_to(root_src_dir) + return x + + build_dir_rel = self.build_dir.relative_to(Path(self.env.get_build_dir()) / subdir) + self.generated_raw = [rel_path(x, False, True) for x in self.generated_raw] + self.includes = list(OrderedSet([rel_path(x, True, False) for x in OrderedSet(self.includes)] + [build_dir_rel])) + self.sys_includes = list(OrderedSet([rel_path(x, True, False) for x in OrderedSet(self.sys_includes)])) + self.sources = [rel_path(x, False, False) for x in self.sources] + + # Resolve custom targets + for gen_file in self.generated_raw: + ctgt = output_target_map.generated(gen_file) + if ctgt: + assert isinstance(ctgt, ConverterCustomTarget) + ref = ctgt.get_ref(gen_file) + assert isinstance(ref, CustomTargetReference) and ref.valid() + self.generated_ctgt += [ref] + elif gen_file is not None: + self.generated += [gen_file] + + # Remove deleted entries + self.includes = [x for x in self.includes if x is not None] + self.sys_includes = [x for x in self.sys_includes if x is not None] + self.sources = [x for x in self.sources if x is not None] + + # Make sure '.' is always in the include directories + if Path('.') not in self.includes: + self.includes += [Path('.')] + + # make install dir relative to the install prefix + if self.install_dir and self.install_dir.is_absolute(): + if path_is_in_root(self.install_dir, install_prefix): + self.install_dir = self.install_dir.relative_to(install_prefix) + + # Remove blacklisted options and libs + def check_flag(flag: str) -> bool: + if flag.lower() in blacklist_link_flags or flag in blacklist_compiler_flags + blacklist_clang_cl_link_flags: + return False + if flag.startswith('/D'): + return False + return True + + self.link_libraries = [x for x in self.link_libraries if x.lower() not in blacklist_link_libs] + self.link_flags = [x for x in self.link_flags if check_flag(x)] + + # Handle OSX frameworks + def handle_frameworks(flags: T.List[str]) -> T.List[str]: + res: T.List[str] = [] + for i in flags: + p = Path(i) + if not p.exists() or not p.name.endswith('.framework'): + res += [i] + continue + res += ['-framework', p.stem] + return res + + self.link_libraries = handle_frameworks(self.link_libraries) + self.link_flags = handle_frameworks(self.link_flags) + + # Handle explicit CMake add_dependencies() calls + for i in self.depends_raw: + dep_tgt = output_target_map.target(i) + if dep_tgt: + self.depends.append(dep_tgt) + + def process_object_libs(self, obj_target_list: T.List['ConverterTarget'], linker_workaround: bool) -> None: + # Try to detect the object library(s) from the generated input sources + temp = [x for x in self.generated if any(x.name.endswith('.' + y) for y in obj_suffixes)] + stem = [x.stem for x in temp] + exts = self._all_source_suffixes() + # Temp now stores the source filenames of the object files + for i in obj_target_list: + source_files = [x.name for x in i.sources + i.generated] + for j in stem: + # On some platforms (specifically looking at you Windows with vs20xy backend) CMake does + # not produce object files with the format `foo.cpp.obj`, instead it skips the language + # suffix and just produces object files like `foo.obj`. Thus we have to do our best to + # undo this step and guess the correct language suffix of the object file. This is done + # by trying all language suffixes meson knows and checking if one of them fits.
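+ # candidates starts with the raw stem; if it lacks a source suffix, + # one guess per known language suffix is added as well.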
+ candidates = [j] + if not any(j.endswith('.' + x) for x in exts): + mlog.warning('Object files do not contain source file extensions, thus falling back to guessing them.', once=True) + candidates += [f'{j}.{x}' for x in exts] + if any(x in source_files for x in candidates): + if linker_workaround: + self._append_objlib_sources(i) + else: + self.includes += i.includes + self.includes = list(OrderedSet(self.includes)) + self.object_libs += [i] + break + + # Filter out object files from the sources + self.generated = [x for x in self.generated if not any(x.name.endswith('.' + y) for y in obj_suffixes)] + + def _append_objlib_sources(self, tgt: 'ConverterTarget') -> None: + self.includes += tgt.includes + self.sources += tgt.sources + self.generated += tgt.generated + self.generated_ctgt += tgt.generated_ctgt + self.includes = list(OrderedSet(self.includes)) + self.sources = list(OrderedSet(self.sources)) + self.generated = list(OrderedSet(self.generated)) + self.generated_ctgt = list(OrderedSet(self.generated_ctgt)) + + # Inherit compiler arguments since they may be required for building + for lang, opts in tgt.compile_opts.items(): + if lang not in self.compile_opts: + self.compile_opts[lang] = [] + self.compile_opts[lang] += [x for x in opts if x not in self.compile_opts[lang]] + + @lru_cache(maxsize=None) + def _all_source_suffixes(self) -> 'ImmutableListProtocol[str]': + suffixes: T.List[str] = [] + for exts in lang_suffixes.values(): + suffixes.extend(exts) + return suffixes + + @lru_cache(maxsize=None) + def _all_lang_stds(self, lang: str) -> 'ImmutableListProtocol[str]': + try: + res = self.env.coredata.options[OptionKey('std', machine=MachineChoice.BUILD, lang=lang)].choices + except KeyError: + return [] + + # TODO: Get rid of this once we have proper typing for options + assert isinstance(res, list) + for i in res: + assert isinstance(i, str) + + return res + + def process_inter_target_dependencies(self) -> None: + # Move the dependencies from all transfer_dependencies_from to the target + to_process = list(self.depends) + processed = [] + new_deps = [] + for i in to_process: + processed += [i] + if isinstance(i, ConverterTarget) and i.meson_func() in transfer_dependencies_from: + to_process += [x for x in i.depends if x not in processed] + else: + new_deps += [i] + self.depends = list(OrderedSet(new_deps)) + + def cleanup_dependencies(self) -> None: + # Clear the dependencies from targets that were moved from + if self.meson_func() in transfer_dependencies_from: + self.depends = [] + + def meson_func(self) -> str: + return target_type_map.get(self.type.upper()) + + def log(self) -> None: + mlog.log('Target', mlog.bold(self.name), f'({self.cmake_name})') + mlog.log(' -- artifacts: ', mlog.bold(str(self.artifacts))) + mlog.log(' -- full_name: ', mlog.bold(self.full_name)) + mlog.log(' -- type: ', mlog.bold(self.type)) + mlog.log(' -- install: ', mlog.bold('true' if self.install else 'false')) + mlog.log(' -- install_dir: ', mlog.bold(self.install_dir.as_posix() if self.install_dir else '')) + mlog.log(' -- link_libraries: ', mlog.bold(str(self.link_libraries))) + mlog.log(' -- link_with: ', mlog.bold(str(self.link_with))) + mlog.log(' -- object_libs: ', mlog.bold(str(self.object_libs))) + mlog.log(' -- link_flags: ', mlog.bold(str(self.link_flags))) + mlog.log(' -- languages: ', mlog.bold(str(self.languages))) + mlog.log(' -- includes: ', mlog.bold(str(self.includes))) + mlog.log(' --
sys_includes: ', mlog.bold(str(self.sys_includes))) + mlog.log(' -- sources: ', mlog.bold(str(self.sources))) + mlog.log(' -- generated: ', mlog.bold(str(self.generated))) + mlog.log(' -- generated_ctgt: ', mlog.bold(str(self.generated_ctgt))) + mlog.log(' -- pie: ', mlog.bold('true' if self.pie else 'false')) + mlog.log(' -- override_opts: ', mlog.bold(str(self.override_options))) + mlog.log(' -- depends: ', mlog.bold(str(self.depends))) + mlog.log(' -- options:') + for key, val in self.compile_opts.items(): + mlog.log(' -', key, '=', mlog.bold(str(val))) + +class CustomTargetReference: + def __init__(self, ctgt: 'ConverterCustomTarget', index: int) -> None: + self.ctgt = ctgt + self.index = index + + def __repr__(self) -> str: + if self.valid(): + return '<{}: {} [{}]>'.format(self.__class__.__name__, self.ctgt.name, self.ctgt.outputs[self.index]) + else: + return f'<{self.__class__.__name__}: INVALID REFERENCE>' + + def valid(self) -> bool: + return self.ctgt is not None and self.index >= 0 + + def filename(self) -> str: + return self.ctgt.outputs[self.index] + +class ConverterCustomTarget: + tgt_counter = 0 + out_counter = 0 + + def __init__(self, target: CMakeGeneratorTarget, env: 'Environment', for_machine: MachineChoice) -> None: + assert target.current_bin_dir is not None + assert target.current_src_dir is not None + self.name = target.name + if not self.name: + self.name = f'custom_tgt_{ConverterCustomTarget.tgt_counter}' + ConverterCustomTarget.tgt_counter += 1 + self.cmake_name = str(self.name) + self.original_outputs = list(target.outputs) + self.outputs = [x.name for x in self.original_outputs] + self.conflict_map: T.Dict[str, str] = {} + self.command: T.List[T.List[T.Union[str, ConverterTarget]]] = [] + self.working_dir = target.working_dir + self.depends_raw = target.depends + self.inputs: T.List[T.Union[str, CustomTargetReference]] = [] + self.depends: T.List[T.Union[ConverterTarget, ConverterCustomTarget]] = [] + self.current_bin_dir = target.current_bin_dir + self.current_src_dir = target.current_src_dir + self.env = env + self.for_machine = for_machine + self._raw_target = target + + # Convert the target name to a valid meson target name + self.name = _sanitize_cmake_name(self.name) + + def __repr__(self) -> str: + return f'<{self.__class__.__name__}: {self.name} {self.outputs}>' + + def postprocess(self, output_target_map: OutputTargetMap, root_src_dir: Path, all_outputs: T.List[str], trace: CMakeTraceParser) -> None: + # Default the working directory to ${CMAKE_CURRENT_BINARY_DIR} + if self.working_dir is None: + self.working_dir = self.current_bin_dir + + # relative paths in the working directory are always relative + # to ${CMAKE_CURRENT_BINARY_DIR} + if not self.working_dir.is_absolute(): + self.working_dir = self.current_bin_dir / self.working_dir + + # Modify the original outputs if they are relative. 
Again, + # relative paths are relative to ${CMAKE_CURRENT_BINARY_DIR} + def ensure_absolute(x: Path) -> Path: + if x.is_absolute(): + return x + else: + return self.current_bin_dir / x + self.original_outputs = [ensure_absolute(x) for x in self.original_outputs] + + # Ensure that there is no duplicate output in the project so + # that meson can handle cases where the same filename is + # generated in multiple directories + temp_outputs: T.List[str] = [] + for i in self.outputs: + if i in all_outputs: + old = str(i) + i = f'c{ConverterCustomTarget.out_counter}_{i}' + ConverterCustomTarget.out_counter += 1 + self.conflict_map[old] = i + all_outputs += [i] + temp_outputs += [i] + self.outputs = temp_outputs + + # Check if the command is a build target + commands: T.List[T.List[T.Union[str, ConverterTarget]]] = [] + for curr_cmd in self._raw_target.command: + assert isinstance(curr_cmd, list) + assert curr_cmd[0] != '', "An empty string is not a valid executable" + cmd: T.List[T.Union[str, ConverterTarget]] = [] + + for j in curr_cmd: + if not j: + continue + target = output_target_map.executable(j) + if target: + # When cross compiling, binaries have to be executed with an exe_wrapper (for instance wine for mingw-w64) + if self.env.exe_wrapper is not None and self.env.properties[self.for_machine].get_cmake_use_exe_wrapper(): + assert isinstance(self.env.exe_wrapper, ExternalProgram) + cmd += self.env.exe_wrapper.get_command() + cmd += [target] + continue + elif j in trace.targets: + trace_tgt = trace.targets[j] + if trace_tgt.type == 'EXECUTABLE' and 'IMPORTED_LOCATION' in trace_tgt.properties: + cmd += trace_tgt.properties['IMPORTED_LOCATION'] + continue + mlog.debug(f'CMake: Found invalid CMake target "{j}" --> ignoring \n{trace_tgt}') + + # Fallthrough on error + cmd += [j] + + commands += [cmd] + self.command = commands + + # If the custom target does not declare any output, create a dummy + # one that can be used as dependency. + if not self.outputs: + self.outputs = [self.name + '.h'] + + # Check dependencies and input files + for i in self.depends_raw: + if not i: + continue + raw = Path(i) + art = output_target_map.artifact(i) + tgt = output_target_map.target(i) + gen = output_target_map.generated(raw) + + rel_to_root = None + try: + rel_to_root = raw.relative_to(root_src_dir) + except ValueError: + rel_to_root = None + + # First check for existing files. Only then check for existing + # targets, etc. This reduces the chance of misdetecting input files + # as outputs from other targets. 
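+ # (A file that exists on disk always wins over a target or artifact + # that merely shares the name.)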
+ # See https://github.com/mesonbuild/meson/issues/6632 + if not raw.is_absolute() and (self.current_src_dir / raw).is_file(): + self.inputs += [(self.current_src_dir / raw).relative_to(root_src_dir).as_posix()] + elif raw.is_absolute() and raw.exists() and rel_to_root is not None: + self.inputs += [rel_to_root.as_posix()] + elif art: + self.depends += [art] + elif tgt: + self.depends += [tgt] + elif gen: + ctgt_ref = gen.get_ref(raw) + assert ctgt_ref is not None + self.inputs += [ctgt_ref] + + def process_inter_target_dependencies(self) -> None: + # Move the dependencies from all transfer_dependencies_from to the target + to_process = list(self.depends) + processed = [] + new_deps = [] + for i in to_process: + processed += [i] + if isinstance(i, ConverterTarget) and i.meson_func() in transfer_dependencies_from: + to_process += [x for x in i.depends if x not in processed] + else: + new_deps += [i] + self.depends = list(OrderedSet(new_deps)) + + def get_ref(self, fname: Path) -> T.Optional[CustomTargetReference]: + name = fname.name + try: + if name in self.conflict_map: + name = self.conflict_map[name] + idx = self.outputs.index(name) + return CustomTargetReference(self, idx) + except ValueError: + return None + + def log(self) -> None: + mlog.log('Custom Target', mlog.bold(self.name), f'({self.cmake_name})') + mlog.log(' -- command: ', mlog.bold(str(self.command))) + mlog.log(' -- outputs: ', mlog.bold(str(self.outputs))) + mlog.log(' -- conflict_map: ', mlog.bold(str(self.conflict_map))) + mlog.log(' -- working_dir: ', mlog.bold(str(self.working_dir))) + mlog.log(' -- depends_raw: ', mlog.bold(str(self.depends_raw))) + mlog.log(' -- inputs: ', mlog.bold(str(self.inputs))) + mlog.log(' -- depends: ', mlog.bold(str(self.depends))) + +class CMakeInterpreter: + def __init__(self, build: 'Build', subdir: Path, src_dir: Path, install_prefix: Path, env: 'Environment', backend: 'Backend'): + self.build = build + self.subdir = subdir + self.src_dir = src_dir + self.build_dir_rel = subdir / '__CMake_build' + self.build_dir = Path(env.get_build_dir()) / self.build_dir_rel + self.install_prefix = install_prefix + self.env = env + self.for_machine = MachineChoice.HOST # TODO make parameter + self.backend_name = backend.name + self.linkers: T.Set[str] = set() + self.fileapi = CMakeFileAPI(self.build_dir) + + # Raw CMake results + self.bs_files: T.List[Path] = [] + self.codemodel_configs: T.Optional[T.List[CMakeConfiguration]] = None + self.cmake_stderr: T.Optional[str] = None + + # Analysed data + self.project_name = '' + self.languages: T.List[str] = [] + self.targets: T.List[ConverterTarget] = [] + self.custom_targets: T.List[ConverterCustomTarget] = [] + self.trace: CMakeTraceParser + self.output_target_map = OutputTargetMap(self.build_dir) + + # Generated meson data + self.generated_targets: T.Dict[str, T.Dict[str, T.Optional[str]]] = {} + self.internal_name_map: T.Dict[str, str] = {} + + # Do some special handling for object libraries for certain configurations + self._object_lib_workaround = False + if self.backend_name.startswith('vs'): + for comp in self.env.coredata.compilers[self.for_machine].values(): + if comp.get_linker_id() == 'link': + self._object_lib_workaround = True + break + + def configure(self, extra_cmake_options: T.List[str]) -> CMakeExecutor: + # Find CMake + # TODO: Using MachineChoice.BUILD should always be correct here, but also evaluate the use of self.for_machine + cmake_exe = CMakeExecutor(self.env, '>=3.14', MachineChoice.BUILD) + if not cmake_exe.found(): + raise 
CMakeException('Unable to find CMake') + self.trace = CMakeTraceParser(cmake_exe.version(), self.build_dir, self.env, permissive=True) + + preload_file = DataFile('cmake/data/preload.cmake').write_to_private(self.env) + toolchain = CMakeToolchain(cmake_exe, self.env, self.for_machine, CMakeExecScope.SUBPROJECT, self.build_dir, preload_file) + toolchain_file = toolchain.write() + + # TODO: drop this check once the deprecated `cmake_args` kwarg is removed + extra_cmake_options = check_cmake_args(extra_cmake_options) + + cmake_args = [] + cmake_args += cmake_get_generator_args(self.env) + cmake_args += [f'-DCMAKE_INSTALL_PREFIX={self.install_prefix}'] + cmake_args += extra_cmake_options + trace_args = self.trace.trace_args() + cmcmp_args = [f'-DCMAKE_POLICY_WARNING_{x}=OFF' for x in disable_policy_warnings] + + self.fileapi.setup_request() + + # Run CMake + mlog.log() + with mlog.nested(): + mlog.log('Configuring the build directory with', mlog.bold('CMake'), 'version', mlog.cyan(cmake_exe.version())) + mlog.log(mlog.bold('Running CMake with:'), ' '.join(cmake_args)) + mlog.log(mlog.bold(' - build directory: '), self.build_dir.as_posix()) + mlog.log(mlog.bold(' - source directory: '), self.src_dir.as_posix()) + mlog.log(mlog.bold(' - toolchain file: '), toolchain_file.as_posix()) + mlog.log(mlog.bold(' - preload file: '), preload_file.as_posix()) + mlog.log(mlog.bold(' - trace args: '), ' '.join(trace_args)) + mlog.log(mlog.bold(' - disabled policy warnings:'), '[{}]'.format(', '.join(disable_policy_warnings))) + mlog.log() + self.build_dir.mkdir(parents=True, exist_ok=True) + os_env = environ.copy() + os_env['LC_ALL'] = 'C' + final_args = cmake_args + trace_args + cmcmp_args + toolchain.get_cmake_args() + [self.src_dir.as_posix()] + + cmake_exe.set_exec_mode(print_cmout=True, always_capture_stderr=self.trace.requires_stderr()) + rc, _, self.cmake_stderr = cmake_exe.call(final_args, self.build_dir, env=os_env, disable_cache=True) + + mlog.log() + h = mlog.green('SUCCEEDED') if rc == 0 else mlog.red('FAILED') + mlog.log('CMake configuration:', h) + if rc != 0: + # get the last CMake error - We only need the message function for this: + self.trace.functions = {'message': self.trace.functions['message']} + self.trace.parse(self.cmake_stderr) + error = f': {self.trace.errors[-1]}' if self.trace.errors else '' + raise CMakeException(f'Failed to configure the CMake subproject{error}') + + return cmake_exe + + def initialise(self, extra_cmake_options: T.List[str]) -> None: + # Configure the CMake project to generate the file API data + self.configure(extra_cmake_options) + + # Parse the result + self.fileapi.load_reply() + + # Load the buildsystem file list + cmake_files = self.fileapi.get_cmake_sources() + self.bs_files = [x.file for x in cmake_files if not x.is_cmake and not x.is_temp] + self.bs_files = [relative_to_if_possible(x, Path(self.env.get_source_dir())) for x in self.bs_files] + self.bs_files = [x for x in self.bs_files if not path_is_in_root(x, Path(self.env.get_build_dir()), resolve=True)] + self.bs_files = list(OrderedSet(self.bs_files)) + + # Load the codemodel configurations + self.codemodel_configs = self.fileapi.get_cmake_configurations() + + def analyse(self) -> None: + if self.codemodel_configs is None: + raise CMakeException('CMakeInterpreter was not initialized') + + # Clear analyser data + self.project_name = '' + self.languages = [] + self.targets = [] + self.custom_targets = [] + + # Parse the trace + self.trace.parse(self.cmake_stderr) + + # Find all targets + 
added_target_names: T.List[str] = []
+        for i in self.codemodel_configs:
+            for j in i.projects:
+                if not self.project_name:
+                    self.project_name = j.name
+                for k in j.targets:
+                    # Avoid duplicate targets from different configurations and known
+                    # dummy CMake internal target types
+                    if k.type not in skip_targets and k.name not in added_target_names:
+                        added_target_names += [k.name]
+                        self.targets += [ConverterTarget(k, self.env, self.for_machine)]
+
+        # Add interface targets from trace, if not already present.
+        # This step is required because interface targets were removed from
+        # the CMake file API output.
+        api_target_name_list = [x.name for x in self.targets]
+        for i in self.trace.targets.values():
+            if i.type != 'INTERFACE' or i.name in api_target_name_list or i.imported:
+                continue
+            dummy = CMakeTarget({
+                'name': i.name,
+                'type': 'INTERFACE_LIBRARY',
+                'sourceDirectory': self.src_dir,
+                'buildDirectory': self.build_dir,
+            })
+            self.targets += [ConverterTarget(dummy, self.env, self.for_machine)]
+
+        for i in self.trace.custom_targets:
+            self.custom_targets += [ConverterCustomTarget(i, self.env, self.for_machine)]
+
+        # Generate the output_target_map
+        for i in [*self.targets, *self.custom_targets]:
+            assert isinstance(i, (ConverterTarget, ConverterCustomTarget))
+            self.output_target_map.add(i)
+
+        # First pass: Basic target cleanup
+        object_libs = []
+        custom_target_outputs: T.List[str] = []
+        for ctgt in self.custom_targets:
+            ctgt.postprocess(self.output_target_map, self.src_dir, custom_target_outputs, self.trace)
+        for tgt in self.targets:
+            tgt.postprocess(self.output_target_map, self.src_dir, self.subdir, self.install_prefix, self.trace)
+            if tgt.type == 'OBJECT_LIBRARY':
+                object_libs += [tgt]
+            self.languages += [x for x in tgt.languages if x not in self.languages]
+
+        # Second pass: Detect object library dependencies
+        for tgt in self.targets:
+            tgt.process_object_libs(object_libs, self._object_lib_workaround)
+
+        # Third pass: Reassign dependencies to avoid some loops
+        for tgt in self.targets:
+            tgt.process_inter_target_dependencies()
+        for ctgt in self.custom_targets:
+            ctgt.process_inter_target_dependencies()
+
+        # Fourth pass: Remove reassigned dependencies
+        for tgt in self.targets:
+            tgt.cleanup_dependencies()
+
+        mlog.log('CMake project', mlog.bold(self.project_name), 'has', mlog.bold(str(len(self.targets) + len(self.custom_targets))), 'build targets.')
+
+    def pretend_to_be_meson(self, options: TargetOptions) -> CodeBlockNode:
+        if not self.project_name:
+            raise CMakeException('CMakeInterpreter was not analysed')
+
+        def token(tid: str = 'string', val: TYPE_mixed = '') -> Token:
+            return Token(tid, self.subdir.as_posix(), 0, 0, 0, None, val)
+
+        def string(value: str) -> StringNode:
+            return StringNode(token(val=value))
+
+        def id_node(value: str) -> IdNode:
+            return IdNode(token(val=value))
+
+        def number(value: int) -> NumberNode:
+            return NumberNode(token(val=value))
+
+        def nodeify(value: TYPE_mixed_list) -> BaseNode:
+            if isinstance(value, str):
+                return string(value)
+            if isinstance(value, Path):
+                return string(value.as_posix())
+            elif isinstance(value, bool):
+                return BooleanNode(token(val=value))
+            elif isinstance(value, int):
+                return number(value)
+            elif isinstance(value, list):
+                return array(value)
+            elif isinstance(value, BaseNode):
+                return value
+            raise RuntimeError('invalid type of value: {} ({})'.format(type(value).__name__, str(value)))
+
+        def indexed(node: BaseNode, index: int) -> IndexNode:
+            return 
IndexNode(node, nodeify(index)) + + def array(elements: TYPE_mixed_list) -> ArrayNode: + args = ArgumentNode(token()) + if not isinstance(elements, list): + elements = [args] + args.arguments += [nodeify(x) for x in elements if x is not None] + return ArrayNode(args, 0, 0, 0, 0) + + def function(name: str, args: T.Optional[TYPE_mixed_list] = None, kwargs: T.Optional[TYPE_mixed_kwargs] = None) -> FunctionNode: + args = [] if args is None else args + kwargs = {} if kwargs is None else kwargs + args_n = ArgumentNode(token()) + if not isinstance(args, list): + assert isinstance(args, (str, int, bool, Path, BaseNode)) + args = [args] + args_n.arguments = [nodeify(x) for x in args if x is not None] + args_n.kwargs = {id_node(k): nodeify(v) for k, v in kwargs.items() if v is not None} + func_n = FunctionNode(self.subdir.as_posix(), 0, 0, 0, 0, name, args_n) + return func_n + + def method(obj: BaseNode, name: str, args: T.Optional[TYPE_mixed_list] = None, kwargs: T.Optional[TYPE_mixed_kwargs] = None) -> MethodNode: + args = [] if args is None else args + kwargs = {} if kwargs is None else kwargs + args_n = ArgumentNode(token()) + if not isinstance(args, list): + assert isinstance(args, (str, int, bool, Path, BaseNode)) + args = [args] + args_n.arguments = [nodeify(x) for x in args if x is not None] + args_n.kwargs = {id_node(k): nodeify(v) for k, v in kwargs.items() if v is not None} + return MethodNode(self.subdir.as_posix(), 0, 0, obj, name, args_n) + + def assign(var_name: str, value: BaseNode) -> AssignmentNode: + return AssignmentNode(self.subdir.as_posix(), 0, 0, var_name, value) + + # Generate the root code block and the project function call + root_cb = CodeBlockNode(token()) + root_cb.lines += [function('project', [self.project_name] + self.languages)] + + # Add the run script for custom commands + + # Add the targets + processing: T.List[str] = [] + processed: T.Dict[str, T.Dict[str, T.Optional[str]]] = {} + name_map: T.Dict[str, str] = {} + + def extract_tgt(tgt: T.Union[ConverterTarget, ConverterCustomTarget, CustomTargetReference]) -> IdNode: + tgt_name = None + if isinstance(tgt, (ConverterTarget, ConverterCustomTarget)): + tgt_name = tgt.name + elif isinstance(tgt, CustomTargetReference): + tgt_name = tgt.ctgt.name + assert tgt_name is not None and tgt_name in processed + res_var = processed[tgt_name]['tgt'] + return id_node(res_var) if res_var else None + + def detect_cycle(tgt: T.Union[ConverterTarget, ConverterCustomTarget]) -> None: + if tgt.name in processing: + raise CMakeException('Cycle in CMake inputs/dependencies detected') + processing.append(tgt.name) + + def resolve_ctgt_ref(ref: CustomTargetReference) -> T.Union[IdNode, IndexNode]: + tgt_var = extract_tgt(ref) + if len(ref.ctgt.outputs) == 1: + return tgt_var + else: + return indexed(tgt_var, ref.index) + + def process_target(tgt: ConverterTarget) -> None: + detect_cycle(tgt) + + # First handle inter target dependencies + link_with: T.List[IdNode] = [] + objec_libs: T.List[IdNode] = [] + sources: T.List[Path] = [] + generated: T.List[T.Union[IdNode, IndexNode]] = [] + generated_filenames: T.List[str] = [] + custom_targets: T.List[ConverterCustomTarget] = [] + dependencies: T.List[IdNode] = [] + for i in tgt.link_with: + assert isinstance(i, ConverterTarget) + if i.name not in processed: + process_target(i) + link_with += [extract_tgt(i)] + for i in tgt.object_libs: + assert isinstance(i, ConverterTarget) + if i.name not in processed: + process_target(i) + objec_libs += [extract_tgt(i)] + for i in tgt.depends: + if not 
isinstance(i, ConverterCustomTarget):
+                    continue
+                if i.name not in processed:
+                    process_custom_target(i)
+                dependencies += [extract_tgt(i)]
+
+            # Generate the source list and handle generated sources
+            sources += tgt.sources
+            sources += tgt.generated
+
+            for ctgt_ref in tgt.generated_ctgt:
+                ctgt = ctgt_ref.ctgt
+                if ctgt.name not in processed:
+                    process_custom_target(ctgt)
+                generated += [resolve_ctgt_ref(ctgt_ref)]
+                generated_filenames += [ctgt_ref.filename()]
+                if ctgt not in custom_targets:
+                    custom_targets += [ctgt]
+
+            # Add all header files from all used custom targets. This
+            # ensures that all custom targets are built before any
+            # sources of the current target are compiled and thus all
+            # header files are present. This step is necessary because
+            # CMake always ensures that a custom target is executed
+            # before another target if at least one output is used.
+            for ctgt in custom_targets:
+                for j in ctgt.outputs:
+                    if not is_header(j) or j in generated_filenames:
+                        continue
+
+                    generated += [resolve_ctgt_ref(ctgt.get_ref(Path(j)))]
+                    generated_filenames += [j]
+
+            # Determine the meson function to use for the build target
+            tgt_func = tgt.meson_func()
+            if not tgt_func:
+                raise CMakeException(f'Unknown target type "{tgt.type}"')
+
+            # Determine the variable names
+            inc_var = f'{tgt.name}_inc'
+            dir_var = f'{tgt.name}_dir'
+            sys_var = f'{tgt.name}_sys'
+            src_var = f'{tgt.name}_src'
+            dep_var = f'{tgt.name}_dep'
+            tgt_var = tgt.name
+
+            install_tgt = options.get_install(tgt.cmake_name, tgt.install)
+
+            # Generate target kwargs
+            tgt_kwargs: TYPE_mixed_kwargs = {
+                'build_by_default': install_tgt,
+                'link_args': options.get_link_args(tgt.cmake_name, tgt.link_flags + tgt.link_libraries),
+                'link_with': link_with,
+                'include_directories': id_node(inc_var),
+                'install': install_tgt,
+                'override_options': options.get_override_options(tgt.cmake_name, tgt.override_options),
+                'objects': [method(x, 'extract_all_objects') for x in objec_libs],
+            }
+
+            # Only set if installed and only override if it is set
+            if install_tgt and tgt.install_dir:
+                tgt_kwargs['install_dir'] = tgt.install_dir
+
+            # Handle compiler args
+            for key, val in tgt.compile_opts.items():
+                tgt_kwargs[f'{key}_args'] = options.get_compile_args(tgt.cmake_name, key, val)
+
+            # Handle -fPIC, etc
+            if tgt_func == 'executable':
+                tgt_kwargs['pie'] = tgt.pie
+            elif tgt_func == 'static_library':
+                tgt_kwargs['pic'] = tgt.pie
+
+            # declare_dependency kwargs
+            dep_kwargs: TYPE_mixed_kwargs = {
+                'link_args': tgt.link_flags + tgt.link_libraries,
+                'link_with': id_node(tgt_var),
+                'compile_args': tgt.public_compile_opts,
+                'include_directories': id_node(inc_var),
+            }
+
+            if dependencies:
+                generated += dependencies
+
+            # Generate the function nodes
+            dir_node = assign(dir_var, function('include_directories', tgt.includes))
+            sys_node = assign(sys_var, function('include_directories', tgt.sys_includes, {'is_system': True}))
+            inc_node = assign(inc_var, array([id_node(dir_var), id_node(sys_var)]))
+            node_list = [dir_node, sys_node, inc_node]
+            if tgt_func == 'header_only':
+                del dep_kwargs['link_with']
+                dep_node = assign(dep_var, function('declare_dependency', kwargs=dep_kwargs))
+                node_list += [dep_node]
+                src_var = None
+                tgt_var = None
+            else:
+                src_node = assign(src_var, function('files', sources))
+                tgt_node = assign(tgt_var, function(tgt_func, [tgt_var, id_node(src_var), *generated], tgt_kwargs))
+                node_list += [src_node, tgt_node]
+                if tgt_func in {'static_library', 'shared_library'}:
+                    dep_node = assign(dep_var, function('declare_dependency', kwargs=dep_kwargs))
+                    node_list += [dep_node]
+                elif tgt_func == 'shared_module':
+                    del dep_kwargs['link_with']
+                    dep_node = assign(dep_var, function('declare_dependency', kwargs=dep_kwargs))
+                    node_list += [dep_node]
+                else:
+                    dep_var = None
+
+            # Add the nodes to the ast
+            root_cb.lines += node_list
+            processed[tgt.name] = {'inc': inc_var, 'src': src_var, 'dep': dep_var, 'tgt': tgt_var, 'func': tgt_func}
+            name_map[tgt.cmake_name] = tgt.name
+
+        def process_custom_target(tgt: ConverterCustomTarget) -> None:
+            # CMake allows specifying multiple commands in a custom target.
+            # To map this to meson, a helper script is used to execute all
+            # commands in order. This additionally allows setting the working
+            # directory.
+
+            detect_cycle(tgt)
+            tgt_var = tgt.name
+
+            def resolve_source(x: T.Union[str, ConverterTarget, ConverterCustomTarget, CustomTargetReference]) -> T.Union[str, IdNode, IndexNode]:
+                if isinstance(x, ConverterTarget):
+                    if x.name not in processed:
+                        process_target(x)
+                    return extract_tgt(x)
+                if isinstance(x, ConverterCustomTarget):
+                    if x.name not in processed:
+                        process_custom_target(x)
+                    return extract_tgt(x)
+                elif isinstance(x, CustomTargetReference):
+                    if x.ctgt.name not in processed:
+                        process_custom_target(x.ctgt)
+                    return resolve_ctgt_ref(x)
+                else:
+                    return x
+
+            # Generate the command list
+            command: T.List[T.Union[str, IdNode, IndexNode]] = []
+            command += mesonlib.get_meson_command()
+            command += ['--internal', 'cmake_run_ctgt']
+            command += ['-o', '@OUTPUT@']
+            if tgt.original_outputs:
+                command += ['-O'] + [x.as_posix() for x in tgt.original_outputs]
+            command += ['-d', tgt.working_dir.as_posix()]
+
+            # Generate the commands. Subcommands are separated by ';;;'
+            for cmd in tgt.command:
+                command += [resolve_source(x) for x in cmd] + [';;;']
+
+            tgt_kwargs: TYPE_mixed_kwargs = {
+                'input': [resolve_source(x) for x in tgt.inputs],
+                'output': tgt.outputs,
+                'command': command,
+                'depends': [resolve_source(x) for x in tgt.depends],
+            }
+
+            root_cb.lines += [assign(tgt_var, function('custom_target', [tgt.name], tgt_kwargs))]
+            processed[tgt.name] = {'inc': None, 'src': None, 'dep': None, 'tgt': tgt_var, 'func': 'custom_target'}
+            name_map[tgt.cmake_name] = tgt.name
+
+        # Now generate the target function calls
+        for ctgt in self.custom_targets:
+            if ctgt.name not in processed:
+                process_custom_target(ctgt)
+        for tgt in self.targets:
+            if tgt.name not in processed:
+                process_target(tgt)
+
+        self.generated_targets = processed
+        self.internal_name_map = name_map
+        return root_cb
+
+    def target_info(self, target: str) -> T.Optional[T.Dict[str, str]]:
+        # Try resolving the target name
+        # start by checking if there is a 100% match (excluding the name prefix)
+        prx_tgt = _sanitize_cmake_name(target)
+        if prx_tgt in self.generated_targets:
+            return self.generated_targets[prx_tgt]
+        # check if there exists a name mapping
+        if target in self.internal_name_map:
+            target = self.internal_name_map[target]
+            assert target in self.generated_targets
+            return self.generated_targets[target]
+        return None
+
+    def target_list(self) -> T.List[str]:
+        return list(self.internal_name_map.keys())
diff --git a/vendored-meson/meson/mesonbuild/cmake/toolchain.py b/vendored-meson/meson/mesonbuild/cmake/toolchain.py
new file mode 100644
index 000000000000..477629e81164
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/cmake/toolchain.py
@@ -0,0 +1,258 @@
+# Copyright 2020 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the 
"License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from __future__ import annotations + +from pathlib import Path +from .traceparser import CMakeTraceParser +from ..envconfig import CMakeSkipCompilerTest +from .common import language_map, cmake_get_generator_args +from .. import mlog + +import shutil +import typing as T +from enum import Enum +from textwrap import dedent + +if T.TYPE_CHECKING: + from .executor import CMakeExecutor + from ..environment import Environment + from ..compilers import Compiler + from ..mesonlib import MachineChoice + +class CMakeExecScope(Enum): + SUBPROJECT = 'subproject' + DEPENDENCY = 'dependency' + +class CMakeToolchain: + def __init__(self, cmakebin: 'CMakeExecutor', env: 'Environment', for_machine: MachineChoice, exec_scope: CMakeExecScope, build_dir: Path, preload_file: T.Optional[Path] = None) -> None: + self.env = env + self.cmakebin = cmakebin + self.for_machine = for_machine + self.exec_scope = exec_scope + self.preload_file = preload_file + self.build_dir = build_dir + self.build_dir = self.build_dir.resolve() + self.toolchain_file = build_dir / 'CMakeMesonToolchainFile.cmake' + self.cmcache_file = build_dir / 'CMakeCache.txt' + self.minfo = self.env.machines[self.for_machine] + self.properties = self.env.properties[self.for_machine] + self.compilers = self.env.coredata.compilers[self.for_machine] + self.cmakevars = self.env.cmakevars[self.for_machine] + self.cmakestate = self.env.coredata.cmake_cache[self.for_machine] + + self.variables = self.get_defaults() + self.variables.update(self.cmakevars.get_variables()) + + # Determine whether CMake the compiler test should be skipped + skip_status = self.properties.get_cmake_skip_compiler_test() + self.skip_check = skip_status == CMakeSkipCompilerTest.ALWAYS + if skip_status == CMakeSkipCompilerTest.DEP_ONLY and self.exec_scope == CMakeExecScope.DEPENDENCY: + self.skip_check = True + if not self.properties.get_cmake_defaults(): + self.skip_check = False + + assert self.toolchain_file.is_absolute() + + def write(self) -> Path: + if not self.toolchain_file.parent.exists(): + self.toolchain_file.parent.mkdir(parents=True) + self.toolchain_file.write_text(self.generate(), encoding='utf-8') + self.cmcache_file.write_text(self.generate_cache(), encoding='utf-8') + mlog.cmd_ci_include(self.toolchain_file.as_posix()) + return self.toolchain_file + + def get_cmake_args(self) -> T.List[str]: + args = ['-DCMAKE_TOOLCHAIN_FILE=' + self.toolchain_file.as_posix()] + if self.preload_file is not None: + args += ['-DMESON_PRELOAD_FILE=' + self.preload_file.as_posix()] + return args + + @staticmethod + def _print_vars(vars: T.Dict[str, T.List[str]]) -> str: + res = '' + for key, value in vars.items(): + res += 'set(' + key + for i in value: + res += f' "{i}"' + res += ')\n' + return res + + def generate(self) -> str: + res = dedent('''\ + ###################################### + ### AUTOMATICALLY GENERATED FILE ### + ###################################### + + # This file was generated from the configuration in the + # relevant meson machine file. 
See the meson documentation + # https://mesonbuild.com/Machine-files.html for more information + + if(DEFINED MESON_PRELOAD_FILE) + include("${MESON_PRELOAD_FILE}") + endif() + + ''') + + # Escape all \ in the values + for key, value in self.variables.items(): + self.variables[key] = [x.replace('\\', '/') for x in value] + + # Set compiler + if self.skip_check: + self.update_cmake_compiler_state() + res += '# CMake compiler state variables\n' + for lang, vars in self.cmakestate: + res += f'# -- Variables for language {lang}\n' + res += self._print_vars(vars) + res += '\n' + res += '\n' + + # Set variables from the current machine config + res += '# Variables from meson\n' + res += self._print_vars(self.variables) + res += '\n' + + # Add the user provided toolchain file + user_file = self.properties.get_cmake_toolchain_file() + if user_file is not None: + res += dedent(''' + # Load the CMake toolchain file specified by the user + include("{}") + + '''.format(user_file.as_posix())) + + return res + + def generate_cache(self) -> str: + if not self.skip_check: + return '' + + res = '' + for name, v in self.cmakestate.cmake_cache.items(): + res += f'{name}:{v.type}={";".join(v.value)}\n' + return res + + def get_defaults(self) -> T.Dict[str, T.List[str]]: + defaults = {} # type: T.Dict[str, T.List[str]] + + # Do nothing if the user does not want automatic defaults + if not self.properties.get_cmake_defaults(): + return defaults + + # Best effort to map the meson system name to CMAKE_SYSTEM_NAME, which + # is not trivial since CMake lacks a list of all supported + # CMAKE_SYSTEM_NAME values. + SYSTEM_MAP = { + 'android': 'Android', + 'linux': 'Linux', + 'windows': 'Windows', + 'freebsd': 'FreeBSD', + 'darwin': 'Darwin', + } # type: T.Dict[str, str] + + # Only set these in a cross build. 
Otherwise CMake will trip up in native
+        # builds and think they are cross (which causes TRY_RUN() to break)
+        if self.env.is_cross_build(when_building_for=self.for_machine):
+            defaults['CMAKE_SYSTEM_NAME'] = [SYSTEM_MAP.get(self.minfo.system, self.minfo.system)]
+            defaults['CMAKE_SYSTEM_PROCESSOR'] = [self.minfo.cpu_family]
+
+        defaults['CMAKE_SIZEOF_VOID_P'] = ['8' if self.minfo.is_64_bit else '4']
+
+        sys_root = self.properties.get_sys_root()
+        if sys_root:
+            defaults['CMAKE_SYSROOT'] = [sys_root]
+
+        def make_abs(exe: str) -> str:
+            if Path(exe).is_absolute():
+                return exe
+
+            p = shutil.which(exe)
+            if p is None:
+                return exe
+            return p
+
+        # Set the compiler variables
+        for lang, comp_obj in self.compilers.items():
+            prefix = 'CMAKE_{}_'.format(language_map.get(lang, lang.upper()))
+
+            exe_list = comp_obj.get_exelist()
+            if not exe_list:
+                continue
+
+            if len(exe_list) >= 2 and not self.is_cmdline_option(comp_obj, exe_list[1]):
+                defaults[prefix + 'COMPILER_LAUNCHER'] = [make_abs(exe_list[0])]
+                exe_list = exe_list[1:]
+
+            exe_list[0] = make_abs(exe_list[0])
+            defaults[prefix + 'COMPILER'] = exe_list
+            if comp_obj.get_id() == 'clang-cl':
+                defaults['CMAKE_LINKER'] = comp_obj.get_linker_exelist()
+
+        return defaults
+
+    @staticmethod
+    def is_cmdline_option(compiler: 'Compiler', arg: str) -> bool:
+        if compiler.get_argument_syntax() == 'msvc':
+            return arg.startswith('/')
+        else:
+            return arg.startswith('-')
+
+    def update_cmake_compiler_state(self) -> None:
+        # Check if all variables are already cached
+        if self.cmakestate.languages.issuperset(self.compilers.keys()):
+            return
+
+        # Generate the CMakeLists.txt
+        mlog.debug('CMake Toolchain: Calling CMake once to generate the compiler state')
+        languages = list(self.compilers.keys())
+        lang_ids = [language_map.get(x, x.upper()) for x in languages]
+        cmake_content = dedent(f'''
+            cmake_minimum_required(VERSION 3.7)
+            project(CompInfo {' '.join(lang_ids)})
+        ''')
+
+        build_dir = Path(self.env.scratch_dir) / '__CMake_compiler_info__'
+        build_dir.mkdir(parents=True, exist_ok=True)
+        cmake_file = build_dir / 'CMakeLists.txt'
+        cmake_file.write_text(cmake_content, encoding='utf-8')
+
+        # Generate the temporary toolchain file
+        temp_toolchain_file = build_dir / 'CMakeMesonTempToolchainFile.cmake'
+        temp_toolchain_file.write_text(CMakeToolchain._print_vars(self.variables), encoding='utf-8')
+
+        # Configure
+        trace = CMakeTraceParser(self.cmakebin.version(), build_dir, self.env)
+        self.cmakebin.set_exec_mode(print_cmout=False, always_capture_stderr=trace.requires_stderr())
+        cmake_args = []
+        cmake_args += trace.trace_args()
+        cmake_args += cmake_get_generator_args(self.env)
+        cmake_args += [f'-DCMAKE_TOOLCHAIN_FILE={temp_toolchain_file.as_posix()}', '.']
+        rc, _, raw_trace = self.cmakebin.call(cmake_args, build_dir=build_dir, disable_cache=True)
+
+        if rc != 0:
+            mlog.warning('CMake Toolchain: Failed to determine CMake compilers state')
+            return
+
+        # Parse output
+        trace.parse(raw_trace)
+        self.cmakestate.cmake_cache = {**trace.cache}
+
+        vars_by_file = {k.name: v for (k, v) in trace.vars_by_file.items()}
+
+        for lang in languages:
+            lang_cmake = language_map.get(lang, lang.upper())
+            file_name = f'CMake{lang_cmake}Compiler.cmake'
+            vars = vars_by_file.setdefault(file_name, {})
+            vars[f'CMAKE_{lang_cmake}_COMPILER_FORCED'] = ['1']
+            self.cmakestate.update(lang, vars)
diff --git a/vendored-meson/meson/mesonbuild/cmake/traceparser.py b/vendored-meson/meson/mesonbuild/cmake/traceparser.py
new file mode 100644
index 000000000000..7f31f137f80b
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/cmake/traceparser.py
@@ -0,0 +1,825 @@
+# Copyright 2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This class contains the basic functionality needed to run any interpreter
+# or an interpreter-based tool.
+from __future__ import annotations
+
+from .common import CMakeException
+from .generator import parse_generator_expressions
+from .. import mlog
+from ..mesonlib import version_compare
+
+import typing as T
+from pathlib import Path
+from functools import lru_cache
+import re
+import json
+import textwrap
+
+if T.TYPE_CHECKING:
+    from ..environment import Environment
+
+class CMakeTraceLine:
+    def __init__(self, file_str: str, line: int, func: str, args: T.List[str]) -> None:
+        self.file = CMakeTraceLine._to_path(file_str)
+        self.line = line
+        self.func = func.lower()
+        self.args = args
+
+    @staticmethod
+    @lru_cache(maxsize=None)
+    def _to_path(file_str: str) -> Path:
+        return Path(file_str)
+
+    def __repr__(self) -> str:
+        s = 'CMake TRACE: {0}:{1} {2}({3})'
+        return s.format(self.file, self.line, self.func, self.args)
+
+class CMakeCacheEntry(T.NamedTuple):
+    value: T.List[str]
+    type: str
+
+class CMakeTarget:
+    def __init__(
+            self,
+            name: str,
+            target_type: str,
+            properties: T.Optional[T.Dict[str, T.List[str]]] = None,
+            imported: bool = False,
+            tline: T.Optional[CMakeTraceLine] = None
+    ):
+        if properties is None:
+            properties = {}
+        self.name = name
+        self.type = target_type
+        self.properties = properties
+        self.imported = imported
+        self.tline = tline
+        self.depends = []  # type: T.List[str]
+        self.current_bin_dir = None  # type: T.Optional[Path]
+        self.current_src_dir = None  # type: T.Optional[Path]
+
+    def __repr__(self) -> str:
+        s = 'CMake TARGET:\n -- name: {}\n -- type: {}\n -- imported: {}\n -- properties: {{\n{} }}\n -- tline: {}'
+        propSTR = ''
+        for i in self.properties:
+            propSTR += " '{}': {}\n".format(i, self.properties[i])
+        return s.format(self.name, self.type, self.imported, propSTR, self.tline)
+
+    def strip_properties(self) -> None:
+        # Strip the strings in the properties
+        if not self.properties:
+            return
+        for key, val in self.properties.items():
+            self.properties[key] = [x.strip() for x in val]
+            assert all(';' not in x for x in self.properties[key])
+
+class CMakeGeneratorTarget(CMakeTarget):
+    def __init__(self, name: str) -> None:
+        super().__init__(name, 'CUSTOM', {})
+        self.outputs = []  # type: T.List[Path]
+        self._outputs_str = []  # type: T.List[str]
+        self.command = []  # type: T.List[T.List[str]]
+        self.working_dir = None  # type: T.Optional[Path]
+
+class CMakeTraceParser:
+    def __init__(self, cmake_version: str, build_dir: Path, env: 'Environment', permissive: bool = True) -> None:
+        self.vars: T.Dict[str, T.List[str]] = {}
+        self.vars_by_file: T.Dict[Path, T.Dict[str, T.List[str]]] = {}
+        self.targets: T.Dict[str, CMakeTarget] = {}
+        self.cache: T.Dict[str, CMakeCacheEntry] = {}
+
+        self.explicit_headers = set()  # type: T.Set[Path]
+
+        # T.List of targets that were 
added with add_custom_command to generate files + self.custom_targets = [] # type: T.List[CMakeGeneratorTarget] + + self.env = env + self.permissive = permissive # type: bool + self.cmake_version = cmake_version # type: str + self.trace_file = 'cmake_trace.txt' + self.trace_file_path = build_dir / self.trace_file + self.trace_format = 'json-v1' if version_compare(cmake_version, '>=3.17') else 'human' + + self.errors: T.List[str] = [] + + # State for delayed command execution. Delayed command execution is realised + # with a custom CMake file that overrides some functions and adds some + # introspection information to the trace. + self.delayed_commands = [] # type: T.List[str] + self.stored_commands = [] # type: T.List[CMakeTraceLine] + + # All supported functions + self.functions = { + 'set': self._cmake_set, + 'unset': self._cmake_unset, + 'add_executable': self._cmake_add_executable, + 'add_library': self._cmake_add_library, + 'add_custom_command': self._cmake_add_custom_command, + 'add_custom_target': self._cmake_add_custom_target, + 'set_property': self._cmake_set_property, + 'set_target_properties': self._cmake_set_target_properties, + 'target_compile_definitions': self._cmake_target_compile_definitions, + 'target_compile_options': self._cmake_target_compile_options, + 'target_include_directories': self._cmake_target_include_directories, + 'target_link_libraries': self._cmake_target_link_libraries, + 'target_link_options': self._cmake_target_link_options, + 'add_dependencies': self._cmake_add_dependencies, + 'message': self._cmake_message, + + # Special functions defined in the preload script. + # These functions do nothing in the CMake code, but have special + # meaning here in the trace parser. + 'meson_ps_execute_delayed_calls': self._meson_ps_execute_delayed_calls, + 'meson_ps_reload_vars': self._meson_ps_reload_vars, + 'meson_ps_disabled_function': self._meson_ps_disabled_function, + } # type: T.Dict[str, T.Callable[[CMakeTraceLine], None]] + + if version_compare(self.cmake_version, '<3.17.0'): + mlog.deprecation(textwrap.dedent(f'''\ + CMake support for versions <3.17 is deprecated since Meson 0.62.0. + | + | However, Meson was only able to find CMake {self.cmake_version}. + | + | Support for all CMake versions below 3.17.0 will be removed once + | newer CMake versions are more widely adopted. If you encounter + | any errors please try upgrading CMake to a newer version first. 
+            '''), once=True)
+
+    def trace_args(self) -> T.List[str]:
+        arg_map = {
+            'human': ['--trace', '--trace-expand'],
+            'json-v1': ['--trace-expand', '--trace-format=json-v1'],
+        }
+
+        base_args = ['--no-warn-unused-cli']
+        if not self.requires_stderr():
+            base_args += [f'--trace-redirect={self.trace_file}']
+
+        return arg_map[self.trace_format] + base_args
+
+    def requires_stderr(self) -> bool:
+        return version_compare(self.cmake_version, '<3.16')
+
+    def parse(self, trace: T.Optional[str] = None) -> None:
+        # First load the trace (if required)
+        if not self.requires_stderr():
+            if not self.trace_file_path.is_file():
+                raise CMakeException(f'CMake: Trace file "{self.trace_file_path!s}" not found')
+            trace = self.trace_file_path.read_text(errors='ignore', encoding='utf-8')
+        if not trace:
+            raise CMakeException('CMake: The CMake trace was not provided or is empty')
+
+        # Second parse the trace
+        lexer1 = None
+        if self.trace_format == 'human':
+            lexer1 = self._lex_trace_human(trace)
+        elif self.trace_format == 'json-v1':
+            lexer1 = self._lex_trace_json(trace)
+        else:
+            raise CMakeException(f'CMake: Internal error: Invalid trace format {self.trace_format}. Expected [human, json-v1]')
+
+        # Primary pass -- parse everything
+        for l in lexer1:
+            # store the function if its execution should be delayed
+            if l.func in self.delayed_commands:
+                self.stored_commands += [l]
+                continue
+
+            # "Execute" the CMake function if supported
+            fn = self.functions.get(l.func, None)
+            if fn:
+                fn(l)
+
+        # Evaluate generator expressions
+        strlist_gen: T.Callable[[T.List[str]], T.List[str]] = lambda strlist: parse_generator_expressions(';'.join(strlist), self).split(';') if strlist else []
+        pathlist_gen: T.Callable[[T.List[str]], T.List[Path]] = lambda strlist: [Path(x) for x in parse_generator_expressions(';'.join(strlist), self).split(';')] if strlist else []
+
+        self.vars = {k: strlist_gen(v) for k, v in self.vars.items()}
+        self.vars_by_file = {
+            p: {k: strlist_gen(v) for k, v in d.items()}
+            for p, d in self.vars_by_file.items()
+        }
+        self.explicit_headers = {Path(parse_generator_expressions(str(x), self)) for x in self.explicit_headers}
+        self.cache = {
+            k: CMakeCacheEntry(
+                strlist_gen(v.value),
+                v.type
+            )
+            for k, v in self.cache.items()
+        }
+
+        for tgt in self.targets.values():
+            tgtlist_gen: T.Callable[[T.List[str], CMakeTarget], T.List[str]] = lambda strlist, t: parse_generator_expressions(';'.join(strlist), self, context_tgt=t).split(';') if strlist else []
+            tgt.name = parse_generator_expressions(tgt.name, self, context_tgt=tgt)
+            tgt.type = parse_generator_expressions(tgt.type, self, context_tgt=tgt)
+            tgt.properties = {
+                k: tgtlist_gen(v, tgt) for k, v in tgt.properties.items()
+            } if tgt.properties is not None else None
+            tgt.depends = tgtlist_gen(tgt.depends, tgt)
+
+        for ctgt in self.custom_targets:
+            ctgt.outputs = pathlist_gen(ctgt._outputs_str)
+            temp = ctgt.command
+            ctgt.command = [strlist_gen(x) for x in ctgt.command]
+            for command, src in zip(ctgt.command, temp):
+                if command[0] == "":
+                    raise CMakeException(
+                        "We evaluated the cmake variable '{}' to an empty string, which is not a valid path to an executable.".format(src[0])
+                    )
+            ctgt.working_dir = Path(parse_generator_expressions(str(ctgt.working_dir), self)) if ctgt.working_dir is not None else None
+
+        # Postprocess
+        for tgt in self.targets.values():
+            tgt.strip_properties()
+
+    def get_first_cmake_var_of(self, var_list: T.List[str]) -> T.List[str]:
+        # Return the first found CMake variable in the list var_list
+        for i in var_list:
+            if i in self.vars:
+                return self.vars[i]
+
+        return []
+
+    def get_cmake_var(self, var: str) -> T.List[str]:
+        # Return the value of the CMake variable var or an empty list if var does not exist
+        if var in self.vars:
+            return self.vars[var]
+
+        return []
+
+    def var_to_str(self, var: str) -> T.Optional[str]:
+        if var in self.vars and self.vars[var]:
+            return self.vars[var][0]
+
+        return None
+
+    def _str_to_bool(self, expr: T.Union[str, T.List[str]]) -> bool:
+        if not expr:
+            return False
+        if isinstance(expr, list):
+            expr_str = expr[0]
+        else:
+            expr_str = expr
+        expr_str = expr_str.upper()
+        return expr_str not in ['0', 'OFF', 'NO', 'FALSE', 'N', 'IGNORE'] and not expr_str.endswith('NOTFOUND')
+
+    def var_to_bool(self, var: str) -> bool:
+        return self._str_to_bool(self.vars.get(var, []))
+
+    def _gen_exception(self, function: str, error: str, tline: CMakeTraceLine) -> None:
+        # Generate an exception if the parser is not in permissive mode
+
+        if self.permissive:
+            mlog.debug(f'CMake trace warning: {function}() {error}\n{tline}')
+            return None
+        raise CMakeException(f'CMake: {function}() {error}\n{tline}')
+
+    def _cmake_set(self, tline: CMakeTraceLine) -> None:
+        """Handler for the CMake set() function in all varieties.
+
+        comes in three flavors:
+        set(<var> <value> [PARENT_SCOPE])
+        set(<var> <value> CACHE <type> <docstring> [FORCE])
+        set(ENV{<var>} <value>)
+
+        We don't support the ENV variant, and any uses of it will be ignored
+        silently. The other two variants are supported, with some caveats:
+        - we don't properly handle scoping, so calls to set() inside a
+          function without PARENT_SCOPE set could incorrectly shadow the
+          outer scope.
+        - We don't honor the type of CACHE arguments
+        """
+        # DOC: https://cmake.org/cmake/help/latest/command/set.html
+
+        cache_type = None
+        cache_force = 'FORCE' in tline.args
+        try:
+            cache_idx = tline.args.index('CACHE')
+            cache_type = tline.args[cache_idx + 1]
+        except (ValueError, IndexError):
+            pass
+
+        # 1st remove PARENT_SCOPE and CACHE from args
+        args = []
+        for i in tline.args:
+            if not i or i == 'PARENT_SCOPE':
+                continue
+
+            # Discard everything after the CACHE keyword
+            if i == 'CACHE':
+                break
+
+            args.append(i)
+
+        if len(args) < 1:
+            return self._gen_exception('set', 'requires at least one argument', tline)
+
+        # Now that we've removed extra arguments all that should be left is the
+        # variable identifier and the value, join the value back together to
+        # ensure spaces in the value are correctly handled. This assumes that
+        # variable names don't have spaces. Please don't do that...
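+        # A (hypothetical) trace line `set(FOO a b c)` therefore arrives here as
+        # args == ['FOO', 'a', 'b', 'c']; joining gives value == 'a b c', and the
+        # parser stores self.vars['FOO'] == ['a b c'].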
+ identifier = args.pop(0) + value = ' '.join(args) + + # Write to the CMake cache instead + if cache_type: + # Honor how the CMake FORCE parameter works + if identifier not in self.cache or cache_force: + self.cache[identifier] = CMakeCacheEntry(value.split(';'), cache_type) + + if not value: + # Same as unset + if identifier in self.vars: + del self.vars[identifier] + else: + self.vars[identifier] = value.split(';') + self.vars_by_file.setdefault(tline.file, {})[identifier] = value.split(';') + + def _cmake_unset(self, tline: CMakeTraceLine) -> None: + # DOC: https://cmake.org/cmake/help/latest/command/unset.html + if len(tline.args) < 1: + return self._gen_exception('unset', 'requires at least one argument', tline) + + if tline.args[0] in self.vars: + del self.vars[tline.args[0]] + + def _cmake_add_executable(self, tline: CMakeTraceLine) -> None: + # DOC: https://cmake.org/cmake/help/latest/command/add_executable.html + args = list(tline.args) # Make a working copy + + # Make sure the exe is imported + is_imported = True + if 'IMPORTED' not in args: + return self._gen_exception('add_executable', 'non imported executables are not supported', tline) + + args.remove('IMPORTED') + + if len(args) < 1: + return self._gen_exception('add_executable', 'requires at least 1 argument', tline) + + self.targets[args[0]] = CMakeTarget(args[0], 'EXECUTABLE', {}, tline=tline, imported=is_imported) + + def _cmake_add_library(self, tline: CMakeTraceLine) -> None: + # DOC: https://cmake.org/cmake/help/latest/command/add_library.html + args = list(tline.args) # Make a working copy + + # Make sure the lib is imported + if 'INTERFACE' in args: + args.remove('INTERFACE') + + if len(args) < 1: + return self._gen_exception('add_library', 'interface library name not specified', tline) + + self.targets[args[0]] = CMakeTarget(args[0], 'INTERFACE', {}, tline=tline, imported='IMPORTED' in args) + elif 'IMPORTED' in args: + args.remove('IMPORTED') + + # Now, only look at the first two arguments (target_name and target_type) and ignore the rest + if len(args) < 2: + return self._gen_exception('add_library', 'requires at least 2 arguments', tline) + + self.targets[args[0]] = CMakeTarget(args[0], args[1], {}, tline=tline, imported=True) + elif 'ALIAS' in args: + args.remove('ALIAS') + + # Now, only look at the first two arguments (target_name and target_ref) and ignore the rest + if len(args) < 2: + return self._gen_exception('add_library', 'requires at least 2 arguments', tline) + + # Simulate the ALIAS with INTERFACE_LINK_LIBRARIES + self.targets[args[0]] = CMakeTarget(args[0], 'ALIAS', {'INTERFACE_LINK_LIBRARIES': [args[1]]}, tline=tline) + elif 'OBJECT' in args: + return self._gen_exception('add_library', 'OBJECT libraries are not supported', tline) + else: + self.targets[args[0]] = CMakeTarget(args[0], 'NORMAL', {}, tline=tline) + + def _cmake_add_custom_command(self, tline: CMakeTraceLine, name: T.Optional[str] = None) -> None: + # DOC: https://cmake.org/cmake/help/latest/command/add_custom_command.html + args = self._flatten_args(list(tline.args)) # Commands can be passed as ';' separated lists + + if not args: + return self._gen_exception('add_custom_command', 'requires at least 1 argument', tline) + + # Skip the second function signature + if args[0] == 'TARGET': + return self._gen_exception('add_custom_command', 'TARGET syntax is currently not supported', tline) + + magic_keys = ['OUTPUT', 'COMMAND', 'MAIN_DEPENDENCY', 'DEPENDS', 'BYPRODUCTS', + 'IMPLICIT_DEPENDS', 'WORKING_DIRECTORY', 'COMMENT', 'DEPFILE', + 
'JOB_POOL', 'VERBATIM', 'APPEND', 'USES_TERMINAL', 'COMMAND_EXPAND_LISTS']
+
+        target = CMakeGeneratorTarget(name)
+
+        def handle_output(key: str, target: CMakeGeneratorTarget) -> None:
+            target._outputs_str += [key]
+
+        def handle_command(key: str, target: CMakeGeneratorTarget) -> None:
+            if key == 'ARGS':
+                return
+            target.command[-1] += [key]
+
+        def handle_depends(key: str, target: CMakeGeneratorTarget) -> None:
+            target.depends += [key]
+
+        working_dir = None
+
+        def handle_working_dir(key: str, target: CMakeGeneratorTarget) -> None:
+            nonlocal working_dir
+            if working_dir is None:
+                working_dir = key
+            else:
+                working_dir += ' '
+                working_dir += key
+
+        fn = None
+
+        for i in args:
+            if i in magic_keys:
+                if i == 'OUTPUT':
+                    fn = handle_output
+                elif i == 'DEPENDS':
+                    fn = handle_depends
+                elif i == 'WORKING_DIRECTORY':
+                    fn = handle_working_dir
+                elif i == 'COMMAND':
+                    fn = handle_command
+                    target.command += [[]]
+                else:
+                    fn = None
+                continue
+
+            if fn is not None:
+                fn(i, target)
+
+        cbinary_dir = self.var_to_str('MESON_PS_CMAKE_CURRENT_BINARY_DIR')
+        csource_dir = self.var_to_str('MESON_PS_CMAKE_CURRENT_SOURCE_DIR')
+
+        target.working_dir = Path(working_dir) if working_dir else None
+        target.current_bin_dir = Path(cbinary_dir) if cbinary_dir else None
+        target.current_src_dir = Path(csource_dir) if csource_dir else None
+        target._outputs_str = self._guess_files(target._outputs_str)
+        target.depends = self._guess_files(target.depends)
+        target.command = [self._guess_files(x) for x in target.command]
+
+        self.custom_targets += [target]
+        if name:
+            self.targets[name] = target
+
+    def _cmake_add_custom_target(self, tline: CMakeTraceLine) -> None:
+        # DOC: https://cmake.org/cmake/help/latest/command/add_custom_target.html
+        # Only the first parameter (the target name) is interesting to us
+        if len(tline.args) < 1:
+            return self._gen_exception('add_custom_target', 'requires at least one argument', tline)
+
+        # It's pretty much the same as a custom command
+        self._cmake_add_custom_command(tline, tline.args[0])
+
+    def _cmake_set_property(self, tline: CMakeTraceLine) -> None:
+        # DOC: https://cmake.org/cmake/help/latest/command/set_property.html
+        args = list(tline.args)
+
+        scope = args.pop(0)
+
+        append = False
+        targets = []
+        while args:
+            curr = args.pop(0)
+            # XXX: APPEND_STRING is specifically *not* supposed to create a
+            # list, is treating them as aliases really okay?
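+            # Sketch of the walk for a (hypothetical) call
+            # `set_property(TARGET tgt1 tgt2 APPEND PROPERTY COMPILE_DEFINITIONS FOO)`:
+            # scope == 'TARGET', targets == ['tgt1', 'tgt2'], append == True, and
+            # args is left as ['COMPILE_DEFINITIONS', 'FOO'] once PROPERTY is hit.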
+ if curr in {'APPEND', 'APPEND_STRING'}: + append = True + continue + + if curr == 'PROPERTY': + break + + targets += curr.split(';') + + if not args: + return self._gen_exception('set_property', 'failed to parse argument list', tline) + + if len(args) == 1: + # Tries to set property to nothing so nothing has to be done + return + + identifier = args.pop(0) + if self.trace_format == 'human': + value = ' '.join(args).split(';') + else: + value = [y for x in args for y in x.split(';')] + if not value: + return + + def do_target(t: str) -> None: + if t not in self.targets: + return self._gen_exception('set_property', f'TARGET {t} not found', tline) + + tgt = self.targets[t] + if identifier not in tgt.properties: + tgt.properties[identifier] = [] + + if append: + tgt.properties[identifier] += value + else: + tgt.properties[identifier] = value + + def do_source(src: str) -> None: + if identifier != 'HEADER_FILE_ONLY' or not self._str_to_bool(value): + return + + current_src_dir = self.var_to_str('MESON_PS_CMAKE_CURRENT_SOURCE_DIR') + if not current_src_dir: + mlog.warning(textwrap.dedent('''\ + CMake trace: set_property(SOURCE) called before the preload script was loaded. + Unable to determine CMAKE_CURRENT_SOURCE_DIR. This can lead to build errors. + ''')) + current_src_dir = '.' + + cur_p = Path(current_src_dir) + src_p = Path(src) + + if not src_p.is_absolute(): + src_p = cur_p / src_p + self.explicit_headers.add(src_p) + + if scope == 'TARGET': + for i in targets: + do_target(i) + elif scope == 'SOURCE': + files = self._guess_files(targets) + for i in files: + do_source(i) + + def _cmake_set_target_properties(self, tline: CMakeTraceLine) -> None: + # DOC: https://cmake.org/cmake/help/latest/command/set_target_properties.html + args = list(tline.args) + + targets = [] + while args: + curr = args.pop(0) + if curr == 'PROPERTIES': + break + + targets.append(curr) + + # Now we need to try to reconstitute the original quoted format of the + # arguments, as a property value could have spaces in it. Unlike + # set_property() this is not context free. There are two approaches I + # can think of, both have drawbacks: + # + # 1. Assume that the property will be capitalized ([A-Z_]), this is + # convention but cmake doesn't require it. + # 2. Maintain a copy of the list here: https://cmake.org/cmake/help/latest/manual/cmake-properties.7.html#target-properties + # + # Neither of these is awesome for obvious reasons. I'm going to try + # option 1 first and fall back to 2, as 1 requires less code and less + # synchronization for cmake changes. 
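+        # As an illustration (hypothetical human-format input): once the targets and
+        # the PROPERTIES keyword are consumed, args == ['PROP_A', 'a', 'b', 'PROP_B', 'c']
+        # is regrouped by the heuristic below into [('PROP_A', ['a b']), ('PROP_B', ['c'])].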
+ # + # With the JSON output format, introduced in CMake 3.17, spaces are + # handled properly and we don't have to do either options + + arglist = [] # type: T.List[T.Tuple[str, T.List[str]]] + if self.trace_format == 'human': + name = args.pop(0) + values = [] # type: T.List[str] + prop_regex = re.compile(r'^[A-Z_]+$') + for a in args: + if prop_regex.match(a): + if values: + arglist.append((name, ' '.join(values).split(';'))) + name = a + values = [] + else: + values.append(a) + if values: + arglist.append((name, ' '.join(values).split(';'))) + else: + arglist = [(x[0], x[1].split(';')) for x in zip(args[::2], args[1::2])] + + for name, value in arglist: + for i in targets: + if i not in self.targets: + return self._gen_exception('set_target_properties', f'TARGET {i} not found', tline) + + self.targets[i].properties[name] = value + + def _cmake_add_dependencies(self, tline: CMakeTraceLine) -> None: + # DOC: https://cmake.org/cmake/help/latest/command/add_dependencies.html + args = list(tline.args) + + if len(args) < 2: + return self._gen_exception('add_dependencies', 'takes at least 2 arguments', tline) + + target = self.targets.get(args[0]) + if not target: + return self._gen_exception('add_dependencies', 'target not found', tline) + + for i in args[1:]: + target.depends += i.split(';') + + def _cmake_target_compile_definitions(self, tline: CMakeTraceLine) -> None: + # DOC: https://cmake.org/cmake/help/latest/command/target_compile_definitions.html + self._parse_common_target_options('target_compile_definitions', 'COMPILE_DEFINITIONS', 'INTERFACE_COMPILE_DEFINITIONS', tline) + + def _cmake_target_compile_options(self, tline: CMakeTraceLine) -> None: + # DOC: https://cmake.org/cmake/help/latest/command/target_compile_options.html + self._parse_common_target_options('target_compile_options', 'COMPILE_OPTIONS', 'INTERFACE_COMPILE_OPTIONS', tline) + + def _cmake_target_include_directories(self, tline: CMakeTraceLine) -> None: + # DOC: https://cmake.org/cmake/help/latest/command/target_include_directories.html + self._parse_common_target_options('target_include_directories', 'INCLUDE_DIRECTORIES', 'INTERFACE_INCLUDE_DIRECTORIES', tline, ignore=['SYSTEM', 'BEFORE'], paths=True) + + def _cmake_target_link_options(self, tline: CMakeTraceLine) -> None: + # DOC: https://cmake.org/cmake/help/latest/command/target_link_options.html + self._parse_common_target_options('target_link_options', 'LINK_OPTIONS', 'INTERFACE_LINK_OPTIONS', tline) + + def _cmake_target_link_libraries(self, tline: CMakeTraceLine) -> None: + # DOC: https://cmake.org/cmake/help/latest/command/target_link_libraries.html + self._parse_common_target_options('target_link_options', 'LINK_LIBRARIES', 'INTERFACE_LINK_LIBRARIES', tline) + + def _cmake_message(self, tline: CMakeTraceLine) -> None: + # DOC: https://cmake.org/cmake/help/latest/command/message.html + args = list(tline.args) + + if len(args) < 1: + return self._gen_exception('message', 'takes at least 1 argument', tline) + + if args[0].upper().strip() not in ['FATAL_ERROR', 'SEND_ERROR']: + return + + self.errors += [' '.join(args[1:])] + + def _parse_common_target_options(self, func: str, private_prop: str, interface_prop: str, tline: CMakeTraceLine, ignore: T.Optional[T.List[str]] = None, paths: bool = False) -> None: + if ignore is None: + ignore = ['BEFORE'] + + args = list(tline.args) + + if len(args) < 1: + return self._gen_exception(func, 'requires at least one argument', tline) + + target = args[0] + if target not in self.targets: + return self._gen_exception(func, 
f'TARGET {target} not found', tline) + + interface = [] + private = [] + + mode = 'PUBLIC' + for i in args[1:]: + if i in ignore: + continue + + if i in {'INTERFACE', 'LINK_INTERFACE_LIBRARIES', 'PUBLIC', 'PRIVATE', 'LINK_PUBLIC', 'LINK_PRIVATE'}: + mode = i + continue + + if mode in {'INTERFACE', 'LINK_INTERFACE_LIBRARIES', 'PUBLIC', 'LINK_PUBLIC'}: + interface += i.split(';') + + if mode in {'PUBLIC', 'PRIVATE', 'LINK_PRIVATE'}: + private += i.split(';') + + if paths: + interface = self._guess_files(interface) + private = self._guess_files(private) + + interface = [x for x in interface if x] + private = [x for x in private if x] + + for j in [(private_prop, private), (interface_prop, interface)]: + if not j[0] in self.targets[target].properties: + self.targets[target].properties[j[0]] = [] + + self.targets[target].properties[j[0]] += j[1] + + def _meson_ps_execute_delayed_calls(self, tline: CMakeTraceLine) -> None: + for l in self.stored_commands: + fn = self.functions.get(l.func, None) + if fn: + fn(l) + + # clear the stored commands + self.stored_commands = [] + + def _meson_ps_reload_vars(self, tline: CMakeTraceLine) -> None: + self.delayed_commands = self.get_cmake_var('MESON_PS_DELAYED_CALLS') + + def _meson_ps_disabled_function(self, tline: CMakeTraceLine) -> None: + args = list(tline.args) + if not args: + mlog.error('Invalid preload.cmake script! At least one argument to `meson_ps_disabled_function` is expected') + return + mlog.warning(f'The CMake function "{args[0]}" was disabled to avoid compatibility issues with Meson.') + + def _lex_trace_human(self, trace: str) -> T.Generator[CMakeTraceLine, None, None]: + # The trace format is: '(): ( )\n' + reg_tline = re.compile(r'\s*(.*\.(cmake|txt))\(([0-9]+)\):\s*(\w+)\(([\s\S]*?) ?\)\s*\n', re.MULTILINE) + reg_other = re.compile(r'[^\n]*\n') + loc = 0 + while loc < len(trace): + mo_file_line = reg_tline.match(trace, loc) + if not mo_file_line: + skip_match = reg_other.match(trace, loc) + if not skip_match: + print(trace[loc:]) + raise CMakeException('Failed to parse CMake trace') + + loc = skip_match.end() + continue + + loc = mo_file_line.end() + + file = mo_file_line.group(1) + line = mo_file_line.group(3) + func = mo_file_line.group(4) + args = mo_file_line.group(5) + argl = args.split(' ') + argl = [a.strip() for a in argl] + + yield CMakeTraceLine(file, int(line), func, argl) + + def _lex_trace_json(self, trace: str) -> T.Generator[CMakeTraceLine, None, None]: + lines = trace.splitlines(keepends=False) + lines.pop(0) # The first line is the version + for i in lines: + data = json.loads(i) + assert isinstance(data['file'], str) + assert isinstance(data['line'], int) + assert isinstance(data['cmd'], str) + assert isinstance(data['args'], list) + args = data['args'] + for j in args: + assert isinstance(j, str) + yield CMakeTraceLine(data['file'], data['line'], data['cmd'], args) + + def _flatten_args(self, args: T.List[str]) -> T.List[str]: + # Split lists in arguments + res = [] # type: T.List[str] + for i in args: + res += i.split(';') + return res + + def _guess_files(self, broken_list: T.List[str]) -> T.List[str]: + # Nothing has to be done for newer formats + if self.trace_format != 'human': + return broken_list + + # Try joining file paths that contain spaces + + reg_start = re.compile(r'^([A-Za-z]:)?/(.*/)*[^./]+$') + reg_end = re.compile(r'^.*\.[a-zA-Z]+$') + + fixed_list = [] # type: T.List[str] + curr_str = None # type: T.Optional[str] + path_found = False # type: bool + + for i in broken_list: + if curr_str is None: + 
curr_str = i
+                path_found = False
+            elif Path(curr_str).is_file():
+                # Abort concatenation if curr_str is an existing file
+                fixed_list += [curr_str]
+                curr_str = i
+                path_found = False
+            elif not reg_start.match(curr_str):
+                # Abort concatenation if curr_str no longer matches the regex
+                fixed_list += [curr_str]
+                curr_str = i
+                path_found = False
+            elif reg_end.match(i):
+                # File detected
+                curr_str = f'{curr_str} {i}'
+                fixed_list += [curr_str]
+                curr_str = None
+                path_found = False
+            elif Path(f'{curr_str} {i}').exists():
+                # Path detected
+                curr_str = f'{curr_str} {i}'
+                path_found = True
+            elif path_found:
+                # Add path to fixed_list after ensuring the whole path is in curr_str
+                fixed_list += [curr_str]
+                curr_str = i
+                path_found = False
+            else:
+                curr_str = f'{curr_str} {i}'
+                path_found = False
+
+        if curr_str:
+            fixed_list += [curr_str]
+        return fixed_list
diff --git a/vendored-meson/meson/mesonbuild/cmake/tracetargets.py b/vendored-meson/meson/mesonbuild/cmake/tracetargets.py
new file mode 100644
index 000000000000..338364dbd3f5
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/cmake/tracetargets.py
@@ -0,0 +1,119 @@
+# SPDX-License-Identifier: Apache-2.0
+# Copyright 2021 The Meson development team
+from __future__ import annotations
+
+from .common import cmake_is_debug
+from .. import mlog
+
+from pathlib import Path
+import re
+import typing as T
+
+if T.TYPE_CHECKING:
+    from .traceparser import CMakeTraceParser
+    from ..environment import Environment
+    from ..compilers import Compiler
+    from ..dependencies import MissingCompiler
+
+class ResolvedTarget:
+    def __init__(self) -> None:
+        self.include_directories: T.List[str] = []
+        self.link_flags: T.List[str] = []
+        self.public_compile_opts: T.List[str] = []
+        self.libraries: T.List[str] = []
+
+def resolve_cmake_trace_targets(target_name: str,
+                                trace: 'CMakeTraceParser',
+                                env: 'Environment',
+                                *,
+                                clib_compiler: T.Optional[T.Union['MissingCompiler', 'Compiler']] = None,
+                                not_found_warning: T.Callable[[str], None] = lambda x: None) -> ResolvedTarget:
+    res = ResolvedTarget()
+    targets = [target_name]
+
+    # recognise arguments we should pass directly to the linker
+    reg_is_lib = re.compile(r'^(-l[a-zA-Z0-9_]+|-l?pthread)$')
+    reg_is_maybe_bare_lib = re.compile(r'^[a-zA-Z0-9_]+$')
+
+    is_debug = cmake_is_debug(env)
+
+    processed_targets: T.List[str] = []
+    while len(targets) > 0:
+        curr = targets.pop(0)
+
+        # Skip already processed targets
+        if curr in processed_targets:
+            continue
+
+        if curr not in trace.targets:
+            if reg_is_lib.match(curr):
+                res.libraries += [curr]
+            elif Path(curr).is_absolute() and Path(curr).exists():
+                res.libraries += [curr]
+            elif env.machines.build.is_windows() and reg_is_maybe_bare_lib.match(curr) and clib_compiler:
+                # On Windows, CMake library dependencies can be passed as bare library names,
+                # CMake brute-forces a combination of prefix/suffix combinations to find the
+                # right library. Assume any bare argument passed which is not also a CMake
+                # target must be a system library we should try to link against.
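+                # For example, a bare 'zlib' (hypothetical name) that is not a CMake
+                # target is resolved through the compiler's library lookup and the
+                # resulting linker arguments are appended; note that find_library()
+                # may return None when the probe fails, in which case nothing usable
+                # is added here.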
+ res.libraries += clib_compiler.find_library(curr, env, []) + else: + not_found_warning(curr) + continue + + tgt = trace.targets[curr] + cfgs = [] + cfg = '' + mlog.debug(tgt) + + if 'INTERFACE_INCLUDE_DIRECTORIES' in tgt.properties: + res.include_directories += [x for x in tgt.properties['INTERFACE_INCLUDE_DIRECTORIES'] if x] + + if 'INTERFACE_LINK_OPTIONS' in tgt.properties: + res.link_flags += [x for x in tgt.properties['INTERFACE_LINK_OPTIONS'] if x] + + if 'INTERFACE_COMPILE_DEFINITIONS' in tgt.properties: + res.public_compile_opts += ['-D' + re.sub('^-D', '', x) for x in tgt.properties['INTERFACE_COMPILE_DEFINITIONS'] if x] + + if 'INTERFACE_COMPILE_OPTIONS' in tgt.properties: + res.public_compile_opts += [x for x in tgt.properties['INTERFACE_COMPILE_OPTIONS'] if x] + + if 'IMPORTED_CONFIGURATIONS' in tgt.properties: + cfgs = [x for x in tgt.properties['IMPORTED_CONFIGURATIONS'] if x] + cfg = cfgs[0] + + if is_debug: + if 'DEBUG' in cfgs: + cfg = 'DEBUG' + elif 'RELEASE' in cfgs: + cfg = 'RELEASE' + else: + if 'RELEASE' in cfgs: + cfg = 'RELEASE' + + if f'IMPORTED_IMPLIB_{cfg}' in tgt.properties: + res.libraries += [x for x in tgt.properties[f'IMPORTED_IMPLIB_{cfg}'] if x] + elif 'IMPORTED_IMPLIB' in tgt.properties: + res.libraries += [x for x in tgt.properties['IMPORTED_IMPLIB'] if x] + elif f'IMPORTED_LOCATION_{cfg}' in tgt.properties: + res.libraries += [x for x in tgt.properties[f'IMPORTED_LOCATION_{cfg}'] if x] + elif 'IMPORTED_LOCATION' in tgt.properties: + res.libraries += [x for x in tgt.properties['IMPORTED_LOCATION'] if x] + + if 'LINK_LIBRARIES' in tgt.properties: + targets += [x for x in tgt.properties['LINK_LIBRARIES'] if x] + if 'INTERFACE_LINK_LIBRARIES' in tgt.properties: + targets += [x for x in tgt.properties['INTERFACE_LINK_LIBRARIES'] if x] + + if f'IMPORTED_LINK_DEPENDENT_LIBRARIES_{cfg}' in tgt.properties: + targets += [x for x in tgt.properties[f'IMPORTED_LINK_DEPENDENT_LIBRARIES_{cfg}'] if x] + elif 'IMPORTED_LINK_DEPENDENT_LIBRARIES' in tgt.properties: + targets += [x for x in tgt.properties['IMPORTED_LINK_DEPENDENT_LIBRARIES'] if x] + + processed_targets += [curr] + + res.include_directories = sorted(set(res.include_directories)) + res.link_flags = sorted(set(res.link_flags)) + res.public_compile_opts = sorted(set(res.public_compile_opts)) + res.libraries = sorted(set(res.libraries)) + + return res diff --git a/vendored-meson/meson/mesonbuild/compilers/__init__.py b/vendored-meson/meson/mesonbuild/compilers/__init__.py new file mode 100644 index 000000000000..3d1a23e7210f --- /dev/null +++ b/vendored-meson/meson/mesonbuild/compilers/__init__.py @@ -0,0 +1,99 @@ +# Copyright 2017 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Public symbols for compilers sub-package when using 'from . 
import compilers' +__all__ = [ + 'Compiler', + 'CompileResult', + 'RunResult', + + 'all_languages', + 'base_options', + 'clib_langs', + 'clink_langs', + 'c_suffixes', + 'cpp_suffixes', + 'get_base_compile_args', + 'get_base_link_args', + 'is_assembly', + 'is_header', + 'is_library', + 'is_llvm_ir', + 'is_object', + 'is_source', + 'is_known_suffix', + 'lang_suffixes', + 'LANGUAGES_USING_LDFLAGS', + 'sort_clink', + 'SUFFIX_TO_LANG', + + 'compiler_from_language', + 'detect_compiler_for', + 'detect_static_linker', + 'detect_c_compiler', + 'detect_cpp_compiler', + 'detect_cuda_compiler', + 'detect_fortran_compiler', + 'detect_objc_compiler', + 'detect_objcpp_compiler', + 'detect_java_compiler', + 'detect_cs_compiler', + 'detect_vala_compiler', + 'detect_rust_compiler', + 'detect_d_compiler', + 'detect_swift_compiler', +] + +# Bring symbols from each module into compilers sub-package namespace +from .compilers import ( + Compiler, + CompileResult, + RunResult, + all_languages, + base_options, + clib_langs, + clink_langs, + c_suffixes, + cpp_suffixes, + get_base_compile_args, + get_base_link_args, + is_header, + is_source, + is_assembly, + is_llvm_ir, + is_object, + is_library, + is_known_suffix, + lang_suffixes, + LANGUAGES_USING_LDFLAGS, + sort_clink, + SUFFIX_TO_LANG, +) +from .detect import ( + compiler_from_language, + detect_compiler_for, + detect_static_linker, + detect_c_compiler, + detect_cpp_compiler, + detect_cuda_compiler, + detect_objc_compiler, + detect_objcpp_compiler, + detect_fortran_compiler, + detect_java_compiler, + detect_cs_compiler, + detect_vala_compiler, + detect_rust_compiler, + detect_d_compiler, + detect_swift_compiler, +) diff --git a/vendored-meson/meson/mesonbuild/compilers/asm.py b/vendored-meson/meson/mesonbuild/compilers/asm.py new file mode 100644 index 000000000000..f25473b20fbc --- /dev/null +++ b/vendored-meson/meson/mesonbuild/compilers/asm.py @@ -0,0 +1,335 @@ +import os +import typing as T + +from ..mesonlib import EnvironmentException, OptionKey, get_meson_command +from .compilers import Compiler +from .mixins.metrowerks import MetrowerksCompiler, mwasmarm_instruction_set_args, mwasmeppc_instruction_set_args + +if T.TYPE_CHECKING: + from ..environment import Environment + from ..linkers.linkers import DynamicLinker + from ..mesonlib import MachineChoice + from ..envconfig import MachineInfo + +nasm_optimization_args: T.Dict[str, T.List[str]] = { + 'plain': [], + '0': ['-O0'], + 'g': ['-O0'], + '1': ['-O1'], + '2': ['-Ox'], + '3': ['-Ox'], + 's': ['-Ox'], +} + + +class NasmCompiler(Compiler): + language = 'nasm' + id = 'nasm' + + # https://learn.microsoft.com/en-us/cpp/c-runtime-library/crt-library-features + crt_args: T.Dict[str, T.List[str]] = { + 'none': [], + 'md': ['/DEFAULTLIB:ucrt.lib', '/DEFAULTLIB:vcruntime.lib', '/DEFAULTLIB:msvcrt.lib'], + 'mdd': ['/DEFAULTLIB:ucrtd.lib', '/DEFAULTLIB:vcruntimed.lib', '/DEFAULTLIB:msvcrtd.lib'], + 'mt': ['/DEFAULTLIB:libucrt.lib', '/DEFAULTLIB:libvcruntime.lib', '/DEFAULTLIB:libcmt.lib'], + 'mtd': ['/DEFAULTLIB:libucrtd.lib', '/DEFAULTLIB:libvcruntimed.lib', '/DEFAULTLIB:libcmtd.lib'], + } + + def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, + for_machine: 'MachineChoice', info: 'MachineInfo', + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None, is_cross: bool = False): + super().__init__(ccache, exelist, version, for_machine, info, linker, full_version, is_cross) + if 'link' in self.linker.id: + self.base_options.add(OptionKey('b_vscrt')) + + def 
needs_static_linker(self) -> bool: + return True + + def get_always_args(self) -> T.List[str]: + cpu = '64' if self.info.is_64_bit else '32' + if self.info.is_windows() or self.info.is_cygwin(): + plat = 'win' + define = f'WIN{cpu}' + elif self.info.is_darwin(): + plat = 'macho' + define = 'MACHO' + else: + plat = 'elf' + define = 'ELF' + args = ['-f', f'{plat}{cpu}', f'-D{define}'] + if self.info.is_64_bit: + args.append('-D__x86_64__') + return args + + def get_werror_args(self) -> T.List[str]: + return ['-Werror'] + + def get_output_args(self, outputname: str) -> T.List[str]: + return ['-o', outputname] + + def unix_args_to_native(self, args: T.List[str]) -> T.List[str]: + outargs: T.List[str] = [] + for arg in args: + if arg == '-pthread': + continue + outargs.append(arg) + return outargs + + def get_optimization_args(self, optimization_level: str) -> T.List[str]: + return nasm_optimization_args[optimization_level] + + def get_debug_args(self, is_debug: bool) -> T.List[str]: + if is_debug: + if self.info.is_windows(): + return [] + return ['-g', '-F', 'dwarf'] + return [] + + def get_depfile_suffix(self) -> str: + return 'd' + + def get_dependency_gen_args(self, outtarget: str, outfile: str) -> T.List[str]: + return ['-MD', outfile, '-MQ', outtarget] + + def sanity_check(self, work_dir: str, environment: 'Environment') -> None: + if self.info.cpu_family not in {'x86', 'x86_64'}: + raise EnvironmentException(f'ASM compiler {self.id!r} does not support {self.info.cpu_family} CPU family') + + def get_buildtype_args(self, buildtype: str) -> T.List[str]: + # FIXME: Not implemented + return [] + + def get_pic_args(self) -> T.List[str]: + return [] + + def get_include_args(self, path: str, is_system: bool) -> T.List[str]: + if not path: + path = '.' + return ['-I' + path] + + def compute_parameters_with_absolute_paths(self, parameter_list: T.List[str], + build_dir: str) -> T.List[str]: + for idx, i in enumerate(parameter_list): + if i[:2] == '-I': + parameter_list[idx] = i[:2] + os.path.normpath(os.path.join(build_dir, i[2:])) + return parameter_list + + def get_crt_compile_args(self, crt_val: str, buildtype: str) -> T.List[str]: + return [] + + # Linking ASM-only objects into an executable or DLL + # require this, otherwise it'll fail to find + # _WinMain or _DllMainCRTStartup. + def get_crt_link_args(self, crt_val: str, buildtype: str) -> T.List[str]: + if not self.info.is_windows(): + return [] + if crt_val in self.crt_args: + return self.crt_args[crt_val] + assert crt_val in {'from_buildtype', 'static_from_buildtype'} + dbg = 'mdd' + rel = 'md' + if crt_val == 'static_from_buildtype': + dbg = 'mtd' + rel = 'mt' + # Match what build type flags used to do. + if buildtype == 'plain': + return [] + elif buildtype == 'debug': + return self.crt_args[dbg] + elif buildtype == 'debugoptimized': + return self.crt_args[rel] + elif buildtype == 'release': + return self.crt_args[rel] + elif buildtype == 'minsize': + return self.crt_args[rel] + else: + assert buildtype == 'custom' + raise EnvironmentException('Requested C runtime based on buildtype, but buildtype is "custom".') + +class YasmCompiler(NasmCompiler): + id = 'yasm' + + def get_optimization_args(self, optimization_level: str) -> T.List[str]: + # Yasm is incompatible with Nasm optimization flags. + return [] + + def get_exelist(self, ccache: bool = True) -> T.List[str]: + # Wrap yasm executable with an internal script that will write depfile. 
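+ # (yasm has no ninja-compatible depfile output of its own, so the wrapper script runs yasm and writes the depfile on its behalf.)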
+ exelist = super().get_exelist(ccache) + return get_meson_command() + ['--internal', 'yasm'] + exelist + + def get_debug_args(self, is_debug: bool) -> T.List[str]: + if is_debug: + if self.info.is_windows(): + return ['-g', 'null'] + return ['-g', 'dwarf2'] + return [] + + def get_dependency_gen_args(self, outtarget: str, outfile: str) -> T.List[str]: + return ['--depfile', outfile] + +# https://learn.microsoft.com/en-us/cpp/assembler/masm/ml-and-ml64-command-line-reference +class MasmCompiler(Compiler): + language = 'masm' + id = 'ml' + + def get_compile_only_args(self) -> T.List[str]: + return ['/c'] + + def get_argument_syntax(self) -> str: + return 'msvc' + + def needs_static_linker(self) -> bool: + return True + + def get_always_args(self) -> T.List[str]: + return ['/nologo'] + + def get_werror_args(self) -> T.List[str]: + return ['/WX'] + + def get_output_args(self, outputname: str) -> T.List[str]: + return ['/Fo', outputname] + + def get_optimization_args(self, optimization_level: str) -> T.List[str]: + return [] + + def get_debug_args(self, is_debug: bool) -> T.List[str]: + if is_debug: + return ['/Zi'] + return [] + + def sanity_check(self, work_dir: str, environment: 'Environment') -> None: + if self.info.cpu_family not in {'x86', 'x86_64'}: + raise EnvironmentException(f'ASM compiler {self.id!r} does not support {self.info.cpu_family} CPU family') + + def get_buildtype_args(self, buildtype: str) -> T.List[str]: + # FIXME: Not implemented + return [] + + def get_pic_args(self) -> T.List[str]: + return [] + + def get_include_args(self, path: str, is_system: bool) -> T.List[str]: + if not path: + path = '.' + return ['-I' + path] + + def compute_parameters_with_absolute_paths(self, parameter_list: T.List[str], + build_dir: str) -> T.List[str]: + for idx, i in enumerate(parameter_list): + if i[:2] == '-I' or i[:2] == '/I': + parameter_list[idx] = i[:2] + os.path.normpath(os.path.join(build_dir, i[2:])) + return parameter_list + + def get_crt_compile_args(self, crt_val: str, buildtype: str) -> T.List[str]: + return [] + + def depfile_for_object(self, objfile: str) -> T.Optional[str]: + return None + + +# https://learn.microsoft.com/en-us/cpp/assembler/arm/arm-assembler-command-line-reference +class MasmARMCompiler(Compiler): + language = 'masm' + id = 'armasm' + + def needs_static_linker(self) -> bool: + return True + + def get_always_args(self) -> T.List[str]: + return ['-nologo'] + + def get_werror_args(self) -> T.List[str]: + return [] + + def get_output_args(self, outputname: str) -> T.List[str]: + return ['-o', outputname] + + def get_optimization_args(self, optimization_level: str) -> T.List[str]: + return [] + + def get_debug_args(self, is_debug: bool) -> T.List[str]: + if is_debug: + return ['-g'] + return [] + + def sanity_check(self, work_dir: str, environment: 'Environment') -> None: + if self.info.cpu_family not in {'arm', 'aarch64'}: + raise EnvironmentException(f'ASM compiler {self.id!r} does not support {self.info.cpu_family} CPU family') + + def get_buildtype_args(self, buildtype: str) -> T.List[str]: + # FIXME: Not implemented + return [] + + def get_pic_args(self) -> T.List[str]: + return [] + + def get_include_args(self, path: str, is_system: bool) -> T.List[str]: + if not path: + path = '.' 
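+ # armasm expects a lowercase -i for include directories, unlike ml/ml64 which take -I or /I.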
+ return ['-i' + path] + + def compute_parameters_with_absolute_paths(self, parameter_list: T.List[str], + build_dir: str) -> T.List[str]: + for idx, i in enumerate(parameter_list): + if i[:2] == '-I': + parameter_list[idx] = i[:2] + os.path.normpath(os.path.join(build_dir, i[2:])) + return parameter_list + + def get_crt_compile_args(self, crt_val: str, buildtype: str) -> T.List[str]: + return [] + + def depfile_for_object(self, objfile: str) -> T.Optional[str]: + return None + + +class MetrowerksAsmCompiler(MetrowerksCompiler, Compiler): + language = 'nasm' + + def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, + for_machine: 'MachineChoice', info: 'MachineInfo', + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None, is_cross: bool = False): + Compiler.__init__(self, ccache, exelist, version, for_machine, info, linker, full_version, is_cross) + MetrowerksCompiler.__init__(self) + + self.warn_args: T.Dict[str, T.List[str]] = { + '0': [], + '1': [], + '2': [], + '3': [], + 'everything': []} + self.can_compile_suffixes.add('s') + + def get_crt_compile_args(self, crt_val: str, buildtype: str) -> T.List[str]: + return [] + + def get_pic_args(self) -> T.List[str]: + return [] + + def needs_static_linker(self) -> bool: + return True + + +class MetrowerksAsmCompilerARM(MetrowerksAsmCompiler): + id = 'mwasmarm' + + def get_instruction_set_args(self, instruction_set: str) -> T.Optional[T.List[str]]: + return mwasmarm_instruction_set_args.get(instruction_set, None) + + def sanity_check(self, work_dir: str, environment: 'Environment') -> None: + if self.info.cpu_family not in {'arm'}: + raise EnvironmentException(f'ASM compiler {self.id!r} does not support {self.info.cpu_family} CPU family') + + +class MetrowerksAsmCompilerEmbeddedPowerPC(MetrowerksAsmCompiler): + id = 'mwasmeppc' + + def get_instruction_set_args(self, instruction_set: str) -> T.Optional[T.List[str]]: + return mwasmeppc_instruction_set_args.get(instruction_set, None) + + def sanity_check(self, work_dir: str, environment: 'Environment') -> None: + if self.info.cpu_family not in {'ppc'}: + raise EnvironmentException(f'ASM compiler {self.id!r} does not support {self.info.cpu_family} CPU family') diff --git a/vendored-meson/meson/mesonbuild/compilers/c.py b/vendored-meson/meson/mesonbuild/compilers/c.py new file mode 100644 index 000000000000..d514650de71f --- /dev/null +++ b/vendored-meson/meson/mesonbuild/compilers/c.py @@ -0,0 +1,799 @@ +# Copyright 2012-2020 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from __future__ import annotations + +import os.path +import typing as T + +from .. import coredata +from .. 
import mlog +from ..mesonlib import MesonException, version_compare, OptionKey +from .c_function_attributes import C_FUNC_ATTRIBUTES +from .mixins.clike import CLikeCompiler +from .mixins.ccrx import CcrxCompiler +from .mixins.xc16 import Xc16Compiler +from .mixins.compcert import CompCertCompiler +from .mixins.ti import TICompiler +from .mixins.arm import ArmCompiler, ArmclangCompiler +from .mixins.visualstudio import MSVCCompiler, ClangClCompiler +from .mixins.gnu import GnuCompiler +from .mixins.gnu import gnu_common_warning_args, gnu_c_warning_args +from .mixins.intel import IntelGnuLikeCompiler, IntelVisualStudioLikeCompiler +from .mixins.clang import ClangCompiler +from .mixins.elbrus import ElbrusCompiler +from .mixins.pgi import PGICompiler +from .mixins.emscripten import EmscriptenMixin +from .mixins.metrowerks import MetrowerksCompiler +from .mixins.metrowerks import mwccarm_instruction_set_args, mwcceppc_instruction_set_args +from .compilers import ( + gnu_winlibs, + msvc_winlibs, + Compiler, +) + +if T.TYPE_CHECKING: + from ..coredata import MutableKeyedOptionDictType, KeyedOptionDictType + from ..dependencies import Dependency + from ..envconfig import MachineInfo + from ..environment import Environment + from ..linkers.linkers import DynamicLinker + from ..mesonlib import MachineChoice + from ..programs import ExternalProgram + from .compilers import CompileCheckMode + + CompilerMixinBase = Compiler +else: + CompilerMixinBase = object + + +class CCompiler(CLikeCompiler, Compiler): + def attribute_check_func(self, name: str) -> str: + try: + return C_FUNC_ATTRIBUTES[name] + except KeyError: + raise MesonException(f'Unknown function attribute "{name}"') + + language = 'c' + + def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool, + info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): + # If a child ObjC or CPP class has already set it, don't set it ourselves + Compiler.__init__(self, ccache, exelist, version, for_machine, info, + is_cross=is_cross, full_version=full_version, linker=linker) + CLikeCompiler.__init__(self, exe_wrapper) + + def get_no_stdinc_args(self) -> T.List[str]: + return ['-nostdinc'] + + def sanity_check(self, work_dir: str, environment: 'Environment') -> None: + code = 'int main(void) { int class=0; return class; }\n' + return self._sanity_check_impl(work_dir, environment, 'sanitycheckc.c', code) + + def has_header_symbol(self, hname: str, symbol: str, prefix: str, + env: 'Environment', *, + extra_args: T.Union[None, T.List[str], T.Callable[['CompileCheckMode'], T.List[str]]] = None, + dependencies: T.Optional[T.List['Dependency']] = None) -> T.Tuple[bool, bool]: + fargs = {'prefix': prefix, 'header': hname, 'symbol': symbol} + t = '''{prefix} + #include <{header}> + int main(void) {{ + /* If it's not defined as a macro, try to use as a symbol */ + #ifndef {symbol} + {symbol}; + #endif + return 0; + }}''' + return self.compiles(t.format(**fargs), env, extra_args=extra_args, + dependencies=dependencies) + + def get_options(self) -> 'MutableKeyedOptionDictType': + opts = super().get_options() + opts.update({ + OptionKey('std', machine=self.for_machine, lang=self.language): coredata.UserComboOption( + 'C language standard to use', + ['none'], + 'none', + ) + }) + return opts + + +class _ClangCStds(CompilerMixinBase): + + """Mixin class for clang based compilers for setting C standards. 
+ + This is used by both ClangCCompiler and ClangClCompiler, as they share + the same versions + """ + + _C17_VERSION = '>=6.0.0' + _C18_VERSION = '>=8.0.0' + _C2X_VERSION = '>=9.0.0' + + def get_options(self) -> 'MutableKeyedOptionDictType': + opts = super().get_options() + c_stds = ['c89', 'c99', 'c11'] + g_stds = ['gnu89', 'gnu99', 'gnu11'] + # https://releases.llvm.org/6.0.0/tools/clang/docs/ReleaseNotes.html + # https://en.wikipedia.org/wiki/Xcode#Latest_versions + if version_compare(self.version, self._C17_VERSION): + c_stds += ['c17'] + g_stds += ['gnu17'] + if version_compare(self.version, self._C18_VERSION): + c_stds += ['c18'] + g_stds += ['gnu18'] + if version_compare(self.version, self._C2X_VERSION): + c_stds += ['c2x'] + g_stds += ['gnu2x'] + opts[OptionKey('std', machine=self.for_machine, lang=self.language)].choices = ['none'] + c_stds + g_stds + return opts + + +class ClangCCompiler(_ClangCStds, ClangCompiler, CCompiler): + + def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool, + info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + defines: T.Optional[T.Dict[str, str]] = None, + full_version: T.Optional[str] = None): + CCompiler.__init__(self, ccache, exelist, version, for_machine, is_cross, info, exe_wrapper, linker=linker, full_version=full_version) + ClangCompiler.__init__(self, defines) + default_warn_args = ['-Wall', '-Winvalid-pch'] + self.warn_args = {'0': [], + '1': default_warn_args, + '2': default_warn_args + ['-Wextra'], + '3': default_warn_args + ['-Wextra', '-Wpedantic'], + 'everything': ['-Weverything']} + + def get_options(self) -> 'MutableKeyedOptionDictType': + opts = super().get_options() + if self.info.is_windows() or self.info.is_cygwin(): + opts.update({ + OptionKey('winlibs', machine=self.for_machine, lang=self.language): coredata.UserArrayOption( + 'Standard Win libraries to link against', + gnu_winlibs, + ), + }) + return opts + + def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]: + args = [] + std = options[OptionKey('std', machine=self.for_machine, lang=self.language)] + if std.value != 'none': + args.append('-std=' + std.value) + return args + + def get_option_link_args(self, options: 'KeyedOptionDictType') -> T.List[str]: + if self.info.is_windows() or self.info.is_cygwin(): + # without a typedict mypy can't understand this. + libs = options[OptionKey('winlibs', machine=self.for_machine, lang=self.language)].value.copy() + assert isinstance(libs, list) + for l in libs: + assert isinstance(l, str) + return libs + return [] + + +class ArmLtdClangCCompiler(ClangCCompiler): + + id = 'armltdclang' + + +class AppleClangCCompiler(ClangCCompiler): + + """Handle the differences between Apple Clang and Vanilla Clang. + + Right now this just handles the differences between the versions that new + C standards were added. 
+ """ + + _C17_VERSION = '>=10.0.0' + _C18_VERSION = '>=11.0.0' + _C2X_VERSION = '>=11.0.0' + + +class EmscriptenCCompiler(EmscriptenMixin, ClangCCompiler): + + id = 'emscripten' + + def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool, + info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + defines: T.Optional[T.Dict[str, str]] = None, + full_version: T.Optional[str] = None): + if not is_cross: + raise MesonException('Emscripten compiler can only be used for cross compilation.') + if not version_compare(version, '>=1.39.19'): + raise MesonException('Meson requires Emscripten >= 1.39.19') + ClangCCompiler.__init__(self, ccache, exelist, version, for_machine, is_cross, + info, exe_wrapper=exe_wrapper, linker=linker, + defines=defines, full_version=full_version) + + +class ArmclangCCompiler(ArmclangCompiler, CCompiler): + ''' + Keil armclang + ''' + + def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool, + info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): + CCompiler.__init__(self, ccache, exelist, version, for_machine, is_cross, + info, exe_wrapper, linker=linker, full_version=full_version) + ArmclangCompiler.__init__(self) + default_warn_args = ['-Wall', '-Winvalid-pch'] + self.warn_args = {'0': [], + '1': default_warn_args, + '2': default_warn_args + ['-Wextra'], + '3': default_warn_args + ['-Wextra', '-Wpedantic'], + 'everything': ['-Weverything']} + + def get_options(self) -> 'MutableKeyedOptionDictType': + opts = CCompiler.get_options(self) + key = OptionKey('std', machine=self.for_machine, lang=self.language) + opts[key].choices = ['none', 'c90', 'c99', 'c11', 'gnu90', 'gnu99', 'gnu11'] + return opts + + def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]: + args = [] + std = options[OptionKey('std', machine=self.for_machine, lang=self.language)] + if std.value != 'none': + args.append('-std=' + std.value) + return args + + def get_option_link_args(self, options: 'KeyedOptionDictType') -> T.List[str]: + return [] + + +class GnuCCompiler(GnuCompiler, CCompiler): + + _C18_VERSION = '>=8.0.0' + _C2X_VERSION = '>=9.0.0' + _INVALID_PCH_VERSION = ">=3.4.0" + + def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool, + info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + defines: T.Optional[T.Dict[str, str]] = None, + full_version: T.Optional[str] = None): + CCompiler.__init__(self, ccache, exelist, version, for_machine, is_cross, info, exe_wrapper, linker=linker, full_version=full_version) + GnuCompiler.__init__(self, defines) + default_warn_args = ['-Wall'] + if version_compare(self.version, self._INVALID_PCH_VERSION): + default_warn_args += ['-Winvalid-pch'] + self.warn_args = {'0': [], + '1': default_warn_args, + '2': default_warn_args + ['-Wextra'], + '3': default_warn_args + ['-Wextra', '-Wpedantic'], + 'everything': (default_warn_args + ['-Wextra', '-Wpedantic'] + + self.supported_warn_args(gnu_common_warning_args) + + self.supported_warn_args(gnu_c_warning_args))} + + def get_options(self) -> 'MutableKeyedOptionDictType': + opts = CCompiler.get_options(self) + c_stds = ['c89', 'c99', 'c11'] + g_stds = ['gnu89', 'gnu99', 
'gnu11'] + if version_compare(self.version, self._C18_VERSION): + c_stds += ['c17', 'c18'] + g_stds += ['gnu17', 'gnu18'] + if version_compare(self.version, self._C2X_VERSION): + c_stds += ['c2x'] + g_stds += ['gnu2x'] + key = OptionKey('std', machine=self.for_machine, lang=self.language) + opts[key].choices = ['none'] + c_stds + g_stds + if self.info.is_windows() or self.info.is_cygwin(): + opts.update({ + key.evolve('winlibs'): coredata.UserArrayOption( + 'Standard Win libraries to link against', + gnu_winlibs, + ), + }) + return opts + + def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]: + args = [] + std = options[OptionKey('std', lang=self.language, machine=self.for_machine)] + if std.value != 'none': + args.append('-std=' + std.value) + return args + + def get_option_link_args(self, options: 'KeyedOptionDictType') -> T.List[str]: + if self.info.is_windows() or self.info.is_cygwin(): + # without a typeddict mypy can't figure this out + libs: T.List[str] = options[OptionKey('winlibs', lang=self.language, machine=self.for_machine)].value.copy() + assert isinstance(libs, list) + for l in libs: + assert isinstance(l, str) + return libs + return [] + + def get_pch_use_args(self, pch_dir: str, header: str) -> T.List[str]: + return ['-fpch-preprocess', '-include', os.path.basename(header)] + + +class PGICCompiler(PGICompiler, CCompiler): + def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool, + info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): + CCompiler.__init__(self, ccache, exelist, version, for_machine, is_cross, + info, exe_wrapper, linker=linker, full_version=full_version) + PGICompiler.__init__(self) + + +class NvidiaHPC_CCompiler(PGICompiler, CCompiler): + + id = 'nvidia_hpc' + + def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool, + info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): + CCompiler.__init__(self, ccache, exelist, version, for_machine, is_cross, + info, exe_wrapper, linker=linker, full_version=full_version) + PGICompiler.__init__(self) + + +class ElbrusCCompiler(ElbrusCompiler, CCompiler): + def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool, + info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + defines: T.Optional[T.Dict[str, str]] = None, + full_version: T.Optional[str] = None): + CCompiler.__init__(self, ccache, exelist, version, for_machine, is_cross, + info, exe_wrapper, linker=linker, full_version=full_version) + ElbrusCompiler.__init__(self) + + def get_options(self) -> 'MutableKeyedOptionDictType': + opts = CCompiler.get_options(self) + stds = ['c89', 'c9x', 'c99', 'gnu89', 'gnu9x', 'gnu99'] + stds += ['iso9899:1990', 'iso9899:199409', 'iso9899:1999'] + if version_compare(self.version, '>=1.20.00'): + stds += ['c11', 'gnu11'] + if version_compare(self.version, '>=1.21.00') and version_compare(self.version, '<1.22.00'): + stds += ['c90', 'c1x', 'gnu90', 'gnu1x', 'iso9899:2011'] + if version_compare(self.version, '>=1.23.00'): + stds += ['c90', 'c1x', 'gnu90', 'gnu1x', 'iso9899:2011'] + if version_compare(self.version, '>=1.26.00'): + stds += ['c17', 'c18', 
'iso9899:2017', 'iso9899:2018', 'gnu17', 'gnu18'] + opts[OptionKey('std', machine=self.for_machine, lang=self.language)].choices = ['none'] + stds + return opts + + # Elbrus C compiler does not have lchmod, but there is only linker warning, not compiler error. + # So we should explicitly fail at this case. + def has_function(self, funcname: str, prefix: str, env: 'Environment', *, + extra_args: T.Optional[T.List[str]] = None, + dependencies: T.Optional[T.List['Dependency']] = None) -> T.Tuple[bool, bool]: + if funcname == 'lchmod': + return False, False + else: + return super().has_function(funcname, prefix, env, + extra_args=extra_args, + dependencies=dependencies) + + +class IntelCCompiler(IntelGnuLikeCompiler, CCompiler): + def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool, + info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): + CCompiler.__init__(self, ccache, exelist, version, for_machine, is_cross, + info, exe_wrapper, linker=linker, full_version=full_version) + IntelGnuLikeCompiler.__init__(self) + self.lang_header = 'c-header' + default_warn_args = ['-Wall', '-w3'] + self.warn_args = {'0': [], + '1': default_warn_args + ['-diag-disable:remark'], + '2': default_warn_args + ['-Wextra', '-diag-disable:remark'], + '3': default_warn_args + ['-Wextra', '-diag-disable:remark'], + 'everything': default_warn_args + ['-Wextra']} + + def get_options(self) -> 'MutableKeyedOptionDictType': + opts = CCompiler.get_options(self) + c_stds = ['c89', 'c99'] + g_stds = ['gnu89', 'gnu99'] + if version_compare(self.version, '>=16.0.0'): + c_stds += ['c11'] + opts[OptionKey('std', machine=self.for_machine, lang=self.language)].choices = ['none'] + c_stds + g_stds + return opts + + def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]: + args = [] + std = options[OptionKey('std', machine=self.for_machine, lang=self.language)] + if std.value != 'none': + args.append('-std=' + std.value) + return args + + +class IntelLLVMCCompiler(ClangCCompiler): + + id = 'intel-llvm' + + +class VisualStudioLikeCCompilerMixin(CompilerMixinBase): + + """Shared methods that apply to MSVC-like C compilers.""" + + def get_options(self) -> 'MutableKeyedOptionDictType': + opts = super().get_options() + opts.update({ + OptionKey('winlibs', machine=self.for_machine, lang=self.language): coredata.UserArrayOption( + 'Windows libs to link against.', + msvc_winlibs, + ), + }) + return opts + + def get_option_link_args(self, options: 'KeyedOptionDictType') -> T.List[str]: + # need a TypeDict to make this work + key = OptionKey('winlibs', machine=self.for_machine, lang=self.language) + libs = options[key].value.copy() + assert isinstance(libs, list) + for l in libs: + assert isinstance(l, str) + return libs + + +class VisualStudioCCompiler(MSVCCompiler, VisualStudioLikeCCompilerMixin, CCompiler): + + _C11_VERSION = '>=19.28' + _C17_VERSION = '>=19.28' + + def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_machine: MachineChoice, + is_cross: bool, info: 'MachineInfo', target: str, + exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): + CCompiler.__init__(self, ccache, exelist, version, for_machine, is_cross, + info, exe_wrapper, linker=linker, + full_version=full_version) + MSVCCompiler.__init__(self, target) + + def 
get_options(self) -> 'MutableKeyedOptionDictType': + opts = super().get_options() + c_stds = ['c89', 'c99'] + # Need to have these to be compatible with projects + # that set c_std to e.g. gnu99. + # https://github.com/mesonbuild/meson/issues/7611 + g_stds = ['gnu89', 'gnu90', 'gnu9x', 'gnu99'] + if version_compare(self.version, self._C11_VERSION): + c_stds += ['c11'] + g_stds += ['gnu1x', 'gnu11'] + if version_compare(self.version, self._C17_VERSION): + c_stds += ['c17', 'c18'] + g_stds += ['gnu17', 'gnu18'] + key = OptionKey('std', machine=self.for_machine, lang=self.language) + opts[key].choices = ['none'] + c_stds + g_stds + return opts + + def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]: + args = [] + std = options[OptionKey('std', machine=self.for_machine, lang=self.language)] + if std.value.startswith('gnu'): + mlog.log( + 'cl.exe does not actually support gnu standards, and meson ' + 'will instead demote to the nearest ISO C standard. This ' + 'may cause compilation to fail.', once=True) + # As of MVSC 16.8, /std:c11 and /std:c17 are the only valid C standard options. + if std.value in {'c11', 'gnu1x', 'gnu11'}: + args.append('/std:c11') + elif std.value in {'c17', 'c18', 'gnu17', 'gnu18'}: + args.append('/std:c17') + return args + + +class ClangClCCompiler(_ClangCStds, ClangClCompiler, VisualStudioLikeCCompilerMixin, CCompiler): + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, + is_cross: bool, info: 'MachineInfo', target: str, + exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): + CCompiler.__init__(self, [], exelist, version, for_machine, is_cross, + info, exe_wrapper, linker=linker, + full_version=full_version) + ClangClCompiler.__init__(self, target) + + def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]: + key = OptionKey('std', machine=self.for_machine, lang=self.language) + std = options[key].value + if std != "none": + return [f'/clang:-std={std}'] + return [] + + +class IntelClCCompiler(IntelVisualStudioLikeCompiler, VisualStudioLikeCCompilerMixin, CCompiler): + + """Intel "ICL" compiler abstraction.""" + + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, + is_cross: bool, info: 'MachineInfo', target: str, + exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): + CCompiler.__init__(self, [], exelist, version, for_machine, is_cross, + info, exe_wrapper, linker=linker, + full_version=full_version) + IntelVisualStudioLikeCompiler.__init__(self, target) + + def get_options(self) -> 'MutableKeyedOptionDictType': + opts = super().get_options() + key = OptionKey('std', machine=self.for_machine, lang=self.language) + opts[key].choices = ['none', 'c89', 'c99', 'c11'] + return opts + + def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]: + args = [] + key = OptionKey('std', machine=self.for_machine, lang=self.language) + std = options[key] + if std.value == 'c89': + mlog.log("ICL doesn't explicitly implement c89, setting the standard to 'none', which is close.", once=True) + elif std.value != 'none': + args.append('/Qstd:' + std.value) + return args + + +class IntelLLVMClCCompiler(IntelClCCompiler): + + id = 'intel-llvm-cl' + + +class ArmCCompiler(ArmCompiler, CCompiler): + def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, 
for_machine: MachineChoice, + is_cross: bool, info: 'MachineInfo', + exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): + CCompiler.__init__(self, ccache, exelist, version, for_machine, is_cross, + info, exe_wrapper, linker=linker, + full_version=full_version) + ArmCompiler.__init__(self) + + def get_options(self) -> 'MutableKeyedOptionDictType': + opts = CCompiler.get_options(self) + key = OptionKey('std', machine=self.for_machine, lang=self.language) + opts[key].choices = ['none', 'c89', 'c99', 'c11'] + return opts + + def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]: + args = [] + key = OptionKey('std', machine=self.for_machine, lang=self.language) + std = options[key] + if std.value != 'none': + args.append('--' + std.value) + return args + + +class CcrxCCompiler(CcrxCompiler, CCompiler): + def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_machine: MachineChoice, + is_cross: bool, info: 'MachineInfo', + exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): + CCompiler.__init__(self, ccache, exelist, version, for_machine, is_cross, + info, exe_wrapper, linker=linker, full_version=full_version) + CcrxCompiler.__init__(self) + + # Override CCompiler.get_always_args + def get_always_args(self) -> T.List[str]: + return ['-nologo'] + + def get_options(self) -> 'MutableKeyedOptionDictType': + opts = CCompiler.get_options(self) + key = OptionKey('std', machine=self.for_machine, lang=self.language) + opts[key].choices = ['none', 'c89', 'c99'] + return opts + + def get_no_stdinc_args(self) -> T.List[str]: + return [] + + def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]: + args = [] + key = OptionKey('std', machine=self.for_machine, lang=self.language) + std = options[key] + if std.value == 'c89': + args.append('-lang=c') + elif std.value == 'c99': + args.append('-lang=c99') + return args + + def get_compile_only_args(self) -> T.List[str]: + return [] + + def get_no_optimization_args(self) -> T.List[str]: + return ['-optimize=0'] + + def get_output_args(self, target: str) -> T.List[str]: + return [f'-output=obj={target}'] + + def get_werror_args(self) -> T.List[str]: + return ['-change_message=error'] + + def get_include_args(self, path: str, is_system: bool) -> T.List[str]: + if path == '': + path = '.' 
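+ # CC-RX spells include directories as -include=<path> rather than the usual -I<path>.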
+ return ['-include=' + path] + + +class Xc16CCompiler(Xc16Compiler, CCompiler): + def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_machine: MachineChoice, + is_cross: bool, info: 'MachineInfo', + exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): + CCompiler.__init__(self, ccache, exelist, version, for_machine, is_cross, + info, exe_wrapper, linker=linker, full_version=full_version) + Xc16Compiler.__init__(self) + + def get_options(self) -> 'MutableKeyedOptionDictType': + opts = CCompiler.get_options(self) + key = OptionKey('std', machine=self.for_machine, lang=self.language) + opts[key].choices = ['none', 'c89', 'c99', 'gnu89', 'gnu99'] + return opts + + def get_no_stdinc_args(self) -> T.List[str]: + return [] + + def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]: + args = [] + key = OptionKey('std', machine=self.for_machine, lang=self.language) + std = options[key] + if std.value != 'none': + args.append('-ansi') + args.append('-std=' + std.value) + return args + + def get_compile_only_args(self) -> T.List[str]: + return [] + + def get_no_optimization_args(self) -> T.List[str]: + return ['-O0'] + + def get_output_args(self, target: str) -> T.List[str]: + return [f'-o{target}'] + + def get_werror_args(self) -> T.List[str]: + return ['-change_message=error'] + + def get_include_args(self, path: str, is_system: bool) -> T.List[str]: + if path == '': + path = '.' + return ['-I' + path] + +class CompCertCCompiler(CompCertCompiler, CCompiler): + def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_machine: MachineChoice, + is_cross: bool, info: 'MachineInfo', + exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): + CCompiler.__init__(self, ccache, exelist, version, for_machine, is_cross, + info, exe_wrapper, linker=linker, full_version=full_version) + CompCertCompiler.__init__(self) + + def get_options(self) -> 'MutableKeyedOptionDictType': + opts = CCompiler.get_options(self) + key = OptionKey('std', machine=self.for_machine, lang=self.language) + opts[key].choices = ['none', 'c89', 'c99'] + return opts + + def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]: + return [] + + def get_no_optimization_args(self) -> T.List[str]: + return ['-O0'] + + def get_output_args(self, target: str) -> T.List[str]: + return [f'-o{target}'] + + def get_werror_args(self) -> T.List[str]: + return ['-Werror'] + + def get_include_args(self, path: str, is_system: bool) -> T.List[str]: + if path == '': + path = '.' 
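+ # CompCert accepts the conventional -I<path> form for include directories.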
+ return ['-I' + path] + +class TICCompiler(TICompiler, CCompiler): + def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_machine: MachineChoice, + is_cross: bool, info: 'MachineInfo', + exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): + CCompiler.__init__(self, ccache, exelist, version, for_machine, is_cross, + info, exe_wrapper, linker=linker, full_version=full_version) + TICompiler.__init__(self) + + # Override CCompiler.get_always_args + def get_always_args(self) -> T.List[str]: + return [] + + def get_options(self) -> 'MutableKeyedOptionDictType': + opts = CCompiler.get_options(self) + key = OptionKey('std', machine=self.for_machine, lang=self.language) + opts[key].choices = ['none', 'c89', 'c99', 'c11'] + return opts + + def get_no_stdinc_args(self) -> T.List[str]: + return [] + + def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]: + args = [] + key = OptionKey('std', machine=self.for_machine, lang=self.language) + std = options[key] + if std.value != 'none': + args.append('--' + std.value) + return args + +class C2000CCompiler(TICCompiler): + # Required for backwards compat with projects created before ti-cgt support existed + id = 'c2000' + +class MetrowerksCCompilerARM(MetrowerksCompiler, CCompiler): + id = 'mwccarm' + + def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_machine: MachineChoice, + is_cross: bool, info: 'MachineInfo', + exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): + CCompiler.__init__(self, ccache, exelist, version, for_machine, is_cross, + info, exe_wrapper, linker=linker, full_version=full_version) + MetrowerksCompiler.__init__(self) + + def get_instruction_set_args(self, instruction_set: str) -> T.Optional[T.List[str]]: + return mwccarm_instruction_set_args.get(instruction_set, None) + + def get_options(self) -> 'MutableKeyedOptionDictType': + opts = CCompiler.get_options(self) + c_stds = ['c99'] + opts[OptionKey('std', machine=self.for_machine, lang=self.language)].choices = ['none'] + c_stds + return opts + + def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]: + args = [] + std = options[OptionKey('std', machine=self.for_machine, lang=self.language)] + if std.value != 'none': + args.append('-lang') + args.append(std.value) + return args + +class MetrowerksCCompilerEmbeddedPowerPC(MetrowerksCompiler, CCompiler): + id = 'mwcceppc' + + def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_machine: MachineChoice, + is_cross: bool, info: 'MachineInfo', + exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): + CCompiler.__init__(self, ccache, exelist, version, for_machine, is_cross, + info, exe_wrapper, linker=linker, full_version=full_version) + MetrowerksCompiler.__init__(self) + + def get_instruction_set_args(self, instruction_set: str) -> T.Optional[T.List[str]]: + return mwcceppc_instruction_set_args.get(instruction_set, None) + + def get_options(self) -> 'MutableKeyedOptionDictType': + opts = CCompiler.get_options(self) + c_stds = ['c99'] + opts[OptionKey('std', machine=self.for_machine, lang=self.language)].choices = ['none'] + c_stds + return opts + + def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]: + args = [] + std = 
options[OptionKey('std', machine=self.for_machine, lang=self.language)] + if std.value != 'none': + args.append('-lang ' + std.value) + return args diff --git a/vendored-meson/meson/mesonbuild/compilers/c_function_attributes.py b/vendored-meson/meson/mesonbuild/compilers/c_function_attributes.py new file mode 100644 index 000000000000..71ee9b22a75d --- /dev/null +++ b/vendored-meson/meson/mesonbuild/compilers/c_function_attributes.py @@ -0,0 +1,143 @@ +# These functions are based on the following code: +# https://git.savannah.gnu.org/gitweb/?p=autoconf-archive.git;a=blob_plain;f=m4/ax_gcc_func_attribute.m4, +# which is licensed under the following terms: +# +# Copyright (c) 2013 Gabriele Svelto +# +# Copying and distribution of this file, with or without modification, are +# permitted in any medium without royalty provided the copyright notice +# and this notice are preserved. This file is offered as-is, without any +# warranty. +# + +C_FUNC_ATTRIBUTES = { + 'alias': ''' + int foo(void) { return 0; } + int bar(void) __attribute__((alias("foo")));''', + 'aligned': + 'int foo(void) __attribute__((aligned(32)));', + 'alloc_size': + 'void *foo(int a) __attribute__((alloc_size(1)));', + 'always_inline': + 'inline __attribute__((always_inline)) int foo(void) { return 0; }', + 'artificial': + 'inline __attribute__((artificial)) int foo(void) { return 0; }', + 'cold': + 'int foo(void) __attribute__((cold));', + 'const': + 'int foo(void) __attribute__((const));', + 'constructor': + 'int foo(void) __attribute__((constructor));', + 'constructor_priority': + 'int foo( void ) __attribute__((__constructor__(65535/2)));', + 'deprecated': + 'int foo(void) __attribute__((deprecated("")));', + 'destructor': + 'int foo(void) __attribute__((destructor));', + 'dllexport': + '__declspec(dllexport) int foo(void) { return 0; }', + 'dllimport': + '__declspec(dllimport) int foo(void);', + 'error': + 'int foo(void) __attribute__((error("")));', + 'externally_visible': + 'int foo(void) __attribute__((externally_visible));', + 'fallthrough': ''' + int foo( void ) { + switch (0) { + case 1: __attribute__((fallthrough)); + case 2: break; + } + return 0; + };''', + 'flatten': + 'int foo(void) __attribute__((flatten));', + 'format': + 'int foo(const char * p, ...) 
__attribute__((format(printf, 1, 2)));', + 'format_arg': + 'char * foo(const char * p) __attribute__((format_arg(1)));', + 'force_align_arg_pointer': + '__attribute__((force_align_arg_pointer)) int foo(void) { return 0; }', + 'gnu_inline': + 'inline __attribute__((gnu_inline)) int foo(void) { return 0; }', + 'hot': + 'int foo(void) __attribute__((hot));', + 'ifunc': + ('int my_foo(void) { return 0; }' + 'static int (*resolve_foo(void))(void) { return my_foo; }' + 'int foo(void) __attribute__((ifunc("resolve_foo")));'), + 'leaf': + '__attribute__((leaf)) int foo(void) { return 0; }', + 'malloc': + 'int *foo(void) __attribute__((malloc));', + 'noclone': + 'int foo(void) __attribute__((noclone));', + 'noinline': + '__attribute__((noinline)) int foo(void) { return 0; }', + 'nonnull': + 'int foo(char * p) __attribute__((nonnull(1)));', + 'noreturn': + 'int foo(void) __attribute__((noreturn));', + 'nothrow': + 'int foo(void) __attribute__((nothrow));', + 'optimize': + '__attribute__((optimize(3))) int foo(void) { return 0; }', + 'packed': + 'struct __attribute__((packed)) foo { int bar; };', + 'pure': + 'int foo(void) __attribute__((pure));', + 'returns_nonnull': + 'int *foo(void) __attribute__((returns_nonnull));', + 'section': ''' + #if defined(__APPLE__) && defined(__MACH__) + extern int foo __attribute__((section("__BAR,__bar"))); + #else + extern int foo __attribute__((section(".bar"))); + #endif''', + 'sentinel': + 'int foo(const char *bar, ...) __attribute__((sentinel));', + 'unused': + 'int foo(void) __attribute__((unused));', + 'used': + 'int foo(void) __attribute__((used));', + 'vector_size': + '__attribute__((vector_size(32))); int foo(void) { return 0; }', + 'visibility': ''' + int foo_def(void) __attribute__((visibility("default"))); + int foo_hid(void) __attribute__((visibility("hidden"))); + int foo_int(void) __attribute__((visibility("internal")));''', + 'visibility:default': + 'int foo(void) __attribute__((visibility("default")));', + 'visibility:hidden': + 'int foo(void) __attribute__((visibility("hidden")));', + 'visibility:internal': + 'int foo(void) __attribute__((visibility("internal")));', + 'visibility:protected': + 'int foo(void) __attribute__((visibility("protected")));', + 'warning': + 'int foo(void) __attribute__((warning("")));', + 'warn_unused_result': + 'int foo(void) __attribute__((warn_unused_result));', + 'weak': + 'int foo(void) __attribute__((weak));', + 'weakref': ''' + static int foo(void) { return 0; } + static int var(void) __attribute__((weakref("foo")));''', + 'retain': '__attribute__((retain)) int x;', +} + +CXX_FUNC_ATTRIBUTES = { + # Alias must be applied to the mangled name in C++ + 'alias': + ('extern "C" {' + 'int foo(void) { return 0; }' + '}' + 'int bar(void) __attribute__((alias("foo")));' + ), + 'ifunc': + ('extern "C" {' + 'int my_foo(void) { return 0; }' + 'static int (*resolve_foo(void))(void) { return my_foo; }' + '}' + 'int foo(void) __attribute__((ifunc("resolve_foo")));'), +} diff --git a/vendored-meson/meson/mesonbuild/compilers/compilers.py b/vendored-meson/meson/mesonbuild/compilers/compilers.py new file mode 100644 index 000000000000..cb8eae56ac4e --- /dev/null +++ b/vendored-meson/meson/mesonbuild/compilers/compilers.py @@ -0,0 +1,1403 @@ +# Copyright 2012-2022 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from __future__ import annotations + +import abc +import contextlib, os.path, re +import enum +import itertools +import typing as T +from functools import lru_cache + +from .. import coredata +from .. import mlog +from .. import mesonlib +from ..mesonlib import ( + HoldableObject, + EnvironmentException, MesonException, + Popen_safe_logged, LibType, TemporaryDirectoryWinProof, OptionKey, +) + +from ..arglist import CompilerArgs + +if T.TYPE_CHECKING: + from ..build import BuildTarget + from ..coredata import MutableKeyedOptionDictType, KeyedOptionDictType + from ..envconfig import MachineInfo + from ..environment import Environment + from ..linkers import RSPFileSyntax + from ..linkers.linkers import DynamicLinker + from ..mesonlib import MachineChoice + from ..dependencies import Dependency + + CompilerType = T.TypeVar('CompilerType', bound='Compiler') + _T = T.TypeVar('_T') + +"""This file contains the data files of all compilers Meson knows +about. To support a new compiler, add its information below. +Also add corresponding autodetection code in detect.py.""" + +header_suffixes = {'h', 'hh', 'hpp', 'hxx', 'H', 'ipp', 'moc', 'vapi', 'di'} +obj_suffixes = {'o', 'obj', 'res'} +# To the emscripten compiler, .js files are libraries +lib_suffixes = {'a', 'lib', 'dll', 'dll.a', 'dylib', 'so', 'js'} +# Mapping of language to suffixes of files that should always be in that language +# This means we can't include .h headers here since they could be C, C++, ObjC, etc. +# First suffix is the language's default. +lang_suffixes = { + 'c': ('c',), + 'cpp': ('cpp', 'cc', 'cxx', 'c++', 'hh', 'hpp', 'ipp', 'hxx', 'ino', 'ixx', 'C'), + 'cuda': ('cu',), + # f90, f95, f03, f08 are for free-form fortran ('f90' recommended) + # f, for, ftn, fpp are for fixed-form fortran ('f' or 'for' recommended) + 'fortran': ('f90', 'f95', 'f03', 'f08', 'f', 'for', 'ftn', 'fpp'), + 'd': ('d', 'di'), + 'objc': ('m',), + 'objcpp': ('mm',), + 'rust': ('rs',), + 'vala': ('vala', 'vapi', 'gs'), + 'cs': ('cs',), + 'swift': ('swift',), + 'java': ('java',), + 'cython': ('pyx', ), + 'nasm': ('asm',), + 'masm': ('masm',), +} +all_languages = lang_suffixes.keys() +c_cpp_suffixes = {'h'} +cpp_suffixes = set(lang_suffixes['cpp']) | c_cpp_suffixes +c_suffixes = set(lang_suffixes['c']) | c_cpp_suffixes +assembler_suffixes = {'s', 'S', 'sx', 'asm', 'masm'} +llvm_ir_suffixes = {'ll'} +all_suffixes = set(itertools.chain(*lang_suffixes.values(), assembler_suffixes, llvm_ir_suffixes, c_cpp_suffixes)) +source_suffixes = all_suffixes - header_suffixes +# List of languages that by default consume and output libraries following the +# C ABI; these can generally be used interchangeably +# This must be sorted, see sort_clink(). +clib_langs = ('objcpp', 'cpp', 'objc', 'c', 'nasm', 'fortran') +# List of languages that can be linked with C code directly by the linker +# used in build.py:process_compilers() and build.py:get_dynamic_linker() +# This must be sorted, see sort_clink(). 
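+# For example, sorted(['cpp', 'c', 'd'], key=sort_clink) yields ['c', 'cpp', 'd'],
+# since sort_clink() below ranks languages by their position in reversed(clink_langs).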
+clink_langs = ('d', 'cuda') + clib_langs + +SUFFIX_TO_LANG = dict(itertools.chain(*( + [(suffix, lang) for suffix in v] for lang, v in lang_suffixes.items()))) + +# Languages that should use LDFLAGS arguments when linking. +LANGUAGES_USING_LDFLAGS = {'objcpp', 'cpp', 'objc', 'c', 'fortran', 'd', 'cuda'} +# Languages that should use CPPFLAGS arguments when linking. +LANGUAGES_USING_CPPFLAGS = {'c', 'cpp', 'objc', 'objcpp'} +soregex = re.compile(r'.*\.so(\.[0-9]+)?(\.[0-9]+)?(\.[0-9]+)?$') + +# Environment variables that each lang uses. +CFLAGS_MAPPING: T.Mapping[str, str] = { + 'c': 'CFLAGS', + 'cpp': 'CXXFLAGS', + 'cuda': 'CUFLAGS', + 'objc': 'OBJCFLAGS', + 'objcpp': 'OBJCXXFLAGS', + 'fortran': 'FFLAGS', + 'd': 'DFLAGS', + 'vala': 'VALAFLAGS', + 'rust': 'RUSTFLAGS', + 'cython': 'CYTHONFLAGS', + 'cs': 'CSFLAGS', # This one might not be standard. +} + +# All these are only for C-linkable languages; see `clink_langs` above. + +def sort_clink(lang: str) -> int: + ''' + Sorting function to sort the list of languages according to + reversed(compilers.clink_langs) and append the unknown langs in the end. + The purpose is to prefer C over C++ for files that can be compiled by + both such as assembly, C, etc. Also applies to ObjC, ObjC++, etc. + ''' + if lang not in clink_langs: + return 1 + return -clink_langs.index(lang) + +def is_header(fname: 'mesonlib.FileOrString') -> bool: + if isinstance(fname, mesonlib.File): + fname = fname.fname + suffix = fname.split('.')[-1] + return suffix in header_suffixes + +def is_source_suffix(suffix: str) -> bool: + return suffix in source_suffixes + +def is_source(fname: 'mesonlib.FileOrString') -> bool: + if isinstance(fname, mesonlib.File): + fname = fname.fname + suffix = fname.split('.')[-1].lower() + return is_source_suffix(suffix) + +def is_assembly(fname: 'mesonlib.FileOrString') -> bool: + if isinstance(fname, mesonlib.File): + fname = fname.fname + suffix = fname.split('.')[-1] + return suffix in assembler_suffixes + +def is_llvm_ir(fname: 'mesonlib.FileOrString') -> bool: + if isinstance(fname, mesonlib.File): + fname = fname.fname + suffix = fname.split('.')[-1] + return suffix in llvm_ir_suffixes + +@lru_cache(maxsize=None) +def cached_by_name(fname: 'mesonlib.FileOrString') -> bool: + suffix = fname.split('.')[-1] + return suffix in obj_suffixes + +def is_object(fname: 'mesonlib.FileOrString') -> bool: + if isinstance(fname, mesonlib.File): + fname = fname.fname + return cached_by_name(fname) + +def is_library(fname: 'mesonlib.FileOrString') -> bool: + if isinstance(fname, mesonlib.File): + fname = fname.fname + + if soregex.match(fname): + return True + + suffix = fname.split('.')[-1] + return suffix in lib_suffixes + +def is_known_suffix(fname: 'mesonlib.FileOrString') -> bool: + if isinstance(fname, mesonlib.File): + fname = fname.fname + suffix = fname.split('.')[-1] + + return suffix in all_suffixes + + +class CompileCheckMode(enum.Enum): + + PREPROCESS = 'preprocess' + COMPILE = 'compile' + LINK = 'link' + + +cuda_buildtype_args: T.Dict[str, T.List[str]] = { + 'plain': [], + 'debug': ['-g', '-G'], + 'debugoptimized': ['-g', '-lineinfo'], + 'release': [], + 'minsize': [], + 'custom': [], +} + +java_buildtype_args: T.Dict[str, T.List[str]] = { + 'plain': [], + 'debug': ['-g'], + 'debugoptimized': ['-g'], + 'release': [], + 'minsize': [], + 'custom': [], +} + +rust_buildtype_args: T.Dict[str, T.List[str]] = { + 'plain': [], + 'debug': [], + 'debugoptimized': [], + 'release': [], + 'minsize': [], + 'custom': [], +} + +d_gdc_buildtype_args: 
T.Dict[str, T.List[str]] = { + 'plain': [], + 'debug': [], + 'debugoptimized': ['-finline-functions'], + 'release': ['-finline-functions'], + 'minsize': [], + 'custom': [], +} + +d_ldc_buildtype_args: T.Dict[str, T.List[str]] = { + 'plain': [], + 'debug': [], + 'debugoptimized': ['-enable-inlining', '-Hkeep-all-bodies'], + 'release': ['-enable-inlining', '-Hkeep-all-bodies'], + 'minsize': [], + 'custom': [], +} + +d_dmd_buildtype_args: T.Dict[str, T.List[str]] = { + 'plain': [], + 'debug': [], + 'debugoptimized': ['-inline'], + 'release': ['-inline'], + 'minsize': [], + 'custom': [], +} + +mono_buildtype_args: T.Dict[str, T.List[str]] = { + 'plain': [], + 'debug': [], + 'debugoptimized': ['-optimize+'], + 'release': ['-optimize+'], + 'minsize': [], + 'custom': [], +} + +swift_buildtype_args: T.Dict[str, T.List[str]] = { + 'plain': [], + 'debug': [], + 'debugoptimized': [], + 'release': [], + 'minsize': [], + 'custom': [], +} + +gnu_winlibs = ['-lkernel32', '-luser32', '-lgdi32', '-lwinspool', '-lshell32', + '-lole32', '-loleaut32', '-luuid', '-lcomdlg32', '-ladvapi32'] + +msvc_winlibs = ['kernel32.lib', 'user32.lib', 'gdi32.lib', + 'winspool.lib', 'shell32.lib', 'ole32.lib', 'oleaut32.lib', + 'uuid.lib', 'comdlg32.lib', 'advapi32.lib'] + +clike_optimization_args: T.Dict[str, T.List[str]] = { + 'plain': [], + '0': [], + 'g': [], + '1': ['-O1'], + '2': ['-O2'], + '3': ['-O3'], + 's': ['-Os'], +} + +cuda_optimization_args: T.Dict[str, T.List[str]] = { + 'plain': [], + '0': [], + 'g': ['-O0'], + '1': ['-O1'], + '2': ['-O2'], + '3': ['-O3'], + 's': ['-O3'] +} + +cuda_debug_args: T.Dict[bool, T.List[str]] = { + False: [], + True: ['-g'] +} + +clike_debug_args: T.Dict[bool, T.List[str]] = { + False: [], + True: ['-g'] +} + +base_options: 'KeyedOptionDictType' = { + OptionKey('b_pch'): coredata.UserBooleanOption('Use precompiled headers', True), + OptionKey('b_lto'): coredata.UserBooleanOption('Use link time optimization', False), + OptionKey('b_lto_threads'): coredata.UserIntegerOption('Use multiple threads for Link Time Optimization', (None, None, 0)), + OptionKey('b_lto_mode'): coredata.UserComboOption('Select between different LTO modes.', + ['default', 'thin'], + 'default'), + OptionKey('b_thinlto_cache'): coredata.UserBooleanOption('Use LLVM ThinLTO caching for faster incremental builds', False), + OptionKey('b_thinlto_cache_dir'): coredata.UserStringOption('Directory to store ThinLTO cache objects', ''), + OptionKey('b_sanitize'): coredata.UserComboOption('Code sanitizer to use', + ['none', 'address', 'thread', 'undefined', 'memory', 'leak', 'address,undefined'], + 'none'), + OptionKey('b_lundef'): coredata.UserBooleanOption('Use -Wl,--no-undefined when linking', True), + OptionKey('b_asneeded'): coredata.UserBooleanOption('Use -Wl,--as-needed when linking', True), + OptionKey('b_pgo'): coredata.UserComboOption('Use profile guided optimization', + ['off', 'generate', 'use'], + 'off'), + OptionKey('b_coverage'): coredata.UserBooleanOption('Enable coverage tracking.', False), + OptionKey('b_colorout'): coredata.UserComboOption('Use colored output', + ['auto', 'always', 'never'], + 'always'), + OptionKey('b_ndebug'): coredata.UserComboOption('Disable asserts', ['true', 'false', 'if-release'], 'false'), + OptionKey('b_staticpic'): coredata.UserBooleanOption('Build static libraries as position independent', True), + OptionKey('b_pie'): coredata.UserBooleanOption('Build executables as position independent', False), + OptionKey('b_bitcode'): coredata.UserBooleanOption('Generate and embed bitcode 
(only macOS/iOS/tvOS)', False), + OptionKey('b_vscrt'): coredata.UserComboOption('VS run-time library type to use.', + ['none', 'md', 'mdd', 'mt', 'mtd', 'from_buildtype', 'static_from_buildtype'], + 'from_buildtype'), +} + +def option_enabled(boptions: T.Set[OptionKey], options: 'KeyedOptionDictType', + option: OptionKey) -> bool: + try: + if option not in boptions: + return False + ret = options[option].value + assert isinstance(ret, bool), 'must return bool' # could also be str + return ret + except KeyError: + return False + + +def get_option_value(options: 'KeyedOptionDictType', opt: OptionKey, fallback: '_T') -> '_T': + """Get the value of an option, or the fallback value.""" + try: + v: '_T' = options[opt].value + except KeyError: + return fallback + + assert isinstance(v, type(fallback)), f'Should have {type(fallback)!r} but was {type(v)!r}' + # Mypy doesn't understand that the above assert ensures that v is type _T + return v + + +def are_asserts_disabled(options: KeyedOptionDictType) -> bool: + """Should debug assertions be disabled + + :param options: OptionDictionary + :return: whether to disable assertions or not + """ + return (options[OptionKey('b_ndebug')].value == 'true' or + (options[OptionKey('b_ndebug')].value == 'if-release' and + options[OptionKey('buildtype')].value in {'release', 'plain'})) + + +def get_base_compile_args(options: 'KeyedOptionDictType', compiler: 'Compiler') -> T.List[str]: + args: T.List[str] = [] + try: + if options[OptionKey('b_lto')].value: + args.extend(compiler.get_lto_compile_args( + threads=get_option_value(options, OptionKey('b_lto_threads'), 0), + mode=get_option_value(options, OptionKey('b_lto_mode'), 'default'))) + except KeyError: + pass + try: + args += compiler.get_colorout_args(options[OptionKey('b_colorout')].value) + except KeyError: + pass + try: + args += compiler.sanitizer_compile_args(options[OptionKey('b_sanitize')].value) + except KeyError: + pass + try: + pgo_val = options[OptionKey('b_pgo')].value + if pgo_val == 'generate': + args.extend(compiler.get_profile_generate_args()) + elif pgo_val == 'use': + args.extend(compiler.get_profile_use_args()) + except KeyError: + pass + try: + if options[OptionKey('b_coverage')].value: + args += compiler.get_coverage_args() + except KeyError: + pass + try: + args += compiler.get_assert_args(are_asserts_disabled(options)) + except KeyError: + pass + # This does not need a try...except + if option_enabled(compiler.base_options, options, OptionKey('b_bitcode')): + args.append('-fembed-bitcode') + try: + crt_val = options[OptionKey('b_vscrt')].value + buildtype = options[OptionKey('buildtype')].value + try: + args += compiler.get_crt_compile_args(crt_val, buildtype) + except AttributeError: + pass + except KeyError: + pass + return args + +def get_base_link_args(options: 'KeyedOptionDictType', linker: 'Compiler', + is_shared_module: bool, build_dir: str) -> T.List[str]: + args: T.List[str] = [] + try: + if options[OptionKey('b_lto')].value: + thinlto_cache_dir = None + if get_option_value(options, OptionKey('b_thinlto_cache'), False): + thinlto_cache_dir = get_option_value(options, OptionKey('b_thinlto_cache_dir'), '') + if thinlto_cache_dir == '': + thinlto_cache_dir = os.path.join(build_dir, 'meson-private', 'thinlto-cache') + args.extend(linker.get_lto_link_args( + threads=get_option_value(options, OptionKey('b_lto_threads'), 0), + mode=get_option_value(options, OptionKey('b_lto_mode'), 'default'), + thinlto_cache_dir=thinlto_cache_dir)) + except KeyError: + pass + try: + args += 
linker.sanitizer_link_args(options[OptionKey('b_sanitize')].value) + except KeyError: + pass + try: + pgo_val = options[OptionKey('b_pgo')].value + if pgo_val == 'generate': + args.extend(linker.get_profile_generate_args()) + elif pgo_val == 'use': + args.extend(linker.get_profile_use_args()) + except KeyError: + pass + try: + if options[OptionKey('b_coverage')].value: + args += linker.get_coverage_link_args() + except KeyError: + pass + + as_needed = option_enabled(linker.base_options, options, OptionKey('b_asneeded')) + bitcode = option_enabled(linker.base_options, options, OptionKey('b_bitcode')) + # Shared modules cannot be built with bitcode_bundle because + # -bitcode_bundle is incompatible with -undefined and -bundle + if bitcode and not is_shared_module: + args.extend(linker.bitcode_args()) + elif as_needed: + # -Wl,-dead_strip_dylibs is incompatible with bitcode + args.extend(linker.get_asneeded_args()) + + # Apple's ld (the only one that supports bitcode) does not like -undefined + # arguments or -headerpad_max_install_names when bitcode is enabled + if not bitcode: + args.extend(linker.headerpad_args()) + if (not is_shared_module and + option_enabled(linker.base_options, options, OptionKey('b_lundef'))): + args.extend(linker.no_undefined_link_args()) + else: + args.extend(linker.get_allow_undefined_link_args()) + + try: + crt_val = options[OptionKey('b_vscrt')].value + buildtype = options[OptionKey('buildtype')].value + try: + args += linker.get_crt_link_args(crt_val, buildtype) + except AttributeError: + pass + except KeyError: + pass + return args + + +class CrossNoRunException(MesonException): + pass + +class RunResult(HoldableObject): + def __init__(self, compiled: bool, returncode: int = 999, + stdout: str = 'UNDEFINED', stderr: str = 'UNDEFINED', + cached: bool = False): + self.compiled = compiled + self.returncode = returncode + self.stdout = stdout + self.stderr = stderr + self.cached = cached + + +class CompileResult(HoldableObject): + + """The result of Compiler.compiles (and friends).""" + + def __init__(self, stdo: T.Optional[str] = None, stde: T.Optional[str] = None, + command: T.Optional[T.List[str]] = None, + returncode: int = 999, + input_name: T.Optional[str] = None, + output_name: T.Optional[str] = None, + cached: bool = False): + self.stdout = stdo + self.stderr = stde + self.input_name = input_name + self.output_name = output_name + self.command = command or [] + self.cached = cached + self.returncode = returncode + + +class Compiler(HoldableObject, metaclass=abc.ABCMeta): + # Libraries to ignore in find_library() since they are provided by the + # compiler or the C library. Currently only used for MSVC. + ignore_libs: T.List[str] = [] + # Libraries that are internal compiler implementations, and must not be + # manually searched. 
+    internal_libs: T.List[str] = []
+
+    LINKER_PREFIX: T.Union[None, str, T.List[str]] = None
+    INVOKES_LINKER = True
+
+    language: str
+    id: str
+    warn_args: T.Dict[str, T.List[str]]
+    mode = CompileCheckMode.COMPILE
+
+    def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str,
+                 for_machine: MachineChoice, info: 'MachineInfo',
+                 linker: T.Optional['DynamicLinker'] = None,
+                 full_version: T.Optional[str] = None, is_cross: bool = False):
+        self.exelist = ccache + exelist
+        self.exelist_no_ccache = exelist
+        # In case it's been overridden by a child class already
+        if not hasattr(self, 'file_suffixes'):
+            self.file_suffixes = lang_suffixes[self.language]
+        if not hasattr(self, 'can_compile_suffixes'):
+            self.can_compile_suffixes: T.Set[str] = set(self.file_suffixes)
+        self.default_suffix = self.file_suffixes[0]
+        self.version = version
+        self.full_version = full_version
+        self.for_machine = for_machine
+        self.base_options: T.Set[OptionKey] = set()
+        self.linker = linker
+        self.info = info
+        self.is_cross = is_cross
+        self.modes: T.List[Compiler] = []
+
+    def __repr__(self) -> str:
+        repr_str = "<{0}: v{1} `{2}`>"
+        return repr_str.format(self.__class__.__name__, self.version,
+                               ' '.join(self.exelist))
+
+    @lru_cache(maxsize=None)
+    def can_compile(self, src: 'mesonlib.FileOrString') -> bool:
+        if isinstance(src, mesonlib.File):
+            src = src.fname
+        suffix = os.path.splitext(src)[1]
+        if suffix != '.C':
+            suffix = suffix.lower()
+        return bool(suffix) and suffix[1:] in self.can_compile_suffixes
+
+    def get_id(self) -> str:
+        return self.id
+
+    def get_modes(self) -> T.List[Compiler]:
+        return self.modes
+
+    def get_linker_id(self) -> str:
+        # There is no guarantee that we have a dynamic linker instance, as
+        # some languages don't have separate linkers and compilers.
In those + # cases return the compiler id + try: + return self.linker.id + except AttributeError: + return self.id + + def get_version_string(self) -> str: + details = [self.id, self.version] + if self.full_version: + details += ['"%s"' % (self.full_version)] + return '(%s)' % (' '.join(details)) + + def get_language(self) -> str: + return self.language + + @classmethod + def get_display_language(cls) -> str: + return cls.language.capitalize() + + def get_default_suffix(self) -> str: + return self.default_suffix + + def get_define(self, dname: str, prefix: str, env: 'Environment', + extra_args: T.Union[T.List[str], T.Callable[[CompileCheckMode], T.List[str]]], + dependencies: T.List['Dependency'], + disable_cache: bool = False) -> T.Tuple[str, bool]: + raise EnvironmentException('%s does not support get_define ' % self.get_id()) + + def compute_int(self, expression: str, low: T.Optional[int], high: T.Optional[int], + guess: T.Optional[int], prefix: str, env: 'Environment', *, + extra_args: T.Union[None, T.List[str], T.Callable[[CompileCheckMode], T.List[str]]], + dependencies: T.Optional[T.List['Dependency']]) -> int: + raise EnvironmentException('%s does not support compute_int ' % self.get_id()) + + def compute_parameters_with_absolute_paths(self, parameter_list: T.List[str], + build_dir: str) -> T.List[str]: + raise EnvironmentException('%s does not support compute_parameters_with_absolute_paths ' % self.get_id()) + + def has_members(self, typename: str, membernames: T.List[str], + prefix: str, env: 'Environment', *, + extra_args: T.Union[None, T.List[str], T.Callable[[CompileCheckMode], T.List[str]]] = None, + dependencies: T.Optional[T.List['Dependency']] = None) -> T.Tuple[bool, bool]: + raise EnvironmentException('%s does not support has_member(s) ' % self.get_id()) + + def has_type(self, typename: str, prefix: str, env: 'Environment', + extra_args: T.Union[T.List[str], T.Callable[[CompileCheckMode], T.List[str]]], *, + dependencies: T.Optional[T.List['Dependency']] = None) -> T.Tuple[bool, bool]: + raise EnvironmentException('%s does not support has_type ' % self.get_id()) + + def symbols_have_underscore_prefix(self, env: 'Environment') -> bool: + raise EnvironmentException('%s does not support symbols_have_underscore_prefix ' % self.get_id()) + + def get_exelist(self, ccache: bool = True) -> T.List[str]: + return self.exelist.copy() if ccache else self.exelist_no_ccache.copy() + + def get_linker_exelist(self) -> T.List[str]: + return self.linker.get_exelist() if self.linker else self.get_exelist() + + @abc.abstractmethod + def get_output_args(self, outputname: str) -> T.List[str]: + pass + + def get_linker_output_args(self, outputname: str) -> T.List[str]: + return self.linker.get_output_args(outputname) + + def get_linker_search_args(self, dirname: str) -> T.List[str]: + return self.linker.get_search_args(dirname) + + def get_builtin_define(self, define: str) -> T.Optional[str]: + raise EnvironmentException('%s does not support get_builtin_define.' % self.id) + + def has_builtin_define(self, define: str) -> bool: + raise EnvironmentException('%s does not support has_builtin_define.' % self.id) + + def get_always_args(self) -> T.List[str]: + return [] + + def can_linker_accept_rsp(self) -> bool: + """ + Determines whether the linker can accept arguments using the @rsp syntax. 
+        """
+        return self.linker.get_accepts_rsp()
+
+    def get_linker_always_args(self) -> T.List[str]:
+        return self.linker.get_always_args()
+
+    def get_linker_lib_prefix(self) -> str:
+        return self.linker.get_lib_prefix()
+
+    def gen_import_library_args(self, implibname: str) -> T.List[str]:
+        """
+        Used only on Windows for libraries that need an import library.
+        This currently means C, C++, Fortran.
+        """
+        return []
+
+    def get_options(self) -> 'MutableKeyedOptionDictType':
+        return {}
+
+    def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+        return []
+
+    def get_option_link_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+        return self.linker.get_option_args(options)
+
+    def check_header(self, hname: str, prefix: str, env: 'Environment', *,
+                     extra_args: T.Union[None, T.List[str], T.Callable[[CompileCheckMode], T.List[str]]] = None,
+                     dependencies: T.Optional[T.List['Dependency']] = None) -> T.Tuple[bool, bool]:
+        """Check that header is usable.
+
+        Returns a two item tuple of bools. The first bool is whether the
+        check succeeded, the second is whether the result was cached (True)
+        or run fresh (False).
+        """
+        raise EnvironmentException('Language %s does not support header checks.' % self.get_display_language())
+
+    def has_header(self, hname: str, prefix: str, env: 'Environment', *,
+                   extra_args: T.Union[None, T.List[str], T.Callable[[CompileCheckMode], T.List[str]]] = None,
+                   dependencies: T.Optional[T.List['Dependency']] = None,
+                   disable_cache: bool = False) -> T.Tuple[bool, bool]:
+        """Check that the header exists.
+
+        This check will return true if the file exists, even if it contains:
+
+        ```c
+        # error "You thought you could use this, LOLZ!"
+        ```
+
+        Use check_header if your header only works in some cases.
+
+        Returns a two item tuple of bools. The first bool is whether the
+        check succeeded, the second is whether the result was cached (True)
+        or run fresh (False).
+        """
+        raise EnvironmentException('Language %s does not support header checks.' % self.get_display_language())
+
+    def has_header_symbol(self, hname: str, symbol: str, prefix: str,
+                          env: 'Environment', *,
+                          extra_args: T.Union[None, T.List[str], T.Callable[[CompileCheckMode], T.List[str]]] = None,
+                          dependencies: T.Optional[T.List['Dependency']] = None) -> T.Tuple[bool, bool]:
+        raise EnvironmentException('Language %s does not support header symbol checks.' % self.get_display_language())
+
+    def run(self, code: 'mesonlib.FileOrString', env: 'Environment', *,
+            extra_args: T.Union[T.List[str], T.Callable[[CompileCheckMode], T.List[str]], None] = None,
+            dependencies: T.Optional[T.List['Dependency']] = None) -> RunResult:
+        raise EnvironmentException('Language %s does not support run checks.' % self.get_display_language())
+
+    # Caching run() in general seems too risky (no way to know what the program
+    # depends on), but some callers know more about the programs they intend to
+    # run.
+    # For now we just accept code as a string, as that's what internal callers
+    # need anyway. If we wanted to accept files, the cache key would need to
+    # include mtime.
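+    # [Illustrative note, not upstream Meson code] The cache key is the check
+    # source plus the fully expanded argument list, so e.g.
+    #
+    #     compiler.cached_run('int main(void) { return 0; }', env)
+    #
+    # called twice reuses a single entry in env.coredata.run_check_cache,
+    # while changing either the code or extra_args yields a new key and a
+    # fresh run.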
+ def cached_run(self, code: str, env: 'Environment', *, + extra_args: T.Union[T.List[str], T.Callable[[CompileCheckMode], T.List[str]], None] = None, + dependencies: T.Optional[T.List['Dependency']] = None) -> RunResult: + run_check_cache = env.coredata.run_check_cache + args = self.build_wrapper_args(env, extra_args, dependencies, CompileCheckMode('link')) + key = (code, tuple(args)) + if key in run_check_cache: + p = run_check_cache[key] + p.cached = True + mlog.debug('Using cached run result:') + mlog.debug('Code:\n', code) + mlog.debug('Args:\n', extra_args) + mlog.debug('Cached run returncode:\n', p.returncode) + mlog.debug('Cached run stdout:\n', p.stdout) + mlog.debug('Cached run stderr:\n', p.stderr) + else: + p = self.run(code, env, extra_args=extra_args, dependencies=dependencies) + run_check_cache[key] = p + return p + + def sizeof(self, typename: str, prefix: str, env: 'Environment', *, + extra_args: T.Union[None, T.List[str], T.Callable[[CompileCheckMode], T.List[str]]] = None, + dependencies: T.Optional[T.List['Dependency']] = None) -> T.Tuple[int, bool]: + raise EnvironmentException('Language %s does not support sizeof checks.' % self.get_display_language()) + + def alignment(self, typename: str, prefix: str, env: 'Environment', *, + extra_args: T.Optional[T.List[str]] = None, + dependencies: T.Optional[T.List['Dependency']] = None) -> T.Tuple[int, bool]: + raise EnvironmentException('Language %s does not support alignment checks.' % self.get_display_language()) + + def has_function(self, funcname: str, prefix: str, env: 'Environment', *, + extra_args: T.Optional[T.List[str]] = None, + dependencies: T.Optional[T.List['Dependency']] = None) -> T.Tuple[bool, bool]: + """See if a function exists. + + Returns a two item tuple of bools. The first bool is whether the + check succeeded, the second is whether the result was cached (True) + or run fresh (False). + """ + raise EnvironmentException('Language %s does not support function checks.' 
% self.get_display_language()) + + @classmethod + def _unix_args_to_native(cls, args: T.List[str], info: MachineInfo) -> T.List[str]: + "Always returns a copy that can be independently mutated" + return args.copy() + + def unix_args_to_native(self, args: T.List[str]) -> T.List[str]: + return self._unix_args_to_native(args, self.info) + + @classmethod + def native_args_to_unix(cls, args: T.List[str]) -> T.List[str]: + "Always returns a copy that can be independently mutated" + return args.copy() + + def find_library(self, libname: str, env: 'Environment', extra_dirs: T.List[str], + libtype: LibType = LibType.PREFER_SHARED, lib_prefix_warning: bool = True) -> T.Optional[T.List[str]]: + raise EnvironmentException(f'Language {self.get_display_language()} does not support library finding.') + + def get_library_naming(self, env: 'Environment', libtype: LibType, + strict: bool = False) -> T.Optional[T.Tuple[str, ...]]: + raise EnvironmentException( + 'Language {} does not support get_library_naming.'.format( + self.get_display_language())) + + def get_program_dirs(self, env: 'Environment') -> T.List[str]: + return [] + + def has_multi_arguments(self, args: T.List[str], env: 'Environment') -> T.Tuple[bool, bool]: + raise EnvironmentException( + 'Language {} does not support has_multi_arguments.'.format( + self.get_display_language())) + + def has_multi_link_arguments(self, args: T.List[str], env: 'Environment') -> T.Tuple[bool, bool]: + return self.linker.has_multi_arguments(args, env) + + def _get_compile_output(self, dirname: str, mode: CompileCheckMode) -> str: + assert mode != CompileCheckMode.PREPROCESS, 'In pre-processor mode, the output is sent to stdout and discarded' + # Extension only matters if running results; '.exe' is + # guaranteed to be executable on every platform. + if mode == CompileCheckMode.LINK: + suffix = 'exe' + else: + suffix = 'obj' + return os.path.join(dirname, 'output.' + suffix) + + def get_compiler_args_for_mode(self, mode: CompileCheckMode) -> T.List[str]: + args: T.List[str] = [] + args += self.get_always_args() + if mode is CompileCheckMode.COMPILE: + args += self.get_compile_only_args() + elif mode is CompileCheckMode.PREPROCESS: + args += self.get_preprocess_only_args() + else: + assert mode is CompileCheckMode.LINK + return args + + def compiler_args(self, args: T.Optional[T.Iterable[str]] = None) -> CompilerArgs: + """Return an appropriate CompilerArgs instance for this class.""" + return CompilerArgs(self, args) + + @contextlib.contextmanager + def compile(self, code: 'mesonlib.FileOrString', + extra_args: T.Union[None, CompilerArgs, T.List[str]] = None, + *, mode: CompileCheckMode = CompileCheckMode.LINK, want_output: bool = False, + temp_dir: T.Optional[str] = None) -> T.Iterator[T.Optional[CompileResult]]: + # TODO: there isn't really any reason for this to be a contextmanager + + if mode == CompileCheckMode.PREPROCESS: + assert not want_output, 'In pre-processor mode, the output is sent to stdout and discarded' + + if extra_args is None: + extra_args = [] + + with TemporaryDirectoryWinProof(dir=temp_dir) as tmpdirname: + no_ccache = False + if isinstance(code, str): + srcname = os.path.join(tmpdirname, + 'testfile.' 
+ self.default_suffix)
+                with open(srcname, 'w', encoding='utf-8') as ofile:
+                    ofile.write(code)
+                # ccache would result in a cache miss
+                no_ccache = True
+                contents = code
+            else:
+                srcname = code.fname
+                if not is_object(code.fname):
+                    with open(code.fname, encoding='utf-8') as f:
+                        contents = f.read()
+                else:
+                    contents = ''
+
+            # Construct the compiler command-line
+            commands = self.compiler_args()
+            commands.append(srcname)
+
+            # Preprocess mode outputs to stdout, so no output args
+            if mode != CompileCheckMode.PREPROCESS:
+                output = self._get_compile_output(tmpdirname, mode)
+                commands += self.get_output_args(output)
+            commands.extend(self.get_compiler_args_for_mode(CompileCheckMode(mode)))
+
+            # extra_args must be last because it could contain '/link' to
+            # pass args to VisualStudio's linker. In that case everything
+            # in the command line after '/link' is given to the linker.
+            if extra_args:
+                commands += extra_args
+            # Generate full command-line with the exelist
+            command_list = self.get_exelist(ccache=not no_ccache) + commands.to_native()
+            mlog.debug('Running compile:')
+            mlog.debug('Working directory: ', tmpdirname)
+            mlog.debug('Code:\n', contents)
+            os_env = os.environ.copy()
+            os_env['LC_ALL'] = 'C'
+            if no_ccache:
+                os_env['CCACHE_DISABLE'] = '1'
+            p, stdo, stde = Popen_safe_logged(command_list, msg='Command line', cwd=tmpdirname, env=os_env)
+
+            result = CompileResult(stdo, stde, command_list, p.returncode, input_name=srcname)
+            if want_output:
+                result.output_name = output
+            yield result
+
+    @contextlib.contextmanager
+    def cached_compile(self, code: 'mesonlib.FileOrString', cdata: coredata.CoreData, *,
+                       extra_args: T.Union[None, T.List[str], CompilerArgs] = None,
+                       mode: CompileCheckMode = CompileCheckMode.LINK,
+                       temp_dir: T.Optional[str] = None) -> T.Iterator[T.Optional[CompileResult]]:
+        # TODO: There isn't really any reason for this to be a context manager
+
+        # Calculate the key
+        textra_args: T.Tuple[str, ...] = tuple(extra_args) if extra_args is not None else tuple()
+        key: coredata.CompilerCheckCacheKey = (tuple(self.exelist), self.version, code, textra_args, mode)
+
+        # Check if not cached, and generate, otherwise get from the cache
+        if key in cdata.compiler_check_cache:
+            p = cdata.compiler_check_cache[key]
+            p.cached = True
+            mlog.debug('Using cached compile:')
+            mlog.debug('Cached command line: ', ' '.join(p.command), '\n')
+            mlog.debug('Code:\n', code)
+            mlog.debug('Cached compiler stdout:\n', p.stdout)
+            mlog.debug('Cached compiler stderr:\n', p.stderr)
+            yield p
+        else:
+            with self.compile(code, extra_args=extra_args, mode=mode, want_output=False, temp_dir=temp_dir) as p:
+                cdata.compiler_check_cache[key] = p
+                yield p
+
+    def get_colorout_args(self, colortype: str) -> T.List[str]:
+        # TODO: colortype can probably be an enum
+        return []
+
+    # Some compilers (msvc) write debug info to a separate file.
+    # These args specify where it should be written.
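+    # [Illustrative note, not upstream Meson code] An MSVC-style override of
+    # the method below would typically return something like ['/Fdfoo.pdb']
+    # to name the separate debug file, while the base implementation returns
+    # [] because most compilers embed debug info in the object file itself.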
+    def get_compile_debugfile_args(self, rel_obj: str, pch: bool = False) -> T.List[str]:
+        return []
+
+    def get_link_debugfile_name(self, targetfile: str) -> T.Optional[str]:
+        return self.linker.get_debugfile_name(targetfile)
+
+    def get_link_debugfile_args(self, targetfile: str) -> T.List[str]:
+        return self.linker.get_debugfile_args(targetfile)
+
+    def get_std_shared_lib_link_args(self) -> T.List[str]:
+        return self.linker.get_std_shared_lib_args()
+
+    def get_std_shared_module_link_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+        return self.linker.get_std_shared_module_args(options)
+
+    def get_link_whole_for(self, args: T.List[str]) -> T.List[str]:
+        return self.linker.get_link_whole_for(args)
+
+    def get_allow_undefined_link_args(self) -> T.List[str]:
+        return self.linker.get_allow_undefined_args()
+
+    def no_undefined_link_args(self) -> T.List[str]:
+        return self.linker.no_undefined_args()
+
+    def get_instruction_set_args(self, instruction_set: str) -> T.Optional[T.List[str]]:
+        """Compiler arguments needed to enable the given instruction set.
+
+        Return type may be an empty list meaning nothing needed or None
+        meaning the given set is not supported.
+        """
+        return None
+
+    def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
+                         rpath_paths: T.Tuple[str, ...], build_rpath: str,
+                         install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
+        return self.linker.build_rpath_args(
+            env, build_dir, from_dir, rpath_paths, build_rpath, install_rpath)
+
+    def get_archive_name(self, filename: str) -> str:
+        return self.linker.get_archive_name(filename)
+
+    def get_command_to_archive_shlib(self) -> T.List[str]:
+        return self.linker.get_command_to_archive_shlib()
+
+    def thread_flags(self, env: 'Environment') -> T.List[str]:
+        return []
+
+    def thread_link_flags(self, env: 'Environment') -> T.List[str]:
+        return self.linker.thread_flags(env)
+
+    def openmp_flags(self) -> T.List[str]:
+        raise EnvironmentException('Language %s does not support OpenMP flags.' % self.get_display_language())
+
+    def openmp_link_flags(self) -> T.List[str]:
+        return self.openmp_flags()
+
+    def language_stdlib_only_link_flags(self, env: 'Environment') -> T.List[str]:
+        return []
+
+    def gnu_symbol_visibility_args(self, vistype: str) -> T.List[str]:
+        return []
+
+    def get_gui_app_args(self, value: bool) -> T.List[str]:
+        # Only used on Windows
+        return self.linker.get_gui_app_args(value)
+
+    def get_win_subsystem_args(self, value: str) -> T.List[str]:
+        # By default the dynamic linker is going to return an empty
+        # array in case it either doesn't support Windows subsystems
+        # or does not target Windows
+        return self.linker.get_win_subsystem_args(value)
+
+    def has_func_attribute(self, name: str, env: 'Environment') -> T.Tuple[bool, bool]:
+        raise EnvironmentException(
+            f'Language {self.get_display_language()} does not support function attributes.')
+
+    def get_pic_args(self) -> T.List[str]:
+        m = 'Language {} does not support position-independent code'
+        raise EnvironmentException(m.format(self.get_display_language()))
+
+    def get_pie_args(self) -> T.List[str]:
+        m = 'Language {} does not support position-independent executable'
+        raise EnvironmentException(m.format(self.get_display_language()))
+
+    def get_pie_link_args(self) -> T.List[str]:
+        return self.linker.get_pie_args()
+
+    def get_argument_syntax(self) -> str:
+        """Returns the argument family type.
+
+        Compilers fall into families if they try to emulate the command line
+        interface of another compiler.
For example, clang is in the GCC family + since it accepts most of the same arguments as GCC. ICL (ICC on + windows) is in the MSVC family since it accepts most of the same + arguments as MSVC. + """ + return 'other' + + def get_profile_generate_args(self) -> T.List[str]: + raise EnvironmentException( + '%s does not support get_profile_generate_args ' % self.get_id()) + + def get_profile_use_args(self) -> T.List[str]: + raise EnvironmentException( + '%s does not support get_profile_use_args ' % self.get_id()) + + def remove_linkerlike_args(self, args: T.List[str]) -> T.List[str]: + rm_exact = ('-headerpad_max_install_names',) + rm_prefixes = ('-Wl,', '-L',) + rm_next = ('-L', '-framework',) + ret: T.List[str] = [] + iargs = iter(args) + for arg in iargs: + # Remove this argument + if arg in rm_exact: + continue + # If the argument starts with this, but is not *exactly* this + # f.ex., '-L' should match ['-Lfoo'] but not ['-L', 'foo'] + if arg.startswith(rm_prefixes) and arg not in rm_prefixes: + continue + # Ignore this argument and the one after it + if arg in rm_next: + next(iargs) + continue + ret.append(arg) + return ret + + def get_lto_compile_args(self, *, threads: int = 0, mode: str = 'default') -> T.List[str]: + return [] + + def get_lto_link_args(self, *, threads: int = 0, mode: str = 'default', + thinlto_cache_dir: T.Optional[str] = None) -> T.List[str]: + return self.linker.get_lto_args() + + def sanitizer_compile_args(self, value: str) -> T.List[str]: + return [] + + def sanitizer_link_args(self, value: str) -> T.List[str]: + return self.linker.sanitizer_args(value) + + def get_asneeded_args(self) -> T.List[str]: + return self.linker.get_asneeded_args() + + def headerpad_args(self) -> T.List[str]: + return self.linker.headerpad_args() + + def bitcode_args(self) -> T.List[str]: + return self.linker.bitcode_args() + + def get_buildtype_args(self, buildtype: str) -> T.List[str]: + raise EnvironmentException(f'{self.id} does not implement get_buildtype_args') + + def get_buildtype_linker_args(self, buildtype: str) -> T.List[str]: + return self.linker.get_buildtype_args(buildtype) + + def get_soname_args(self, env: 'Environment', prefix: str, shlib_name: str, + suffix: str, soversion: str, + darwin_versions: T.Tuple[str, str]) -> T.List[str]: + return self.linker.get_soname_args( + env, prefix, shlib_name, suffix, soversion, + darwin_versions) + + def get_target_link_args(self, target: 'BuildTarget') -> T.List[str]: + return target.link_args + + def get_dependency_compile_args(self, dep: 'Dependency') -> T.List[str]: + return dep.get_compile_args() + + def get_dependency_link_args(self, dep: 'Dependency') -> T.List[str]: + return dep.get_link_args() + + @classmethod + def use_linker_args(cls, linker: str, version: str) -> T.List[str]: + """Get a list of arguments to pass to the compiler to set the linker. + """ + return [] + + def get_coverage_args(self) -> T.List[str]: + return [] + + def get_coverage_link_args(self) -> T.List[str]: + return self.linker.get_coverage_args() + + def get_assert_args(self, disable: bool) -> T.List[str]: + """Get arguments to enable or disable assertion. 
+ + :param disable: Whether to disable assertions + :return: A list of string arguments for this compiler + """ + return [] + + def get_crt_compile_args(self, crt_val: str, buildtype: str) -> T.List[str]: + raise EnvironmentException('This compiler does not support Windows CRT selection') + + def get_crt_link_args(self, crt_val: str, buildtype: str) -> T.List[str]: + raise EnvironmentException('This compiler does not support Windows CRT selection') + + def get_compile_only_args(self) -> T.List[str]: + return [] + + def get_preprocess_only_args(self) -> T.List[str]: + raise EnvironmentException('This compiler does not have a preprocessor') + + def get_preprocess_to_file_args(self) -> T.List[str]: + return self.get_preprocess_only_args() + + def get_default_include_dirs(self) -> T.List[str]: + # TODO: This is a candidate for returning an immutable list + return [] + + def get_largefile_args(self) -> T.List[str]: + '''Enable transparent large-file-support for 32-bit UNIX systems''' + if not (self.get_argument_syntax() == 'msvc' or self.info.is_darwin()): + # Enable large-file support unconditionally on all platforms other + # than macOS and MSVC. macOS is now 64-bit-only so it doesn't + # need anything special, and MSVC doesn't have automatic LFS. + # You must use the 64-bit counterparts explicitly. + # glibc, musl, and uclibc, and all BSD libcs support this. On Android, + # support for transparent LFS is available depending on the version of + # Bionic: https://github.com/android/platform_bionic#32-bit-abi-bugs + # https://code.google.com/p/android/issues/detail?id=64613 + # + # If this breaks your code, fix it! It's been 20+ years! + return ['-D_FILE_OFFSET_BITS=64'] + # We don't enable -D_LARGEFILE64_SOURCE since that enables + # transitionary features and must be enabled by programs that use + # those features explicitly. + return [] + + def get_library_dirs(self, env: 'Environment', + elf_class: T.Optional[int] = None) -> T.List[str]: + return [] + + def get_return_value(self, + fname: str, + rtype: str, + prefix: str, + env: 'Environment', + extra_args: T.Optional[T.List[str]], + dependencies: T.Optional[T.List['Dependency']]) -> T.Union[str, int]: + raise EnvironmentException(f'{self.id} does not support get_return_value') + + def find_framework(self, + name: str, + env: 'Environment', + extra_dirs: T.List[str], + allow_system: bool = True) -> T.Optional[T.List[str]]: + raise EnvironmentException(f'{self.id} does not support find_framework') + + def find_framework_paths(self, env: 'Environment') -> T.List[str]: + raise EnvironmentException(f'{self.id} does not support find_framework_paths') + + def attribute_check_func(self, name: str) -> str: + raise EnvironmentException(f'{self.id} does not support attribute checks') + + def get_pch_suffix(self) -> str: + raise EnvironmentException(f'{self.id} does not support pre compiled headers') + + def get_pch_name(self, name: str) -> str: + raise EnvironmentException(f'{self.id} does not support pre compiled headers') + + def get_pch_use_args(self, pch_dir: str, header: str) -> T.List[str]: + raise EnvironmentException(f'{self.id} does not support pre compiled headers') + + def get_has_func_attribute_extra_args(self, name: str) -> T.List[str]: + raise EnvironmentException(f'{self.id} does not support function attributes') + + def name_string(self) -> str: + return ' '.join(self.exelist) + + @abc.abstractmethod + def sanity_check(self, work_dir: str, environment: 'Environment') -> None: + """Check that this compiler actually works. 
+ + This should provide a simple compile/link test. Something as simple as: + ```python + main(): return 0 + ``` + is good enough here. + """ + + def split_shlib_to_parts(self, fname: str) -> T.Tuple[T.Optional[str], str]: + return None, fname + + def get_dependency_gen_args(self, outtarget: str, outfile: str) -> T.List[str]: + return [] + + def get_std_exe_link_args(self) -> T.List[str]: + # TODO: is this a linker property? + return [] + + def get_include_args(self, path: str, is_system: bool) -> T.List[str]: + return [] + + def depfile_for_object(self, objfile: str) -> T.Optional[str]: + return objfile + '.' + self.get_depfile_suffix() + + def get_depfile_suffix(self) -> str: + raise EnvironmentException(f'{self.id} does not implement get_depfile_suffix') + + def get_no_stdinc_args(self) -> T.List[str]: + """Arguments to turn off default inclusion of standard libraries.""" + return [] + + def get_warn_args(self, level: str) -> T.List[str]: + return [] + + def get_werror_args(self) -> T.List[str]: + return [] + + @abc.abstractmethod + def get_optimization_args(self, optimization_level: str) -> T.List[str]: + pass + + def get_module_incdir_args(self) -> T.Tuple[str, ...]: + raise EnvironmentException(f'{self.id} does not implement get_module_incdir_args') + + def get_module_outdir_args(self, path: str) -> T.List[str]: + raise EnvironmentException(f'{self.id} does not implement get_module_outdir_args') + + def module_name_to_filename(self, module_name: str) -> str: + raise EnvironmentException(f'{self.id} does not implement module_name_to_filename') + + def get_compiler_check_args(self, mode: CompileCheckMode) -> T.List[str]: + """Arguments to pass the compiler and/or linker for checks. + + The default implementation turns off optimizations. + + Examples of things that go here: + - extra arguments for error checking + - Arguments required to make the compiler exit with a non-zero status + when something is wrong. + """ + return self.get_no_optimization_args() + + def get_no_optimization_args(self) -> T.List[str]: + """Arguments to the compiler to turn off all optimizations.""" + return [] + + def build_wrapper_args(self, env: 'Environment', + extra_args: T.Union[None, CompilerArgs, T.List[str], T.Callable[[CompileCheckMode], T.List[str]]], + dependencies: T.Optional[T.List['Dependency']], + mode: CompileCheckMode = CompileCheckMode.COMPILE) -> CompilerArgs: + """Arguments to pass the build_wrapper helper. + + This generally needs to be set on a per-language basis. It provides + a hook for languages to handle dependencies and extra args. The base + implementation handles the most common cases, namely adding the + check_arguments, unwrapping dependencies, and appending extra args. 
+ """ + if callable(extra_args): + extra_args = extra_args(mode) + if extra_args is None: + extra_args = [] + if dependencies is None: + dependencies = [] + + # Collect compiler arguments + args = self.compiler_args(self.get_compiler_check_args(mode)) + for d in dependencies: + # Add compile flags needed by dependencies + args += d.get_compile_args() + if mode is CompileCheckMode.LINK: + # Add link flags needed to find dependencies + args += d.get_link_args() + + if mode is CompileCheckMode.COMPILE: + # Add DFLAGS from the env + args += env.coredata.get_external_args(self.for_machine, self.language) + elif mode is CompileCheckMode.LINK: + # Add LDFLAGS from the env + args += env.coredata.get_external_link_args(self.for_machine, self.language) + # extra_args must override all other arguments, so we add them last + args += extra_args + return args + + @contextlib.contextmanager + def _build_wrapper(self, code: 'mesonlib.FileOrString', env: 'Environment', + extra_args: T.Union[None, CompilerArgs, T.List[str], T.Callable[[CompileCheckMode], T.List[str]]] = None, + dependencies: T.Optional[T.List['Dependency']] = None, + mode: CompileCheckMode = CompileCheckMode.COMPILE, want_output: bool = False, + disable_cache: bool = False) -> T.Iterator[T.Optional[CompileResult]]: + """Helper for getting a cached value when possible. + + This method isn't meant to be called externally, it's mean to be + wrapped by other methods like compiles() and links(). + """ + args = self.build_wrapper_args(env, extra_args, dependencies, mode) + if disable_cache or want_output: + with self.compile(code, extra_args=args, mode=mode, want_output=want_output, temp_dir=env.scratch_dir) as r: + yield r + else: + with self.cached_compile(code, env.coredata, extra_args=args, mode=mode, temp_dir=env.scratch_dir) as r: + yield r + + def compiles(self, code: 'mesonlib.FileOrString', env: 'Environment', *, + extra_args: T.Union[None, T.List[str], CompilerArgs, T.Callable[[CompileCheckMode], T.List[str]]] = None, + dependencies: T.Optional[T.List['Dependency']] = None, + mode: CompileCheckMode = CompileCheckMode.COMPILE, + disable_cache: bool = False) -> T.Tuple[bool, bool]: + with self._build_wrapper(code, env, extra_args, dependencies, mode, disable_cache=disable_cache) as p: + return p.returncode == 0, p.cached + + def links(self, code: 'mesonlib.FileOrString', env: 'Environment', *, + compiler: T.Optional['Compiler'] = None, + extra_args: T.Union[None, T.List[str], CompilerArgs, T.Callable[[CompileCheckMode], T.List[str]]] = None, + dependencies: T.Optional[T.List['Dependency']] = None, + disable_cache: bool = False) -> T.Tuple[bool, bool]: + if compiler: + with compiler._build_wrapper(code, env, dependencies=dependencies, want_output=True) as r: + objfile = mesonlib.File.from_absolute_file(r.output_name) + return self.compiles(objfile, env, extra_args=extra_args, + dependencies=dependencies, mode=CompileCheckMode.LINK, disable_cache=True) + + return self.compiles(code, env, extra_args=extra_args, + dependencies=dependencies, mode=CompileCheckMode.LINK, disable_cache=disable_cache) + + def get_feature_args(self, kwargs: T.Dict[str, T.Any], build_to_src: str) -> T.List[str]: + """Used by D for extra language features.""" + # TODO: using a TypeDict here would improve this + raise EnvironmentException(f'{self.id} does not implement get_feature_args') + + def get_prelink_args(self, prelink_name: str, obj_list: T.List[str]) -> T.List[str]: + raise EnvironmentException(f'{self.id} does not know how to do prelinking.') + + def 
rsp_file_syntax(self) -> 'RSPFileSyntax': + """The format of the RSP file that this compiler supports. + + If `self.can_linker_accept_rsp()` returns True, then this needs to + be implemented + """ + return self.linker.rsp_file_syntax() + + def get_debug_args(self, is_debug: bool) -> T.List[str]: + """Arguments required for a debug build.""" + return [] + + def get_no_warn_args(self) -> T.List[str]: + """Arguments to completely disable warnings.""" + return [] + + def needs_static_linker(self) -> bool: + raise NotImplementedError(f'There is no static linker for {self.language}') + + def get_preprocessor(self) -> Compiler: + """Get compiler's preprocessor. + """ + raise EnvironmentException(f'{self.get_id()} does not support preprocessor') + +def get_global_options(lang: str, + comp: T.Type[Compiler], + for_machine: MachineChoice, + env: 'Environment') -> 'KeyedOptionDictType': + """Retrieve options that apply to all compilers for a given language.""" + description = f'Extra arguments passed to the {lang}' + argkey = OptionKey('args', lang=lang, machine=for_machine) + largkey = argkey.evolve('link_args') + envkey = argkey.evolve('env_args') + + comp_key = argkey if argkey in env.options else envkey + + comp_options = env.options.get(comp_key, []) + link_options = env.options.get(largkey, []) + + cargs = coredata.UserArrayOption( + description + ' compiler', + comp_options, split_args=True, user_input=True, allow_dups=True) + + largs = coredata.UserArrayOption( + description + ' linker', + link_options, split_args=True, user_input=True, allow_dups=True) + + if comp.INVOKES_LINKER and comp_key == envkey: + # If the compiler acts as a linker driver, and we're using the + # environment variable flags for both the compiler and linker + # arguments, then put the compiler flags in the linker flags as well. + # This is how autotools works, and the env vars feature is for + # autotools compatibility. + largs.extend_value(comp_options) + + opts: 'KeyedOptionDictType' = {argkey: cargs, largkey: largs} + + return opts diff --git a/vendored-meson/meson/mesonbuild/compilers/cpp.py b/vendored-meson/meson/mesonbuild/compilers/cpp.py new file mode 100644 index 000000000000..7e8c327f0334 --- /dev/null +++ b/vendored-meson/meson/mesonbuild/compilers/cpp.py @@ -0,0 +1,964 @@ +# Copyright 2012-2017 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from __future__ import annotations + +import copy +import functools +import os.path +import typing as T + +from .. import coredata +from .. 
import mlog +from ..mesonlib import MesonException, version_compare, OptionKey + +from .compilers import ( + gnu_winlibs, + msvc_winlibs, + Compiler, + CompileCheckMode, +) +from .c_function_attributes import CXX_FUNC_ATTRIBUTES, C_FUNC_ATTRIBUTES +from .mixins.clike import CLikeCompiler +from .mixins.ccrx import CcrxCompiler +from .mixins.ti import TICompiler +from .mixins.arm import ArmCompiler, ArmclangCompiler +from .mixins.visualstudio import MSVCCompiler, ClangClCompiler +from .mixins.gnu import GnuCompiler, gnu_common_warning_args, gnu_cpp_warning_args +from .mixins.intel import IntelGnuLikeCompiler, IntelVisualStudioLikeCompiler +from .mixins.clang import ClangCompiler +from .mixins.elbrus import ElbrusCompiler +from .mixins.pgi import PGICompiler +from .mixins.emscripten import EmscriptenMixin +from .mixins.metrowerks import MetrowerksCompiler +from .mixins.metrowerks import mwccarm_instruction_set_args, mwcceppc_instruction_set_args + +if T.TYPE_CHECKING: + from ..coredata import MutableKeyedOptionDictType, KeyedOptionDictType + from ..dependencies import Dependency + from ..envconfig import MachineInfo + from ..environment import Environment + from ..linkers.linkers import DynamicLinker + from ..mesonlib import MachineChoice + from ..programs import ExternalProgram + CompilerMixinBase = CLikeCompiler +else: + CompilerMixinBase = object + + +def non_msvc_eh_options(eh: str, args: T.List[str]) -> None: + if eh == 'none': + args.append('-fno-exceptions') + elif eh in {'s', 'c'}: + mlog.warning(f'non-MSVC compilers do not support {eh} exception handling. ' + 'You may want to set eh to \'default\'.', fatal=False) + +class CPPCompiler(CLikeCompiler, Compiler): + def attribute_check_func(self, name: str) -> str: + try: + return CXX_FUNC_ATTRIBUTES.get(name, C_FUNC_ATTRIBUTES[name]) + except KeyError: + raise MesonException(f'Unknown function attribute "{name}"') + + language = 'cpp' + + def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool, + info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): + # If a child ObjCPP class has already set it, don't set it ourselves + Compiler.__init__(self, ccache, exelist, version, for_machine, info, + is_cross=is_cross, linker=linker, + full_version=full_version) + CLikeCompiler.__init__(self, exe_wrapper) + + @classmethod + def get_display_language(cls) -> str: + return 'C++' + + def get_no_stdinc_args(self) -> T.List[str]: + return ['-nostdinc++'] + + def sanity_check(self, work_dir: str, environment: 'Environment') -> None: + code = 'class breakCCompiler;int main(void) { return 0; }\n' + return self._sanity_check_impl(work_dir, environment, 'sanitycheckcpp.cc', code) + + def get_compiler_check_args(self, mode: CompileCheckMode) -> T.List[str]: + # -fpermissive allows non-conforming code to compile which is necessary + # for many C++ checks. Particularly, the has_header_symbol check is + # too strict without this and always fails. 
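+        # [Illustrative note, not upstream Meson code] A strictly conforming
+        # C++ front end may reject the auto-generated probe code itself, so
+        # without -fpermissive a check could fail even though the header or
+        # symbol being probed is perfectly usable.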
+        return super().get_compiler_check_args(mode) + ['-fpermissive']
+
+    def has_header_symbol(self, hname: str, symbol: str, prefix: str,
+                          env: 'Environment', *,
+                          extra_args: T.Union[None, T.List[str], T.Callable[[CompileCheckMode], T.List[str]]] = None,
+                          dependencies: T.Optional[T.List['Dependency']] = None) -> T.Tuple[bool, bool]:
+        # Check if it's a C-like symbol
+        found, cached = super().has_header_symbol(hname, symbol, prefix, env,
+                                                  extra_args=extra_args,
+                                                  dependencies=dependencies)
+        if found:
+            return True, cached
+        # Check if it's a class or a template
+        if extra_args is None:
+            extra_args = []
+        t = f'''{prefix}
+        #include <{hname}>
+        using {symbol};
+        int main(void) {{ return 0; }}'''
+        return self.compiles(t, env, extra_args=extra_args,
+                             dependencies=dependencies)
+
+    def _test_cpp_std_arg(self, cpp_std_value: str) -> bool:
+        # Test whether the compiler understands a -std=XY argument
+        assert cpp_std_value.startswith('-std=')
+
+        # This test does not use has_multi_arguments() for two reasons:
+        # 1. has_multi_arguments() requires an env argument, which the compiler
+        #    object does not have at this point.
+        # 2. even if it did have an env object, that might contain another more
+        #    recent -std= argument, which might lead to a cascaded failure.
+        CPP_TEST = 'int i = static_cast<int>(0);'
+        with self.compile(CPP_TEST, extra_args=[cpp_std_value], mode=CompileCheckMode.COMPILE) as p:
+            if p.returncode == 0:
+                mlog.debug(f'Compiler accepts {cpp_std_value}:', 'YES')
+                return True
+            else:
+                mlog.debug(f'Compiler accepts {cpp_std_value}:', 'NO')
+                return False
+
+    @functools.lru_cache()
+    def _find_best_cpp_std(self, cpp_std: str) -> str:
+        # The initial version mapping approach to make falling back
+        # from '-std=c++14' to '-std=c++1y' was too brittle. For instance,
+        # Apple's Clang uses a different versioning scheme to upstream LLVM,
+        # making the whole detection logic awfully brittle. Instead, let's
+        # just see if feeding GCC or Clang our '-std=' setting works, and
+        # if not, try the fallback argument.
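+        # [Illustrative note, not upstream Meson code] For example, GCC 4.8
+        # rejects '-std=c++14' but accepts the draft spelling '-std=c++1y',
+        # so for cpp_std='c++14' this method probes both and returns
+        # '-std=c++1y' on such a compiler.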
+ CPP_FALLBACKS = { + 'c++11': 'c++0x', + 'gnu++11': 'gnu++0x', + 'c++14': 'c++1y', + 'gnu++14': 'gnu++1y', + 'c++17': 'c++1z', + 'gnu++17': 'gnu++1z', + 'c++20': 'c++2a', + 'gnu++20': 'gnu++2a', + } + + # Currently, remapping is only supported for Clang, Elbrus and GCC + assert self.id in frozenset(['clang', 'lcc', 'gcc', 'emscripten', 'armltdclang', 'intel-llvm']) + + if cpp_std not in CPP_FALLBACKS: + # 'c++03' and 'c++98' don't have fallback types + return '-std=' + cpp_std + + for i in (cpp_std, CPP_FALLBACKS[cpp_std]): + cpp_std_value = '-std=' + i + if self._test_cpp_std_arg(cpp_std_value): + return cpp_std_value + + raise MesonException(f'C++ Compiler does not support -std={cpp_std}') + + def get_options(self) -> 'MutableKeyedOptionDictType': + opts = super().get_options() + key = OptionKey('std', machine=self.for_machine, lang=self.language) + opts.update({ + key: coredata.UserComboOption( + 'C++ language standard to use', + ['none'], + 'none', + ), + }) + return opts + + +class _StdCPPLibMixin(CompilerMixinBase): + + """Detect whether to use libc++ or libstdc++.""" + + @functools.lru_cache(None) + def language_stdlib_only_link_flags(self, env: Environment) -> T.List[str]: + """Detect the C++ stdlib and default search dirs + + As an optimization, this method will cache the value, to avoid building the same values over and over + + :param env: An Environment object + :raises MesonException: If a stdlib cannot be determined + """ + + # We need to apply the search prefix here, as these link arguments may + # be passed to a different compiler with a different set of default + # search paths, such as when using Clang for C/C++ and gfortran for + # fortran. + search_dirs = [f'-L{d}' for d in self.get_compiler_dirs(env, 'libraries')] + + machine = env.machines[self.for_machine] + assert machine is not None, 'for mypy' + + # We need to determine whether to use libc++ or libstdc++. We can't + # really know the answer in most cases, only the most likely answer, + # because a user can install things themselves or build custom images. + search_order: T.List[str] = [] + if machine.system in {'android', 'darwin', 'dragonfly', 'freebsd', 'netbsd', 'openbsd'}: + search_order = ['c++', 'stdc++'] + else: + search_order = ['stdc++', 'c++'] + for lib in search_order: + if self.find_library(lib, env, []) is not None: + return search_dirs + [f'-l{lib}'] + # TODO: maybe a bug exception? 
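+        # [Illustrative note, not upstream Meson code] On a typical
+        # glibc-based Linux system the loop above returns
+        # search_dirs + ['-lstdc++'], and on macOS or the BSDs
+        # search_dirs + ['-lc++']; reaching this point means neither stdlib
+        # could be located via find_library().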
+ raise MesonException('Could not detect either libc++ or libstdc++ as your C++ stdlib implementation.') + + +class ClangCPPCompiler(_StdCPPLibMixin, ClangCompiler, CPPCompiler): + def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool, + info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + defines: T.Optional[T.Dict[str, str]] = None, + full_version: T.Optional[str] = None): + CPPCompiler.__init__(self, ccache, exelist, version, for_machine, is_cross, + info, exe_wrapper, linker=linker, full_version=full_version) + ClangCompiler.__init__(self, defines) + default_warn_args = ['-Wall', '-Winvalid-pch'] + self.warn_args = {'0': [], + '1': default_warn_args, + '2': default_warn_args + ['-Wextra'], + '3': default_warn_args + ['-Wextra', '-Wpedantic'], + 'everything': ['-Weverything']} + + def get_options(self) -> 'MutableKeyedOptionDictType': + opts = CPPCompiler.get_options(self) + key = OptionKey('key', machine=self.for_machine, lang=self.language) + opts.update({ + key.evolve('eh'): coredata.UserComboOption( + 'C++ exception handling type.', + ['none', 'default', 'a', 's', 'sc'], + 'default', + ), + key.evolve('rtti'): coredata.UserBooleanOption('Enable RTTI', True), + }) + opts[key.evolve('std')].choices = [ + 'none', 'c++98', 'c++03', 'c++11', 'c++14', 'c++17', 'c++1z', + 'c++2a', 'c++20', 'gnu++11', 'gnu++14', 'gnu++17', 'gnu++1z', + 'gnu++2a', 'gnu++20', + ] + if self.info.is_windows() or self.info.is_cygwin(): + opts.update({ + key.evolve('winlibs'): coredata.UserArrayOption( + 'Standard Win libraries to link against', + gnu_winlibs, + ), + }) + return opts + + def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]: + args: T.List[str] = [] + key = OptionKey('std', machine=self.for_machine, lang=self.language) + std = options[key] + if std.value != 'none': + args.append(self._find_best_cpp_std(std.value)) + + non_msvc_eh_options(options[key.evolve('eh')].value, args) + + if not options[key.evolve('rtti')].value: + args.append('-fno-rtti') + + return args + + def get_option_link_args(self, options: 'KeyedOptionDictType') -> T.List[str]: + if self.info.is_windows() or self.info.is_cygwin(): + # without a typedict mypy can't understand this. 
+ key = OptionKey('winlibs', machine=self.for_machine, lang=self.language) + libs = options[key].value.copy() + assert isinstance(libs, list) + for l in libs: + assert isinstance(l, str) + return libs + return [] + + +class ArmLtdClangCPPCompiler(ClangCPPCompiler): + + id = 'armltdclang' + + +class AppleClangCPPCompiler(ClangCPPCompiler): + pass + + +class EmscriptenCPPCompiler(EmscriptenMixin, ClangCPPCompiler): + + id = 'emscripten' + + def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool, + info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + defines: T.Optional[T.Dict[str, str]] = None, + full_version: T.Optional[str] = None): + if not is_cross: + raise MesonException('Emscripten compiler can only be used for cross compilation.') + if not version_compare(version, '>=1.39.19'): + raise MesonException('Meson requires Emscripten >= 1.39.19') + ClangCPPCompiler.__init__(self, ccache, exelist, version, for_machine, is_cross, + info, exe_wrapper=exe_wrapper, linker=linker, + defines=defines, full_version=full_version) + + def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]: + args: T.List[str] = [] + key = OptionKey('std', machine=self.for_machine, lang=self.language) + std = options[key] + if std.value != 'none': + args.append(self._find_best_cpp_std(std.value)) + return args + + +class ArmclangCPPCompiler(ArmclangCompiler, CPPCompiler): + ''' + Keil armclang + ''' + + def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool, + info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): + CPPCompiler.__init__(self, ccache, exelist, version, for_machine, is_cross, + info, exe_wrapper, linker=linker, full_version=full_version) + ArmclangCompiler.__init__(self) + default_warn_args = ['-Wall', '-Winvalid-pch'] + self.warn_args = {'0': [], + '1': default_warn_args, + '2': default_warn_args + ['-Wextra'], + '3': default_warn_args + ['-Wextra', '-Wpedantic'], + 'everything': ['-Weverything']} + + def get_options(self) -> 'MutableKeyedOptionDictType': + opts = CPPCompiler.get_options(self) + key = OptionKey('std', machine=self.for_machine, lang=self.language) + opts.update({ + key.evolve('eh'): coredata.UserComboOption( + 'C++ exception handling type.', + ['none', 'default', 'a', 's', 'sc'], + 'default', + ), + }) + opts[key].choices = [ + 'none', 'c++98', 'c++03', 'c++11', 'c++14', 'c++17', 'gnu++98', + 'gnu++03', 'gnu++11', 'gnu++14', 'gnu++17', + ] + return opts + + def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]: + args: T.List[str] = [] + key = OptionKey('std', machine=self.for_machine, lang=self.language) + std = options[key] + if std.value != 'none': + args.append('-std=' + std.value) + + non_msvc_eh_options(options[key.evolve('eh')].value, args) + + return args + + def get_option_link_args(self, options: 'KeyedOptionDictType') -> T.List[str]: + return [] + + +class GnuCPPCompiler(_StdCPPLibMixin, GnuCompiler, CPPCompiler): + def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool, + info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + defines: T.Optional[T.Dict[str, str]] = None, + full_version: T.Optional[str] = 
None): + CPPCompiler.__init__(self, ccache, exelist, version, for_machine, is_cross, + info, exe_wrapper, linker=linker, full_version=full_version) + GnuCompiler.__init__(self, defines) + default_warn_args = ['-Wall', '-Winvalid-pch'] + self.warn_args = {'0': [], + '1': default_warn_args, + '2': default_warn_args + ['-Wextra'], + '3': default_warn_args + ['-Wextra', '-Wpedantic'], + 'everything': (default_warn_args + ['-Wextra', '-Wpedantic'] + + self.supported_warn_args(gnu_common_warning_args) + + self.supported_warn_args(gnu_cpp_warning_args))} + + def get_options(self) -> 'MutableKeyedOptionDictType': + key = OptionKey('std', machine=self.for_machine, lang=self.language) + opts = CPPCompiler.get_options(self) + opts.update({ + key.evolve('eh'): coredata.UserComboOption( + 'C++ exception handling type.', + ['none', 'default', 'a', 's', 'sc'], + 'default', + ), + key.evolve('rtti'): coredata.UserBooleanOption('Enable RTTI', True), + key.evolve('debugstl'): coredata.UserBooleanOption( + 'STL debug mode', + False, + ) + }) + cppstd_choices = [ + 'none', 'c++98', 'c++03', 'c++11', 'c++14', 'c++17', 'c++1z', + 'c++2a', 'c++20', 'gnu++03', 'gnu++11', 'gnu++14', 'gnu++17', + 'gnu++1z', 'gnu++2a', 'gnu++20', + ] + if version_compare(self.version, '>=12.2.0'): + cppstd_choices.append('c++23') + cppstd_choices.append('gnu++23') + opts[key].choices = cppstd_choices + if self.info.is_windows() or self.info.is_cygwin(): + opts.update({ + key.evolve('winlibs'): coredata.UserArrayOption( + 'Standard Win libraries to link against', + gnu_winlibs, + ), + }) + return opts + + def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]: + args: T.List[str] = [] + key = OptionKey('std', machine=self.for_machine, lang=self.language) + std = options[key] + if std.value != 'none': + args.append(self._find_best_cpp_std(std.value)) + + non_msvc_eh_options(options[key.evolve('eh')].value, args) + + if not options[key.evolve('rtti')].value: + args.append('-fno-rtti') + + if options[key.evolve('debugstl')].value: + args.append('-D_GLIBCXX_DEBUG=1') + return args + + def get_option_link_args(self, options: 'KeyedOptionDictType') -> T.List[str]: + if self.info.is_windows() or self.info.is_cygwin(): + # without a typedict mypy can't understand this. 
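+ # (options[key].value is only known to mypy as a generic option value, hence
+ # the runtime asserts on the list-of-str shape below.)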
+ key = OptionKey('winlibs', machine=self.for_machine, lang=self.language) + libs = options[key].value.copy() + assert isinstance(libs, list) + for l in libs: + assert isinstance(l, str) + return libs + return [] + + def get_pch_use_args(self, pch_dir: str, header: str) -> T.List[str]: + return ['-fpch-preprocess', '-include', os.path.basename(header)] + + +class PGICPPCompiler(PGICompiler, CPPCompiler): + def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool, + info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): + CPPCompiler.__init__(self, ccache, exelist, version, for_machine, is_cross, + info, exe_wrapper, linker=linker, full_version=full_version) + PGICompiler.__init__(self) + + +class NvidiaHPC_CPPCompiler(PGICompiler, CPPCompiler): + + id = 'nvidia_hpc' + + def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool, + info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): + CPPCompiler.__init__(self, ccache, exelist, version, for_machine, is_cross, + info, exe_wrapper, linker=linker, full_version=full_version) + PGICompiler.__init__(self) + + +class ElbrusCPPCompiler(ElbrusCompiler, CPPCompiler): + def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool, + info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + defines: T.Optional[T.Dict[str, str]] = None, + full_version: T.Optional[str] = None): + CPPCompiler.__init__(self, ccache, exelist, version, for_machine, is_cross, + info, exe_wrapper, linker=linker, full_version=full_version) + ElbrusCompiler.__init__(self) + + def get_options(self) -> 'MutableKeyedOptionDictType': + opts = CPPCompiler.get_options(self) + + cpp_stds = ['none', 'c++98', 'gnu++98'] + if version_compare(self.version, '>=1.20.00'): + cpp_stds += ['c++03', 'c++0x', 'c++11', 'gnu++03', 'gnu++0x', 'gnu++11'] + if version_compare(self.version, '>=1.21.00') and version_compare(self.version, '<1.22.00'): + cpp_stds += ['c++14', 'gnu++14', 'c++1y', 'gnu++1y'] + if version_compare(self.version, '>=1.22.00'): + cpp_stds += ['c++14', 'gnu++14'] + if version_compare(self.version, '>=1.23.00'): + cpp_stds += ['c++1y', 'gnu++1y'] + if version_compare(self.version, '>=1.24.00'): + cpp_stds += ['c++1z', 'c++17', 'gnu++1z', 'gnu++17'] + if version_compare(self.version, '>=1.25.00'): + cpp_stds += ['c++2a', 'gnu++2a'] + if version_compare(self.version, '>=1.26.00'): + cpp_stds += ['c++20', 'gnu++20'] + + key = OptionKey('std', machine=self.for_machine, lang=self.language) + opts.update({ + key.evolve('eh'): coredata.UserComboOption( + 'C++ exception handling type.', + ['none', 'default', 'a', 's', 'sc'], + 'default', + ), + key.evolve('debugstl'): coredata.UserBooleanOption( + 'STL debug mode', + False, + ), + }) + opts[key].choices = cpp_stds + return opts + + # Elbrus C++ compiler does not have lchmod, but there is only linker warning, not compiler error. + # So we should explicitly fail at this case. 
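+ # e.g. a has_function('lchmod') probe from a meson.build file would otherwise
+ # link with only a warning and report a false positive.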
+ def has_function(self, funcname: str, prefix: str, env: 'Environment', *, + extra_args: T.Optional[T.List[str]] = None, + dependencies: T.Optional[T.List['Dependency']] = None) -> T.Tuple[bool, bool]: + if funcname == 'lchmod': + return False, False + else: + return super().has_function(funcname, prefix, env, + extra_args=extra_args, + dependencies=dependencies) + + # Elbrus C++ compiler does not support RTTI, so don't check for it. + def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]: + args: T.List[str] = [] + key = OptionKey('std', machine=self.for_machine, lang=self.language) + std = options[key] + if std.value != 'none': + args.append(self._find_best_cpp_std(std.value)) + + non_msvc_eh_options(options[key.evolve('eh')].value, args) + + if options[key.evolve('debugstl')].value: + args.append('-D_GLIBCXX_DEBUG=1') + return args + + +class IntelCPPCompiler(IntelGnuLikeCompiler, CPPCompiler): + def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool, + info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): + CPPCompiler.__init__(self, ccache, exelist, version, for_machine, is_cross, + info, exe_wrapper, linker=linker, full_version=full_version) + IntelGnuLikeCompiler.__init__(self) + self.lang_header = 'c++-header' + default_warn_args = ['-Wall', '-w3', '-Wpch-messages'] + self.warn_args = {'0': [], + '1': default_warn_args + ['-diag-disable:remark'], + '2': default_warn_args + ['-Wextra', '-diag-disable:remark'], + '3': default_warn_args + ['-Wextra', '-diag-disable:remark'], + 'everything': default_warn_args + ['-Wextra']} + + def get_options(self) -> 'MutableKeyedOptionDictType': + opts = CPPCompiler.get_options(self) + # Every Unix compiler under the sun seems to accept -std=c++03, + # with the exception of ICC. 
Instead of preventing the user from + # globally requesting C++03, we transparently remap it to C++98 + c_stds = ['c++98', 'c++03'] + g_stds = ['gnu++98', 'gnu++03'] + if version_compare(self.version, '>=15.0.0'): + c_stds += ['c++11', 'c++14'] + g_stds += ['gnu++11'] + if version_compare(self.version, '>=16.0.0'): + c_stds += ['c++17'] + if version_compare(self.version, '>=17.0.0'): + g_stds += ['gnu++14'] + if version_compare(self.version, '>=19.1.0'): + c_stds += ['c++2a'] + g_stds += ['gnu++2a'] + + key = OptionKey('std', machine=self.for_machine, lang=self.language) + opts.update({ + key.evolve('eh'): coredata.UserComboOption( + 'C++ exception handling type.', + ['none', 'default', 'a', 's', 'sc'], + 'default', + ), + key.evolve('rtti'): coredata.UserBooleanOption('Enable RTTI', True), + key.evolve('debugstl'): coredata.UserBooleanOption('STL debug mode', False), + }) + opts[key].choices = ['none'] + c_stds + g_stds + return opts + + def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]: + args: T.List[str] = [] + key = OptionKey('std', machine=self.for_machine, lang=self.language) + std = options[key] + if std.value != 'none': + remap_cpp03 = { + 'c++03': 'c++98', + 'gnu++03': 'gnu++98' + } + args.append('-std=' + remap_cpp03.get(std.value, std.value)) + if options[key.evolve('eh')].value == 'none': + args.append('-fno-exceptions') + if not options[key.evolve('rtti')].value: + args.append('-fno-rtti') + if options[key.evolve('debugstl')].value: + args.append('-D_GLIBCXX_DEBUG=1') + return args + + def get_option_link_args(self, options: 'KeyedOptionDictType') -> T.List[str]: + return [] + + +class IntelLLVMCPPCompiler(ClangCPPCompiler): + + id = 'intel-llvm' + + +class VisualStudioLikeCPPCompilerMixin(CompilerMixinBase): + + """Mixin for C++ specific method overrides in MSVC-like compilers.""" + + VC_VERSION_MAP = { + 'none': (True, None), + 'vc++11': (True, 11), + 'vc++14': (True, 14), + 'vc++17': (True, 17), + 'vc++20': (True, 20), + 'vc++latest': (True, "latest"), + 'c++11': (False, 11), + 'c++14': (False, 14), + 'c++17': (False, 17), + 'c++20': (False, 20), + 'c++latest': (False, "latest"), + } + + def get_option_link_args(self, options: 'KeyedOptionDictType') -> T.List[str]: + # need a typeddict for this + key = OptionKey('winlibs', machine=self.for_machine, lang=self.language) + return T.cast('T.List[str]', options[key].value[:]) + + def _get_options_impl(self, opts: 'MutableKeyedOptionDictType', cpp_stds: T.List[str]) -> 'MutableKeyedOptionDictType': + key = OptionKey('std', machine=self.for_machine, lang=self.language) + opts.update({ + key.evolve('eh'): coredata.UserComboOption( + 'C++ exception handling type.', + ['none', 'default', 'a', 's', 'sc'], + 'default', + ), + key.evolve('rtti'): coredata.UserBooleanOption('Enable RTTI', True), + key.evolve('winlibs'): coredata.UserArrayOption( + 'Windows libs to link against.', + msvc_winlibs, + ), + }) + opts[key.evolve('std')].choices = cpp_stds + return opts + + def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]: + args: T.List[str] = [] + key = OptionKey('std', machine=self.for_machine, lang=self.language) + + eh = options[key.evolve('eh')] + if eh.value == 'default': + args.append('/EHsc') + elif eh.value == 'none': + args.append('/EHs-c-') + else: + args.append('/EH' + eh.value) + + if not options[key.evolve('rtti')].value: + args.append('/GR-') + + permissive, ver = self.VC_VERSION_MAP[options[key].value] + + if ver is not None: + args.append(f'/std:c++{ver}') + + if 
not permissive: + args.append('/permissive-') + + return args + + def get_compiler_check_args(self, mode: CompileCheckMode) -> T.List[str]: + # XXX: this is a hack because so much GnuLike stuff is in the base CPPCompiler class. + return Compiler.get_compiler_check_args(self, mode) + + +class CPP11AsCPP14Mixin(CompilerMixinBase): + + """Mixin class for VisualStudio and ClangCl to replace C++11 std with C++14. + + This is a limitation of Clang and MSVC that ICL doesn't share. + """ + + def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]: + # Note: there is no explicit flag for supporting C++11; we attempt to do the best we can + # which means setting the C++ standard version to C++14, in compilers that support it + # (i.e., after VS2015U3) + # if one is using anything before that point, one cannot set the standard. + key = OptionKey('std', machine=self.for_machine, lang=self.language) + if options[key].value in {'vc++11', 'c++11'}: + mlog.warning(self.id, 'does not support C++11;', + 'attempting best effort; setting the standard to C++14', + once=True, fatal=False) + # Don't mutate anything we're going to change, we need to use + # deepcopy since we're messing with members, and we can't simply + # copy the members because the option proxy doesn't support it. + options = copy.deepcopy(options) + if options[key].value == 'vc++11': + options[key].value = 'vc++14' + else: + options[key].value = 'c++14' + return super().get_option_compile_args(options) + + +class VisualStudioCPPCompiler(CPP11AsCPP14Mixin, VisualStudioLikeCPPCompilerMixin, MSVCCompiler, CPPCompiler): + + id = 'msvc' + + def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_machine: MachineChoice, + is_cross: bool, info: 'MachineInfo', target: str, + exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): + CPPCompiler.__init__(self, ccache, exelist, version, for_machine, is_cross, + info, exe_wrapper, linker=linker, full_version=full_version) + MSVCCompiler.__init__(self, target) + + # By default, MSVC has a broken __cplusplus define that pretends to be c++98: + # https://docs.microsoft.com/en-us/cpp/build/reference/zc-cplusplus?view=msvc-160 + # Pass the flag to enable a truthful define, if possible. 
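+ # e.g. with /Zc:__cplusplus and /std:c++17, __cplusplus reports 201703L
+ # instead of the legacy 199711L.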
+ if version_compare(self.version, '>= 19.14.26428'): + self.always_args = self.always_args + ['/Zc:__cplusplus'] + + def get_options(self) -> 'MutableKeyedOptionDictType': + cpp_stds = ['none', 'c++11', 'vc++11'] + # Visual Studio 2015 and later + if version_compare(self.version, '>=19'): + cpp_stds.extend(['c++14', 'c++latest', 'vc++latest']) + # Visual Studio 2017 and later + if version_compare(self.version, '>=19.11'): + cpp_stds.extend(['vc++14', 'c++17', 'vc++17']) + if version_compare(self.version, '>=19.29'): + cpp_stds.extend(['c++20', 'vc++20']) + return self._get_options_impl(super().get_options(), cpp_stds) + + def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]: + key = OptionKey('std', machine=self.for_machine, lang=self.language) + if options[key].value != 'none' and version_compare(self.version, '<19.00.24210'): + mlog.warning('This version of MSVC does not support cpp_std arguments', fatal=False) + options = copy.copy(options) + options[key].value = 'none' + + args = super().get_option_compile_args(options) + + if version_compare(self.version, '<19.11'): + try: + i = args.index('/permissive-') + except ValueError: + return args + del args[i] + return args + +class ClangClCPPCompiler(CPP11AsCPP14Mixin, VisualStudioLikeCPPCompilerMixin, ClangClCompiler, CPPCompiler): + + id = 'clang-cl' + + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, + is_cross: bool, info: 'MachineInfo', target: str, + exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): + CPPCompiler.__init__(self, [], exelist, version, for_machine, is_cross, + info, exe_wrapper, linker=linker, full_version=full_version) + ClangClCompiler.__init__(self, target) + + def get_options(self) -> 'MutableKeyedOptionDictType': + cpp_stds = ['none', 'c++11', 'vc++11', 'c++14', 'vc++14', 'c++17', 'vc++17', 'c++20', 'vc++20', 'c++latest'] + return self._get_options_impl(super().get_options(), cpp_stds) + + +class IntelClCPPCompiler(VisualStudioLikeCPPCompilerMixin, IntelVisualStudioLikeCompiler, CPPCompiler): + + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, + is_cross: bool, info: 'MachineInfo', target: str, + exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): + CPPCompiler.__init__(self, [], exelist, version, for_machine, is_cross, + info, exe_wrapper, linker=linker, full_version=full_version) + IntelVisualStudioLikeCompiler.__init__(self, target) + + def get_options(self) -> 'MutableKeyedOptionDictType': + # This has only been tested with version 19.0, + cpp_stds = ['none', 'c++11', 'vc++11', 'c++14', 'vc++14', 'c++17', 'vc++17', 'c++latest'] + return self._get_options_impl(super().get_options(), cpp_stds) + + def get_compiler_check_args(self, mode: CompileCheckMode) -> T.List[str]: + # XXX: this is a hack because so much GnuLike stuff is in the base CPPCompiler class. 
+ return IntelVisualStudioLikeCompiler.get_compiler_check_args(self, mode) + + +class IntelLLVMClCPPCompiler(IntelClCPPCompiler): + + id = 'intel-llvm-cl' + + +class ArmCPPCompiler(ArmCompiler, CPPCompiler): + def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool, + info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): + CPPCompiler.__init__(self, ccache, exelist, version, for_machine, is_cross, + info, exe_wrapper, linker=linker, full_version=full_version) + ArmCompiler.__init__(self) + + def get_options(self) -> 'MutableKeyedOptionDictType': + opts = CPPCompiler.get_options(self) + key = OptionKey('std', machine=self.for_machine, lang=self.language) + opts[key].choices = ['none', 'c++03', 'c++11'] + return opts + + def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]: + args: T.List[str] = [] + key = OptionKey('std', machine=self.for_machine, lang=self.language) + std = options[key] + if std.value == 'c++11': + args.append('--cpp11') + elif std.value == 'c++03': + args.append('--cpp') + return args + + def get_option_link_args(self, options: 'KeyedOptionDictType') -> T.List[str]: + return [] + + def get_compiler_check_args(self, mode: CompileCheckMode) -> T.List[str]: + return [] + + +class CcrxCPPCompiler(CcrxCompiler, CPPCompiler): + def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool, + info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): + CPPCompiler.__init__(self, ccache, exelist, version, for_machine, is_cross, + info, exe_wrapper, linker=linker, full_version=full_version) + CcrxCompiler.__init__(self) + + # Override CCompiler.get_always_args + def get_always_args(self) -> T.List[str]: + return ['-nologo', '-lang=cpp'] + + def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]: + return [] + + def get_compile_only_args(self) -> T.List[str]: + return [] + + def get_output_args(self, outputname: str) -> T.List[str]: + return [f'-output=obj={outputname}'] + + def get_option_link_args(self, options: 'KeyedOptionDictType') -> T.List[str]: + return [] + + def get_compiler_check_args(self, mode: CompileCheckMode) -> T.List[str]: + return [] + +class TICPPCompiler(TICompiler, CPPCompiler): + def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool, + info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): + CPPCompiler.__init__(self, ccache, exelist, version, for_machine, is_cross, + info, exe_wrapper, linker=linker, full_version=full_version) + TICompiler.__init__(self) + + def get_options(self) -> 'MutableKeyedOptionDictType': + opts = CPPCompiler.get_options(self) + key = OptionKey('std', machine=self.for_machine, lang=self.language) + opts[key].choices = ['none', 'c++03'] + return opts + + def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]: + args: T.List[str] = [] + key = OptionKey('std', machine=self.for_machine, lang=self.language) + std = options[key] + if std.value != 'none': + args.append('--' + std.value) + return args + + def get_always_args(self) -> T.List[str]: + return [] + + def 
get_option_link_args(self, options: 'KeyedOptionDictType') -> T.List[str]: + return [] + +class C2000CPPCompiler(TICPPCompiler): + # Required for backwards compat with projects created before ti-cgt support existed + id = 'c2000' + +class MetrowerksCPPCompilerARM(MetrowerksCompiler, CPPCompiler): + id = 'mwccarm' + + def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_machine: MachineChoice, + is_cross: bool, info: 'MachineInfo', + exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): + CPPCompiler.__init__(self, ccache, exelist, version, for_machine, is_cross, + info, exe_wrapper, linker=linker, full_version=full_version) + MetrowerksCompiler.__init__(self) + + def get_instruction_set_args(self, instruction_set: str) -> T.Optional[T.List[str]]: + return mwccarm_instruction_set_args.get(instruction_set, None) + + def get_options(self) -> 'MutableKeyedOptionDictType': + opts = CPPCompiler.get_options(self) + key = OptionKey('std', machine=self.for_machine, lang=self.language) + opts[key].choices = ['none'] + return opts + + def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]: + args: T.List[str] = [] + std = options[OptionKey('std', machine=self.for_machine, lang=self.language)] + if std.value != 'none': + args.append('-lang') + args.append(std.value) + return args + +class MetrowerksCPPCompilerEmbeddedPowerPC(MetrowerksCompiler, CPPCompiler): + id = 'mwcceppc' + + def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_machine: MachineChoice, + is_cross: bool, info: 'MachineInfo', + exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): + CPPCompiler.__init__(self, ccache, exelist, version, for_machine, is_cross, + info, exe_wrapper, linker=linker, full_version=full_version) + MetrowerksCompiler.__init__(self) + + def get_instruction_set_args(self, instruction_set: str) -> T.Optional[T.List[str]]: + return mwcceppc_instruction_set_args.get(instruction_set, None) + + def get_options(self) -> 'MutableKeyedOptionDictType': + opts = CPPCompiler.get_options(self) + key = OptionKey('std', machine=self.for_machine, lang=self.language) + opts[key].choices = ['none'] + return opts + + def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]: + args: T.List[str] = [] + std = options[OptionKey('std', machine=self.for_machine, lang=self.language)] + if std.value != 'none': + args.append('-lang ' + std.value) + return args diff --git a/vendored-meson/meson/mesonbuild/compilers/cs.py b/vendored-meson/meson/mesonbuild/compilers/cs.py new file mode 100644 index 000000000000..f0bed5fb8fdf --- /dev/null +++ b/vendored-meson/meson/mesonbuild/compilers/cs.py @@ -0,0 +1,154 @@ +# Copyright 2012-2017 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+from __future__ import annotations + +import os.path, subprocess +import textwrap +import typing as T + +from ..mesonlib import EnvironmentException +from ..linkers import RSPFileSyntax + +from .compilers import Compiler, mono_buildtype_args +from .mixins.islinker import BasicLinkerIsCompilerMixin + +if T.TYPE_CHECKING: + from ..envconfig import MachineInfo + from ..environment import Environment + from ..mesonlib import MachineChoice + +cs_optimization_args = { + 'plain': [], + '0': [], + 'g': [], + '1': ['-optimize+'], + '2': ['-optimize+'], + '3': ['-optimize+'], + 's': ['-optimize+'], + } # type: T.Dict[str, T.List[str]] + + +class CsCompiler(BasicLinkerIsCompilerMixin, Compiler): + + language = 'cs' + + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, + info: 'MachineInfo', runner: T.Optional[str] = None): + super().__init__([], exelist, version, for_machine, info) + self.runner = runner + + @classmethod + def get_display_language(cls) -> str: + return 'C sharp' + + def get_always_args(self) -> T.List[str]: + return ['/nologo'] + + def get_linker_always_args(self) -> T.List[str]: + return ['/nologo'] + + def get_output_args(self, fname: str) -> T.List[str]: + return ['-out:' + fname] + + def get_link_args(self, fname: str) -> T.List[str]: + return ['-r:' + fname] + + def get_werror_args(self) -> T.List[str]: + return ['-warnaserror'] + + def get_pic_args(self) -> T.List[str]: + return [] + + def compute_parameters_with_absolute_paths(self, parameter_list: T.List[str], + build_dir: str) -> T.List[str]: + for idx, i in enumerate(parameter_list): + if i[:2] == '-L': + parameter_list[idx] = i[:2] + os.path.normpath(os.path.join(build_dir, i[2:])) + if i[:5] == '-lib:': + parameter_list[idx] = i[:5] + os.path.normpath(os.path.join(build_dir, i[5:])) + + return parameter_list + + def get_pch_use_args(self, pch_dir: str, header: str) -> T.List[str]: + return [] + + def get_pch_name(self, header_name: str) -> str: + return '' + + def sanity_check(self, work_dir: str, environment: 'Environment') -> None: + src = 'sanity.cs' + obj = 'sanity.exe' + source_name = os.path.join(work_dir, src) + with open(source_name, 'w', encoding='utf-8') as ofile: + ofile.write(textwrap.dedent(''' + public class Sanity { + static public void Main () { + } + } + ''')) + pc = subprocess.Popen(self.exelist + self.get_always_args() + [src], cwd=work_dir) + pc.wait() + if pc.returncode != 0: + raise EnvironmentException('C# compiler %s cannot compile programs.' % self.name_string()) + if self.runner: + cmdlist = [self.runner, obj] + else: + cmdlist = [os.path.join(work_dir, obj)] + pe = subprocess.Popen(cmdlist, cwd=work_dir) + pe.wait() + if pe.returncode != 0: + raise EnvironmentException('Executables created by Mono compiler %s are not runnable.' 
% self.name_string()) + + def needs_static_linker(self) -> bool: + return False + + def get_buildtype_args(self, buildtype: str) -> T.List[str]: + return mono_buildtype_args[buildtype] + + def get_debug_args(self, is_debug: bool) -> T.List[str]: + return ['-debug'] if is_debug else [] + + def get_optimization_args(self, optimization_level: str) -> T.List[str]: + return cs_optimization_args[optimization_level] + + +class MonoCompiler(CsCompiler): + + id = 'mono' + + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, + info: 'MachineInfo'): + super().__init__(exelist, version, for_machine, info, runner='mono') + + def rsp_file_syntax(self) -> 'RSPFileSyntax': + return RSPFileSyntax.GCC + + +class VisualStudioCsCompiler(CsCompiler): + + id = 'csc' + + def get_buildtype_args(self, buildtype: str) -> T.List[str]: + res = mono_buildtype_args[buildtype] + if not self.info.is_windows(): + tmp = [] + for flag in res: + if flag == '-debug': + flag = '-debug:portable' + tmp.append(flag) + res = tmp + return res + + def rsp_file_syntax(self) -> 'RSPFileSyntax': + return RSPFileSyntax.MSVC diff --git a/vendored-meson/meson/mesonbuild/compilers/cuda.py b/vendored-meson/meson/mesonbuild/compilers/cuda.py new file mode 100644 index 000000000000..8ed7fa41cfcc --- /dev/null +++ b/vendored-meson/meson/mesonbuild/compilers/cuda.py @@ -0,0 +1,791 @@ +# Copyright 2012-2017 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from __future__ import annotations + +import enum +import os.path +import string +import typing as T + +from .. import coredata +from .. import mlog +from ..mesonlib import ( + EnvironmentException, Popen_safe, OptionOverrideProxy, + is_windows, LibType, OptionKey, version_compare, +) +from .compilers import (Compiler, cuda_buildtype_args, cuda_optimization_args, + cuda_debug_args) + +if T.TYPE_CHECKING: + from .compilers import CompileCheckMode + from ..build import BuildTarget + from ..coredata import MutableKeyedOptionDictType, KeyedOptionDictType + from ..dependencies import Dependency + from ..environment import Environment # noqa: F401 + from ..envconfig import MachineInfo + from ..linkers.linkers import DynamicLinker + from ..mesonlib import MachineChoice + from ..programs import ExternalProgram + + +class _Phase(enum.Enum): + + COMPILER = 'compiler' + LINKER = 'linker' + + +class CudaCompiler(Compiler): + + LINKER_PREFIX = '-Xlinker=' + language = 'cuda' + + # NVCC flags taking no arguments. 
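+ # These are forwarded to nvcc verbatim by _to_host_flags(); e.g. '-c'
+ # ('--compile') and '-G' ('--device-debug') carry no value to translate.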
+ _FLAG_PASSTHRU_NOARGS = { + # NVCC --long-option, NVCC -short-option CUDA Toolkit 11.2.1 Reference + '--objdir-as-tempdir', '-objtemp', # 4.2.1.2 + '--generate-dependency-targets', '-MP', # 4.2.1.12 + '--allow-unsupported-compiler', '-allow-unsupported-compiler', # 4.2.1.14 + '--link', # 4.2.2.1 + '--lib', '-lib', # 4.2.2.2 + '--device-link', '-dlink', # 4.2.2.3 + '--device-c', '-dc', # 4.2.2.4 + '--device-w', '-dw', # 4.2.2.5 + '--cuda', '-cuda', # 4.2.2.6 + '--compile', '-c', # 4.2.2.7 + '--fatbin', '-fatbin', # 4.2.2.8 + '--cubin', '-cubin', # 4.2.2.9 + '--ptx', '-ptx', # 4.2.2.10 + '--preprocess', '-E', # 4.2.2.11 + '--generate-dependencies', '-M', # 4.2.2.12 + '--generate-nonsystem-dependencies', '-MM', # 4.2.2.13 + '--generate-dependencies-with-compile', '-MD', # 4.2.2.14 + '--generate-nonsystem-dependencies-with-compile', '-MMD', # 4.2.2.15 + '--run', # 4.2.2.16 + '--profile', '-pg', # 4.2.3.1 + '--debug', '-g', # 4.2.3.2 + '--device-debug', '-G', # 4.2.3.3 + '--extensible-whole-program', '-ewp', # 4.2.3.4 + '--generate-line-info', '-lineinfo', # 4.2.3.5 + '--dlink-time-opt', '-dlto', # 4.2.3.8 + '--no-exceptions', '-noeh', # 4.2.3.11 + '--shared', '-shared', # 4.2.3.12 + '--no-host-device-initializer-list', '-nohdinitlist', # 4.2.3.15 + '--expt-relaxed-constexpr', '-expt-relaxed-constexpr', # 4.2.3.16 + '--extended-lambda', '-extended-lambda', # 4.2.3.17 + '--expt-extended-lambda', '-expt-extended-lambda', # 4.2.3.18 + '--m32', '-m32', # 4.2.3.20 + '--m64', '-m64', # 4.2.3.21 + '--forward-unknown-to-host-compiler', '-forward-unknown-to-host-compiler', # 4.2.5.1 + '--forward-unknown-to-host-linker', '-forward-unknown-to-host-linker', # 4.2.5.2 + '--dont-use-profile', '-noprof', # 4.2.5.3 + '--dryrun', '-dryrun', # 4.2.5.5 + '--verbose', '-v', # 4.2.5.6 + '--keep', '-keep', # 4.2.5.7 + '--save-temps', '-save-temps', # 4.2.5.9 + '--clean-targets', '-clean', # 4.2.5.10 + '--no-align-double', # 4.2.5.16 + '--no-device-link', '-nodlink', # 4.2.5.17 + '--allow-unsupported-compiler', '-allow-unsupported-compiler', # 4.2.5.18 + '--use_fast_math', '-use_fast_math', # 4.2.7.7 + '--extra-device-vectorization', '-extra-device-vectorization', # 4.2.7.12 + '--compile-as-tools-patch', '-astoolspatch', # 4.2.7.13 + '--keep-device-functions', '-keep-device-functions', # 4.2.7.14 + '--disable-warnings', '-w', # 4.2.8.1 + '--source-in-ptx', '-src-in-ptx', # 4.2.8.2 + '--restrict', '-restrict', # 4.2.8.3 + '--Wno-deprecated-gpu-targets', '-Wno-deprecated-gpu-targets', # 4.2.8.4 + '--Wno-deprecated-declarations', '-Wno-deprecated-declarations', # 4.2.8.5 + '--Wreorder', '-Wreorder', # 4.2.8.6 + '--Wdefault-stream-launch', '-Wdefault-stream-launch', # 4.2.8.7 + '--Wext-lambda-captures-this', '-Wext-lambda-captures-this', # 4.2.8.8 + '--display-error-number', '-err-no', # 4.2.8.10 + '--resource-usage', '-res-usage', # 4.2.8.14 + '--help', '-h', # 4.2.8.15 + '--version', '-V', # 4.2.8.16 + '--list-gpu-code', '-code-ls', # 4.2.8.20 + '--list-gpu-arch', '-arch-ls', # 4.2.8.21 + } + # Dictionary of NVCC flags taking either one argument or a comma-separated list. + # Maps --long to -short options, because the short options are more GCC-like. 
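+ # e.g. '--include-path foo' is remapped via this table to '-I' in
+ # _to_host_flags() and emitted glued as '-Ifoo', just like GCC-style input.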
+ _FLAG_LONG2SHORT_WITHARGS = { + '--output-file': '-o', # 4.2.1.1 + '--pre-include': '-include', # 4.2.1.3 + '--library': '-l', # 4.2.1.4 + '--define-macro': '-D', # 4.2.1.5 + '--undefine-macro': '-U', # 4.2.1.6 + '--include-path': '-I', # 4.2.1.7 + '--system-include': '-isystem', # 4.2.1.8 + '--library-path': '-L', # 4.2.1.9 + '--output-directory': '-odir', # 4.2.1.10 + '--dependency-output': '-MF', # 4.2.1.11 + '--compiler-bindir': '-ccbin', # 4.2.1.13 + '--archiver-binary': '-arbin', # 4.2.1.15 + '--cudart': '-cudart', # 4.2.1.16 + '--cudadevrt': '-cudadevrt', # 4.2.1.17 + '--libdevice-directory': '-ldir', # 4.2.1.18 + '--target-directory': '-target-dir', # 4.2.1.19 + '--optimization-info': '-opt-info', # 4.2.3.6 + '--optimize': '-O', # 4.2.3.7 + '--ftemplate-backtrace-limit': '-ftemplate-backtrace-limit', # 4.2.3.9 + '--ftemplate-depth': '-ftemplate-depth', # 4.2.3.10 + '--x': '-x', # 4.2.3.13 + '--std': '-std', # 4.2.3.14 + '--machine': '-m', # 4.2.3.19 + '--compiler-options': '-Xcompiler', # 4.2.4.1 + '--linker-options': '-Xlinker', # 4.2.4.2 + '--archive-options': '-Xarchive', # 4.2.4.3 + '--ptxas-options': '-Xptxas', # 4.2.4.4 + '--nvlink-options': '-Xnvlink', # 4.2.4.5 + '--threads': '-t', # 4.2.5.4 + '--keep-dir': '-keep-dir', # 4.2.5.8 + '--run-args': '-run-args', # 4.2.5.11 + '--input-drive-prefix': '-idp', # 4.2.5.12 + '--dependency-drive-prefix': '-ddp', # 4.2.5.13 + '--drive-prefix': '-dp', # 4.2.5.14 + '--dependency-target-name': '-MT', # 4.2.5.15 + '--default-stream': '-default-stream', # 4.2.6.1 + '--gpu-architecture': '-arch', # 4.2.7.1 + '--gpu-code': '-code', # 4.2.7.2 + '--generate-code': '-gencode', # 4.2.7.3 + '--relocatable-device-code': '-rdc', # 4.2.7.4 + '--entries': '-e', # 4.2.7.5 + '--maxrregcount': '-maxrregcount', # 4.2.7.6 + '--ftz': '-ftz', # 4.2.7.8 + '--prec-div': '-prec-div', # 4.2.7.9 + '--prec-sqrt': '-prec-sqrt', # 4.2.7.10 + '--fmad': '-fmad', # 4.2.7.11 + '--Werror': '-Werror', # 4.2.8.9 + '--diag-error': '-diag-error', # 4.2.8.11 + '--diag-suppress': '-diag-suppress', # 4.2.8.12 + '--diag-warn': '-diag-warn', # 4.2.8.13 + '--options-file': '-optf', # 4.2.8.17 + '--time': '-time', # 4.2.8.18 + '--qpp-config': '-qpp-config', # 4.2.8.19 + } + # Reverse map -short to --long options. + _FLAG_SHORT2LONG_WITHARGS = {v: k for k, v in _FLAG_LONG2SHORT_WITHARGS.items()} + + id = 'nvcc' + + def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_machine: MachineChoice, + is_cross: bool, exe_wrapper: T.Optional['ExternalProgram'], + host_compiler: Compiler, info: 'MachineInfo', + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): + super().__init__(ccache, exelist, version, for_machine, info, linker=linker, full_version=full_version, is_cross=is_cross) + self.exe_wrapper = exe_wrapper + self.host_compiler = host_compiler + self.base_options = host_compiler.base_options + self.warn_args = {level: self._to_host_flags(flags) for level, flags in host_compiler.warn_args.items()} + + @classmethod + def _shield_nvcc_list_arg(cls, arg: str, listmode: bool = True) -> str: + r""" + Shield an argument against both splitting by NVCC's list-argument + parse logic, and interpretation by any shell. + + NVCC seems to consider every comma , that is neither escaped by \ nor inside + a double-quoted string a split-point. Single-quotes do not provide protection + against splitting; In fact, after splitting they are \-escaped. Unfortunately, + double-quotes don't protect against shell expansion. 
What follows is a + complex dance to accommodate everybody. + """ + + SQ = "'" + DQ = '"' + CM = "," + BS = "\\" + DQSQ = DQ+SQ+DQ + quotable = set(string.whitespace+'"$`\\') + + if CM not in arg or not listmode: + if SQ not in arg: + # If any of the special characters "$`\ or whitespace are present, single-quote. + # Otherwise return bare. + if set(arg).intersection(quotable): + return SQ+arg+SQ + else: + return arg # Easy case: no splits, no quoting. + else: + # There are single quotes. Double-quote them, and single-quote the + # strings between them. + l = [cls._shield_nvcc_list_arg(s) for s in arg.split(SQ)] + l = sum([[s, DQSQ] for s in l][:-1], []) # Interleave l with DQSQs + return ''.join(l) + else: + # A comma is present, and list mode was active. + # We apply (what we guess is) the (primitive) NVCC splitting rule: + l = [''] + instring = False + argit = iter(arg) + for c in argit: + if c == CM and not instring: + l.append('') + elif c == DQ: + l[-1] += c + instring = not instring + elif c == BS: + try: + l[-1] += next(argit) + except StopIteration: + break + else: + l[-1] += c + + # Shield individual strings, without listmode, then return them with + # escaped commas between them. + l = [cls._shield_nvcc_list_arg(s, listmode=False) for s in l] + return r'\,'.join(l) + + @classmethod + def _merge_flags(cls, flags: T.List[str]) -> T.List[str]: + r""" + The flags to NVCC gets exceedingly verbose and unreadable when too many of them + are shielded with -Xcompiler. Merge consecutive -Xcompiler-wrapped arguments + into one. + """ + if len(flags) <= 1: + return flags + flagit = iter(flags) + xflags = [] + + def is_xcompiler_flag_isolated(flag: str) -> bool: + return flag == '-Xcompiler' + + def is_xcompiler_flag_glued(flag: str) -> bool: + return flag.startswith('-Xcompiler=') + + def is_xcompiler_flag(flag: str) -> bool: + return is_xcompiler_flag_isolated(flag) or is_xcompiler_flag_glued(flag) + + def get_xcompiler_val(flag: str, flagit: T.Iterator[str]) -> str: + if is_xcompiler_flag_glued(flag): + return flag[len('-Xcompiler='):] + else: + try: + return next(flagit) + except StopIteration: + return "" + + ingroup = False + for flag in flagit: + if not is_xcompiler_flag(flag): + ingroup = False + xflags.append(flag) + elif ingroup: + xflags[-1] += ',' + xflags[-1] += get_xcompiler_val(flag, flagit) + elif is_xcompiler_flag_isolated(flag): + ingroup = True + xflags.append(flag) + xflags.append(get_xcompiler_val(flag, flagit)) + elif is_xcompiler_flag_glued(flag): + ingroup = True + xflags.append(flag) + else: + raise ValueError("-Xcompiler flag merging failed, unknown argument form!") + return xflags + + def _to_host_flags(self, flags: T.List[str], phase: _Phase = _Phase.COMPILER) -> T.List[str]: + """ + Translate generic "GCC-speak" plus particular "NVCC-speak" flags to NVCC flags. + + NVCC's "short" flags have broad similarities to the GCC standard, but have + gratuitous, irritating differences. + """ + + xflags = [] + flagit = iter(flags) + + for flag in flagit: + # The CUDA Toolkit Documentation, in 4.1. Command Option Types and Notation, + # specifies that NVCC does not parse the standard flags as GCC does. It has + # its own strategy, to wit: + # + # nvcc recognizes three types of command options: boolean options, single + # value options, and list options. + # + # Boolean options do not have an argument; they are either specified on a + # command line or not. Single value options must be specified at most once, + # and list options may be repeated. 
Examples of each of these option types
+ # are, respectively: --verbose (switch to verbose mode), --output-file
+ # (specify output file), and --include-path (specify include path).
+ #
+ # Single value options and list options must have arguments, which must
+ # follow the name of the option itself by either one or more spaces or an
+ # equals character. When a one-character short name such as -I, -l, and -L
+ # is used, the value of the option may also immediately follow the option
+ # itself without being separated by spaces or an equals character. The
+ # individual values of list options may be separated by commas in a single
+ # instance of the option, or the option may be repeated, or any
+ # combination of these two cases.
+ #
+ # One strange consequence of this choice is that directory and filenames that
+ # contain commas (',') cannot be passed to NVCC (at least, not as easily as
+ # in GCC). Another strange consequence is that it is legal to supply flags
+ # such as
+ #
+ # -lpthread,rt,dl,util
+ # -l pthread,rt,dl,util
+ # -l=pthread,rt,dl,util
+ #
+ # and each of the above alternatives is equivalent to GCC-speak
+ #
+ # -lpthread -lrt -ldl -lutil
+ # -l pthread -l rt -l dl -l util
+ # -l=pthread -l=rt -l=dl -l=util
+ #
+ # *With the exception of commas in the name*, GCC-speak for these list flags
+ # is a strict subset of NVCC-speak, so we pass those flags through.
+ #
+ # The -D macro-define flag is documented as somehow shielding commas from
+ # splitting a definition. Balanced parentheses, braces and single-quotes
+ # around the comma are not sufficient, but balanced double-quotes are. The
+ # shielding appears to work with -l, -I, -L flags as well, for instance.
+ #
+ # Since our goal is to replicate GCC-speak as much as possible, we check for
+ # commas in all list-arguments and shield them with double-quotes. We make
+ # an exception for -D (where this would be value-changing) and -U (because
+ # it isn't possible to define a macro with a comma in the name).
+
+ if flag in self._FLAG_PASSTHRU_NOARGS:
+ xflags.append(flag)
+ continue
+
+ # Handle breakup of flag-values into a flag-part and value-part.
+ if flag[:1] not in '-/':
+ # This is not a flag. It's probably a file input. Pass it through.
+ xflags.append(flag)
+ continue
+ elif flag[:1] == '/':
+ # This is ambiguously either an MSVC-style /switch or an absolute path
+ # to a file. For some magical reason the following works acceptably in
+ # both cases.
+ wrap = '"' if ',' in flag else ''
+ xflags.append(f'-X{phase.value}={wrap}{flag}{wrap}')
+ continue
+ elif len(flag) >= 2 and flag[0] == '-' and flag[1] in 'IDULlmOxmte':
+ # This is a single-letter short option. These options (with the
+ # exception of -o) are allowed to receive their argument with neither
+ # space nor = sign before them. Detect and separate them in that event.
+ if flag[2:3] == '': # -I something
+ try:
+ val = next(flagit)
+ except StopIteration:
+ pass
+ elif flag[2:3] == '=': # -I=something
+ val = flag[3:]
+ else: # -Isomething
+ val = flag[2:]
+ flag = flag[:2] # -I
+ elif flag in self._FLAG_LONG2SHORT_WITHARGS or \
+ flag in self._FLAG_SHORT2LONG_WITHARGS:
+ # This is either -o or a multi-letter flag, and it is receiving its
+ # value isolated.
+ try:
+ val = next(flagit) # -o something
+ except StopIteration:
+ pass
+ elif flag.split('=', 1)[0] in self._FLAG_LONG2SHORT_WITHARGS or \
+ flag.split('=', 1)[0] in self._FLAG_SHORT2LONG_WITHARGS:
+ # This is either -o or a multi-letter flag, and it is receiving its
+ # value after an = sign.
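+ # e.g. '--gpu-architecture=sm_80' splits into flag='--gpu-architecture' and
+ # val='sm_80', and the flag is then remapped to its short form '-arch' below.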
+ flag, val = flag.split('=', 1) # -o=something + # Some dependencies (e.g., BoostDependency) add unspaced "-isystem/usr/include" arguments + elif flag.startswith('-isystem'): + val = flag[8:].strip() + flag = flag[:8] + else: + # This is a flag, and it's foreign to NVCC. + # + # We do not know whether this GCC-speak flag takes an isolated + # argument. Assuming it does not (the vast majority indeed don't), + # wrap this argument in an -Xcompiler flag and send it down to NVCC. + if flag == '-ffast-math': + xflags.append('-use_fast_math') + xflags.append('-Xcompiler='+flag) + elif flag == '-fno-fast-math': + xflags.append('-ftz=false') + xflags.append('-prec-div=true') + xflags.append('-prec-sqrt=true') + xflags.append('-Xcompiler='+flag) + elif flag == '-freciprocal-math': + xflags.append('-prec-div=false') + xflags.append('-Xcompiler='+flag) + elif flag == '-fno-reciprocal-math': + xflags.append('-prec-div=true') + xflags.append('-Xcompiler='+flag) + else: + xflags.append('-Xcompiler='+self._shield_nvcc_list_arg(flag)) + # The above should securely handle GCC's -Wl, -Wa, -Wp, arguments. + continue + + assert val is not None # Should only trip if there is a missing argument. + + # Take care of the various NVCC-supported flags that need special handling. + flag = self._FLAG_LONG2SHORT_WITHARGS.get(flag, flag) + + if flag in {'-include', '-isystem', '-I', '-L', '-l'}: + # These flags are known to GCC, but list-valued in NVCC. They potentially + # require double-quoting to prevent NVCC interpreting the flags as lists + # when GCC would not have done so. + # + # We avoid doing this quoting for -D to avoid redefining macros and for + # -U because it isn't possible to define a macro with a comma in the name. + # -U with comma arguments is impossible in GCC-speak (and thus unambiguous + #in NVCC-speak, albeit unportable). + if len(flag) == 2: + xflags.append(flag+self._shield_nvcc_list_arg(val)) + elif flag == '-isystem' and val in self.host_compiler.get_default_include_dirs(): + # like GnuLikeCompiler, we have to filter out include directories specified + # with -isystem that overlap with the host compiler's search path + pass + else: + xflags.append(flag) + xflags.append(self._shield_nvcc_list_arg(val)) + elif flag == '-O': + # Handle optimization levels GCC knows about that NVCC does not. + if val == 'fast': + xflags.append('-O3') + xflags.append('-use_fast_math') + xflags.append('-Xcompiler') + xflags.append(flag+val) + elif val in {'s', 'g', 'z'}: + xflags.append('-Xcompiler') + xflags.append(flag+val) + else: + xflags.append(flag+val) + elif flag in {'-D', '-U', '-m', '-t'}: + xflags.append(flag+val) # For style, keep glued. + elif flag in {'-std'}: + xflags.append(flag+'='+val) # For style, keep glued. + else: + xflags.append(flag) + xflags.append(val) + + return self._merge_flags(xflags) + + def needs_static_linker(self) -> bool: + return False + + def thread_link_flags(self, environment: 'Environment') -> T.List[str]: + return self._to_host_flags(self.host_compiler.thread_link_flags(environment), _Phase.LINKER) + + def sanity_check(self, work_dir: str, env: 'Environment') -> None: + mlog.debug('Sanity testing ' + self.get_display_language() + ' compiler:', ' '.join(self.exelist)) + mlog.debug('Is cross compiler: %s.' 
% str(self.is_cross))
+
+ sname = 'sanitycheckcuda.cu'
+ code = r'''
+ #include <cuda_runtime.h>
+ #include <stdio.h>
+
+ __global__ void kernel (void) {}
+
+ int main(void){
+ struct cudaDeviceProp prop;
+ int count, i;
+ cudaError_t ret = cudaGetDeviceCount(&count);
+ if(ret != cudaSuccess){
+ fprintf(stderr, "%d\n", (int)ret);
+ }else{
+ for(i=0;i<count;i++){
+ if(cudaGetDeviceProperties(&prop, i) == cudaSuccess){
+ fprintf(stdout, "%d.%d\n", prop.major, prop.minor);
+ }
+ }
+ }
+ fflush(stderr);
+ fflush(stdout);
+ return 0;
+ }
+ '''
+ # Simplified sketch (assumption) of the remaining steps: write the snippet
+ # out, compile it, and fail on error. Upstream Meson additionally runs the
+ # resulting binary on native builds to detect the GPU architecture.
+ source_name = os.path.join(work_dir, sname)
+ with open(source_name, 'w', encoding='utf-8') as ofile:
+ ofile.write(code)
+ # Disable warnings and link the CUDA runtime statically for minimum
+ # reliance on the system; honour -ccbin during sanity checking as well.
+ flags = ['-w', '-cudart', 'static', source_name]
+ flags += self.get_ccbin_args(env.coredata.options)
+ pc, stdo, stde = Popen_safe(self.exelist + flags, cwd=work_dir)
+ mlog.debug('Sanity check compile stdout:', stdo)
+ mlog.debug('Sanity check compile stderr:', stde)
+ if pc.returncode != 0:
+ raise EnvironmentException(f'Compiler {self.name_string()} cannot compile programs.')
+
+ def has_header_symbol(self, hname: str, symbol: str, prefix: str,
+ env: 'Environment', *,
+ extra_args: T.Optional[T.List[str]] = None,
+ dependencies: T.Optional[T.List['Dependency']] = None) -> T.Tuple[bool, bool]:
+ if extra_args is None:
+ extra_args = []
+ fargs = {'prefix': prefix, 'header': hname, 'symbol': symbol}
+ # Check if it's a C-like symbol
+ t = '''{prefix}
+ #include <{header}>
+ int main(void) {{
+ /* If it's not defined as a macro, try to use as a symbol */
+ #ifndef {symbol}
+ {symbol};
+ #endif
+ return 0;
+ }}'''
+ found, cached = self.compiles(t.format_map(fargs), env, extra_args=extra_args, dependencies=dependencies)
+ if found:
+ return True, cached
+ # Check if it's a class or a template
+ t = '''{prefix}
+ #include <{header}>
+ using {symbol};
+ int main(void) {{
+ return 0;
+ }}'''
+ return self.compiles(t.format_map(fargs), env, extra_args=extra_args, dependencies=dependencies)
+
+ _CPP14_VERSION = '>=9.0'
+ _CPP17_VERSION = '>=11.0'
+ _CPP20_VERSION = '>=12.0'
+
+ def get_options(self) -> 'MutableKeyedOptionDictType':
+ opts = super().get_options()
+ std_key = OptionKey('std', machine=self.for_machine, lang=self.language)
+ ccbindir_key = OptionKey('ccbindir', machine=self.for_machine, lang=self.language)
+
+ cpp_stds = ['none', 'c++03', 'c++11']
+ if version_compare(self.version, self._CPP14_VERSION):
+ cpp_stds += ['c++14']
+ if version_compare(self.version, self._CPP17_VERSION):
+ cpp_stds += ['c++17']
+ if version_compare(self.version, self._CPP20_VERSION):
+ cpp_stds += ['c++20']
+
+ opts.update({
+ std_key: coredata.UserComboOption('C++ language standard to use with CUDA',
+ cpp_stds, 'none'),
+ ccbindir_key: coredata.UserStringOption('CUDA non-default toolchain directory to use (-ccbin)',
+ ''),
+ })
+ return opts
+
+ def _to_host_compiler_options(self, options: 'KeyedOptionDictType') -> 'KeyedOptionDictType':
+ """
+ Convert an NVCC Option set to a host compiler's option set.
+ """
+
+ # We must strip the -std option from the host compiler option set, as NVCC has
+ # its own -std flag that may not agree with the host compiler's.
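+ # e.g. with cpp_std=gnu++17 but cuda_std=c++14, forwarding the host's
+ # '-std=gnu++17' next to nvcc's own '--std=c++14' would conflict, so the
+ # host 'std' option is overridden to 'none' below.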
+ host_options = {key: options.get(key, opt) for key, opt in self.host_compiler.get_options().items()} + std_key = OptionKey('std', machine=self.for_machine, lang=self.host_compiler.language) + overrides = {std_key: 'none'} + return OptionOverrideProxy(overrides, host_options) + + def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]: + args = self.get_ccbin_args(options) + # On Windows, the version of the C++ standard used by nvcc is dictated by + # the combination of CUDA version and MSVC version; the --std= is thus ignored + # and attempting to use it will result in a warning: https://stackoverflow.com/a/51272091/741027 + if not is_windows(): + key = OptionKey('std', machine=self.for_machine, lang=self.language) + std = options[key] + if std.value != 'none': + args.append('--std=' + std.value) + + return args + self._to_host_flags(self.host_compiler.get_option_compile_args(self._to_host_compiler_options(options))) + + def get_option_link_args(self, options: 'KeyedOptionDictType') -> T.List[str]: + args = self.get_ccbin_args(options) + return args + self._to_host_flags(self.host_compiler.get_option_link_args(self._to_host_compiler_options(options)), _Phase.LINKER) + + def get_soname_args(self, env: 'Environment', prefix: str, shlib_name: str, + suffix: str, soversion: str, + darwin_versions: T.Tuple[str, str]) -> T.List[str]: + return self._to_host_flags(self.host_compiler.get_soname_args( + env, prefix, shlib_name, suffix, soversion, darwin_versions), _Phase.LINKER) + + def get_compile_only_args(self) -> T.List[str]: + return ['-c'] + + def get_no_optimization_args(self) -> T.List[str]: + return ['-O0'] + + def get_optimization_args(self, optimization_level: str) -> T.List[str]: + # alternatively, consider simply redirecting this to the host compiler, which would + # give us more control over options like "optimize for space" (which nvcc doesn't support): + # return self._to_host_flags(self.host_compiler.get_optimization_args(optimization_level)) + return cuda_optimization_args[optimization_level] + + def sanitizer_compile_args(self, value: str) -> T.List[str]: + return self._to_host_flags(self.host_compiler.sanitizer_compile_args(value)) + + def sanitizer_link_args(self, value: str) -> T.List[str]: + return self._to_host_flags(self.host_compiler.sanitizer_link_args(value)) + + def get_debug_args(self, is_debug: bool) -> T.List[str]: + return cuda_debug_args[is_debug] + + def get_werror_args(self) -> T.List[str]: + return ['-Werror=cross-execution-space-call,deprecated-declarations,reorder'] + + def get_warn_args(self, level: str) -> T.List[str]: + return self.warn_args[level] + + def get_buildtype_args(self, buildtype: str) -> T.List[str]: + # nvcc doesn't support msvc's "Edit and Continue" PDB format; "downgrade" to + # a regular PDB to avoid cl's warning to that effect (D9025 : overriding '/ZI' with '/Zi') + host_args = ['/Zi' if arg == '/ZI' else arg for arg in self.host_compiler.get_buildtype_args(buildtype)] + return cuda_buildtype_args[buildtype] + self._to_host_flags(host_args) + + def get_include_args(self, path: str, is_system: bool) -> T.List[str]: + if path == '': + path = '.' 
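+ # An empty path means the current directory; nvcc needs it spelled out.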
+ return ['-isystem=' + path] if is_system else ['-I' + path] + + def get_compile_debugfile_args(self, rel_obj: str, pch: bool = False) -> T.List[str]: + return self._to_host_flags(self.host_compiler.get_compile_debugfile_args(rel_obj, pch)) + + def get_link_debugfile_args(self, targetfile: str) -> T.List[str]: + return self._to_host_flags(self.host_compiler.get_link_debugfile_args(targetfile), _Phase.LINKER) + + def get_depfile_suffix(self) -> str: + return 'd' + + def get_buildtype_linker_args(self, buildtype: str) -> T.List[str]: + return self._to_host_flags(self.host_compiler.get_buildtype_linker_args(buildtype), _Phase.LINKER) + + def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str, + rpath_paths: T.Tuple[str, ...], build_rpath: str, + install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]: + (rpath_args, rpath_dirs_to_remove) = self.host_compiler.build_rpath_args( + env, build_dir, from_dir, rpath_paths, build_rpath, install_rpath) + return (self._to_host_flags(rpath_args, _Phase.LINKER), rpath_dirs_to_remove) + + def linker_to_compiler_args(self, args: T.List[str]) -> T.List[str]: + return args + + def get_pic_args(self) -> T.List[str]: + return self._to_host_flags(self.host_compiler.get_pic_args()) + + def compute_parameters_with_absolute_paths(self, parameter_list: T.List[str], + build_dir: str) -> T.List[str]: + return [] + + def get_output_args(self, target: str) -> T.List[str]: + return ['-o', target] + + def get_std_exe_link_args(self) -> T.List[str]: + return self._to_host_flags(self.host_compiler.get_std_exe_link_args(), _Phase.LINKER) + + def find_library(self, libname: str, env: 'Environment', extra_dirs: T.List[str], + libtype: LibType = LibType.PREFER_SHARED, lib_prefix_warning: bool = True) -> T.Optional[T.List[str]]: + return ['-l' + libname] # FIXME + + def get_crt_compile_args(self, crt_val: str, buildtype: str) -> T.List[str]: + return self._to_host_flags(self.host_compiler.get_crt_compile_args(crt_val, buildtype)) + + def get_crt_link_args(self, crt_val: str, buildtype: str) -> T.List[str]: + # nvcc defaults to static, release version of msvc runtime and provides no + # native option to override it; override it with /NODEFAULTLIB + host_link_arg_overrides = [] + host_crt_compile_args = self.host_compiler.get_crt_compile_args(crt_val, buildtype) + if any(arg in {'/MDd', '/MD', '/MTd'} for arg in host_crt_compile_args): + host_link_arg_overrides += ['/NODEFAULTLIB:LIBCMT.lib'] + return self._to_host_flags(host_link_arg_overrides + self.host_compiler.get_crt_link_args(crt_val, buildtype), _Phase.LINKER) + + def get_target_link_args(self, target: 'BuildTarget') -> T.List[str]: + return self._to_host_flags(super().get_target_link_args(target), _Phase.LINKER) + + def get_dependency_compile_args(self, dep: 'Dependency') -> T.List[str]: + return self._to_host_flags(super().get_dependency_compile_args(dep)) + + def get_dependency_link_args(self, dep: 'Dependency') -> T.List[str]: + return self._to_host_flags(super().get_dependency_link_args(dep), _Phase.LINKER) + + def get_ccbin_args(self, options: 'KeyedOptionDictType') -> T.List[str]: + key = OptionKey('ccbindir', machine=self.for_machine, lang=self.language) + ccbindir = options[key].value + if isinstance(ccbindir, str) and ccbindir != '': + return [self._shield_nvcc_list_arg('-ccbin='+ccbindir, False)] + else: + return [] + + def get_profile_generate_args(self) -> T.List[str]: + return ['-Xcompiler=' + x for x in self.host_compiler.get_profile_generate_args()] + + def 
get_profile_use_args(self) -> T.List[str]: + return ['-Xcompiler=' + x for x in self.host_compiler.get_profile_use_args()] + + def get_assert_args(self, disable: bool) -> T.List[str]: + return self.host_compiler.get_assert_args(disable) diff --git a/vendored-meson/meson/mesonbuild/compilers/cython.py b/vendored-meson/meson/mesonbuild/compilers/cython.py new file mode 100644 index 000000000000..9bbfebeb0ef9 --- /dev/null +++ b/vendored-meson/meson/mesonbuild/compilers/cython.py @@ -0,0 +1,96 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright © 2021 Intel Corporation +from __future__ import annotations + +"""Abstraction for Cython language compilers.""" + +import typing as T + +from .. import coredata +from ..mesonlib import EnvironmentException, OptionKey, version_compare +from .compilers import Compiler + +if T.TYPE_CHECKING: + from ..coredata import MutableKeyedOptionDictType, KeyedOptionDictType + from ..environment import Environment + + +class CythonCompiler(Compiler): + + """Cython Compiler.""" + + language = 'cython' + id = 'cython' + + def needs_static_linker(self) -> bool: + # We transpile into C, so we don't need any linker + return False + + def get_always_args(self) -> T.List[str]: + return ['--fast-fail'] + + def get_werror_args(self) -> T.List[str]: + return ['-Werror'] + + def get_output_args(self, outputname: str) -> T.List[str]: + return ['-o', outputname] + + def get_optimization_args(self, optimization_level: str) -> T.List[str]: + # Cython doesn't have optimization levels itself, the underlying + # compiler might though + return [] + + def get_dependency_gen_args(self, outtarget: str, outfile: str) -> T.List[str]: + if version_compare(self.version, '>=0.29.33'): + return ['-M'] + return [] + + def get_depfile_suffix(self) -> str: + return 'dep' + + def sanity_check(self, work_dir: str, environment: 'Environment') -> None: + code = 'print("hello world")' + with self.cached_compile(code, environment.coredata) as p: + if p.returncode != 0: + raise EnvironmentException(f'Cython compiler {self.id!r} cannot compile programs') + + def get_buildtype_args(self, buildtype: str) -> T.List[str]: + # Cython doesn't implement this, but Meson requires an implementation + return [] + + def get_pic_args(self) -> T.List[str]: + # We can lie here, it's fine + return [] + + def compute_parameters_with_absolute_paths(self, parameter_list: T.List[str], + build_dir: str) -> T.List[str]: + new: T.List[str] = [] + for i in parameter_list: + new.append(i) + + return new + + def get_options(self) -> 'MutableKeyedOptionDictType': + opts = super().get_options() + opts.update({ + OptionKey('version', machine=self.for_machine, lang=self.language): coredata.UserComboOption( + 'Python version to target', + ['2', '3'], + '3', + ), + OptionKey('language', machine=self.for_machine, lang=self.language): coredata.UserComboOption( + 'Output C or C++ files', + ['c', 'cpp'], + 'c', + ) + }) + return opts + + def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]: + args: T.List[str] = [] + key = options[OptionKey('version', machine=self.for_machine, lang=self.language)] + args.append(f'-{key.value}') + lang = options[OptionKey('language', machine=self.for_machine, lang=self.language)] + if lang.value == 'cpp': + args.append('--cplus') + return args diff --git a/vendored-meson/meson/mesonbuild/compilers/d.py b/vendored-meson/meson/mesonbuild/compilers/d.py new file mode 100644 index 000000000000..08ebb7583e06 --- /dev/null +++ b/vendored-meson/meson/mesonbuild/compilers/d.py @@ -0,0 
+1,1033 @@ +# Copyright 2012-2022 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from __future__ import annotations + +import os.path +import re +import subprocess +import typing as T + +from .. import mesonlib +from .. import mlog +from ..arglist import CompilerArgs +from ..linkers import RSPFileSyntax +from ..mesonlib import ( + EnvironmentException, version_compare, OptionKey, is_windows +) + +from . import compilers +from .compilers import ( + d_dmd_buildtype_args, + d_gdc_buildtype_args, + d_ldc_buildtype_args, + clike_debug_args, + Compiler, + CompileCheckMode, +) +from .mixins.gnu import GnuCompiler +from .mixins.gnu import gnu_common_warning_args + +if T.TYPE_CHECKING: + from ..dependencies import Dependency + from ..programs import ExternalProgram + from ..envconfig import MachineInfo + from ..environment import Environment + from ..linkers.linkers import DynamicLinker + from ..mesonlib import MachineChoice + + CompilerMixinBase = Compiler +else: + CompilerMixinBase = object + +d_feature_args: T.Dict[str, T.Dict[str, str]] = { + 'gcc': { + 'unittest': '-funittest', + 'debug': '-fdebug', + 'version': '-fversion', + 'import_dir': '-J' + }, + 'llvm': { + 'unittest': '-unittest', + 'debug': '-d-debug', + 'version': '-d-version', + 'import_dir': '-J' + }, + 'dmd': { + 'unittest': '-unittest', + 'debug': '-debug', + 'version': '-version', + 'import_dir': '-J' + } +} + +ldc_optimization_args: T.Dict[str, T.List[str]] = { + 'plain': [], + '0': [], + 'g': [], + '1': ['-O1'], + '2': ['-O2'], + '3': ['-O3'], + 's': ['-Oz'], +} + +dmd_optimization_args: T.Dict[str, T.List[str]] = { + 'plain': [], + '0': [], + 'g': [], + '1': ['-O'], + '2': ['-O'], + '3': ['-O'], + 's': ['-O'], +} + + +class DmdLikeCompilerMixin(CompilerMixinBase): + + """Mixin class for DMD and LDC. + + LDC has a number of DMD like arguments, and this class allows for code + sharing between them as makes sense. + """ + + def __init__(self, dmd_frontend_version: T.Optional[str]): + if dmd_frontend_version is None: + self._dmd_has_depfile = False + else: + # -makedeps switch introduced in 2.095 frontend + self._dmd_has_depfile = version_compare(dmd_frontend_version, ">=2.095.0") + + if T.TYPE_CHECKING: + mscrt_args: T.Dict[str, T.List[str]] = {} + + def _get_target_arch_args(self) -> T.List[str]: ... + + LINKER_PREFIX = '-L=' + + def get_output_args(self, outputname: str) -> T.List[str]: + return ['-of=' + outputname] + + def get_linker_output_args(self, outputname: str) -> T.List[str]: + return ['-of=' + outputname] + + def get_include_args(self, path: str, is_system: bool) -> T.List[str]: + if path == "": + path = "." 
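+        # (An empty path is normalized to '.' above, so a bare '-I='
+        # is never emitted.)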
+ return ['-I=' + path] + + def compute_parameters_with_absolute_paths(self, parameter_list: T.List[str], + build_dir: str) -> T.List[str]: + for idx, i in enumerate(parameter_list): + if i[:3] == '-I=': + parameter_list[idx] = i[:3] + os.path.normpath(os.path.join(build_dir, i[3:])) + if i[:4] == '-L-L': + parameter_list[idx] = i[:4] + os.path.normpath(os.path.join(build_dir, i[4:])) + if i[:5] == '-L=-L': + parameter_list[idx] = i[:5] + os.path.normpath(os.path.join(build_dir, i[5:])) + if i[:6] == '-Wl,-L': + parameter_list[idx] = i[:6] + os.path.normpath(os.path.join(build_dir, i[6:])) + + return parameter_list + + def get_warn_args(self, level: str) -> T.List[str]: + return ['-wi'] + + def get_werror_args(self) -> T.List[str]: + return ['-w'] + + def get_coverage_args(self) -> T.List[str]: + return ['-cov'] + + def get_coverage_link_args(self) -> T.List[str]: + return [] + + def get_preprocess_only_args(self) -> T.List[str]: + return ['-E'] + + def get_compile_only_args(self) -> T.List[str]: + return ['-c'] + + def get_depfile_suffix(self) -> str: + return 'deps' + + def get_dependency_gen_args(self, outtarget: str, outfile: str) -> T.List[str]: + if self._dmd_has_depfile: + return [f'-makedeps={outfile}'] + return [] + + def get_pic_args(self) -> T.List[str]: + if self.info.is_windows(): + return [] + return ['-fPIC'] + + def get_feature_args(self, kwargs: T.Dict[str, T.Any], build_to_src: str) -> T.List[str]: + # TODO: using a TypeDict here would improve this + res: T.List[str] = [] + # get_feature_args can be called multiple times for the same target when there is generated source + # so we have to copy the kwargs (target.d_features) dict before popping from it + kwargs = kwargs.copy() + if 'unittest' in kwargs: + unittest = kwargs.pop('unittest') + unittest_arg = d_feature_args[self.id]['unittest'] + if not unittest_arg: + raise EnvironmentException('D compiler %s does not support the "unittest" feature.' % self.name_string()) + if unittest: + res.append(unittest_arg) + + if 'debug' in kwargs: + debug_level = -1 + debugs = kwargs.pop('debug') + if not isinstance(debugs, list): + debugs = [debugs] + + debug_arg = d_feature_args[self.id]['debug'] + if not debug_arg: + raise EnvironmentException('D compiler %s does not support conditional debug identifiers.' % self.name_string()) + + # Parse all debug identifiers and the largest debug level identifier + for d in debugs: + if isinstance(d, int): + if d > debug_level: + debug_level = d + elif isinstance(d, str) and d.isdigit(): + if int(d) > debug_level: + debug_level = int(d) + else: + res.append(f'{debug_arg}={d}') + + if debug_level >= 0: + res.append(f'{debug_arg}={debug_level}') + + if 'versions' in kwargs: + version_level = -1 + versions = kwargs.pop('versions') + if not isinstance(versions, list): + versions = [versions] + + version_arg = d_feature_args[self.id]['version'] + if not version_arg: + raise EnvironmentException('D compiler %s does not support conditional version identifiers.' 
% self.name_string())
+
+            # Parse all version identifiers and the largest version level identifier
+            for v in versions:
+                if isinstance(v, int):
+                    if v > version_level:
+                        version_level = v
+                elif isinstance(v, str) and v.isdigit():
+                    if int(v) > version_level:
+                        version_level = int(v)
+                else:
+                    res.append(f'{version_arg}={v}')
+
+            if version_level >= 0:
+                res.append(f'{version_arg}={version_level}')
+
+        if 'import_dirs' in kwargs:
+            import_dirs = kwargs.pop('import_dirs')
+            if not isinstance(import_dirs, list):
+                import_dirs = [import_dirs]
+
+            import_dir_arg = d_feature_args[self.id]['import_dir']
+            if not import_dir_arg:
+                raise EnvironmentException('D compiler %s does not support the "string import directories" feature.' % self.name_string())
+            for idir_obj in import_dirs:
+                basedir = idir_obj.get_curdir()
+                for idir in idir_obj.get_incdirs():
+                    bldtreedir = os.path.join(basedir, idir)
+                    # Avoid superfluous '/.' at the end of paths when d is '.'
+                    if idir not in ('', '.'):
+                        expdir = bldtreedir
+                    else:
+                        expdir = basedir
+                    srctreedir = os.path.join(build_to_src, expdir)
+                    res.append(f'{import_dir_arg}{srctreedir}')
+                    res.append(f'{import_dir_arg}{bldtreedir}')
+
+        if kwargs:
+            raise EnvironmentException('Unknown D compiler feature(s) selected: %s' % ', '.join(kwargs.keys()))
+
+        return res
+
+    def get_buildtype_linker_args(self, buildtype: str) -> T.List[str]:
+        if buildtype != 'plain':
+            return self._get_target_arch_args()
+        return []
+
+    def gen_import_library_args(self, implibname: str) -> T.List[str]:
+        return self.linker.import_library_args(implibname)
+
+    def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
+                         rpath_paths: T.Tuple[str, ...], build_rpath: str,
+                         install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
+        if self.info.is_windows():
+            return ([], set())
+
+        # GNU ld, solaris ld, and lld acting like GNU ld
+        if self.linker.id.startswith('ld'):
+            # The way that dmd and ldc pass rpath to gcc differs from how we
+            # would pass it directly: the -rpath argument and its value need
+            # to be split into two separate arguments, both prefaced with -L=.
+            args: T.List[str] = []
+            (rpath_args, rpath_dirs_to_remove) = super().build_rpath_args(
+                    env, build_dir, from_dir, rpath_paths, build_rpath, install_rpath)
+            for r in rpath_args:
+                if ',' in r:
+                    a, b = r.split(',', maxsplit=1)
+                    args.append(a)
+                    args.append(self.LINKER_PREFIX + b)
+                else:
+                    args.append(r)
+            return (args, rpath_dirs_to_remove)
+
+        return super().build_rpath_args(
+                env, build_dir, from_dir, rpath_paths, build_rpath, install_rpath)
+
+    @classmethod
+    def _translate_args_to_nongnu(cls, args: T.List[str], info: MachineInfo, link_id: str) -> T.List[str]:
+        # Translate common arguments to flags the LDC/DMD compilers
+        # can understand.
+        # The flags might have been added by pkg-config files,
+        # and are therefore out of the user's control.
+        dcargs: T.List[str] = []
+        # whether we hit a linker argument that expects another arg
+        # see the comment in the "-L" section
+        link_expect_arg = False
+        link_flags_with_arg = [
+            '-rpath', '-rpath-link', '-soname', '-compatibility_version', '-current_version',
+        ]
+        for arg in args:
+            # Translate OS specific arguments first.
+            osargs: T.List[str] = []
+            if info.is_windows():
+                osargs = cls.translate_arg_to_windows(arg)
+            elif info.is_darwin():
+                osargs = cls._translate_arg_to_osx(arg)
+            if osargs:
+                dcargs.extend(osargs)
+                continue
+
+            # Translate common D arguments here.
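+            # A few illustrative translations performed by the branches
+            # below (not exhaustive):
+            #   -pthread               -> (dropped)
+            #   -Wl,-rpath,/usr/lib    -> -L=-rpath -L=/usr/lib
+            #   -lfoo                  -> -L=-lfoo
+            #   -isystem/usr/include   -> -I/usr/include
+            #   -L/usr/lib             -> -L=-L/usr/lib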
+            if arg == '-pthread':
+                continue
+            if arg.startswith('-fstack-protector'):
+                continue
+            if arg.startswith('-D') and not (arg == '-D' or arg.startswith(('-Dd', '-Df'))):
+                # ignore all '-D*' flags (like '-D_THREAD_SAFE')
+                # unless they are related to documentation
+                continue
+            if arg.startswith('-Wl,'):
+                # Translate linker arguments here.
+                linkargs = arg[arg.index(',') + 1:].split(',')
+                for la in linkargs:
+                    dcargs.append('-L=' + la.strip())
+                continue
+            elif arg.startswith(('-link-defaultlib', '-linker', '-link-internally', '-linkonce-templates', '-lib')):
+                # these are special arguments to the LDC linker call,
+                # arguments like "-link-defaultlib-shared" do *not*
+                # denote a library to be linked, but change the default
+                # Phobos/DRuntime linking behavior, while "-linker" sets the
+                # default linker.
+                dcargs.append(arg)
+                continue
+            elif arg.startswith('-l'):
+                # translate library link flag
+                dcargs.append('-L=' + arg)
+                continue
+            elif arg.startswith('-isystem'):
+                # translate -isystem system include path
+                # this flag might sometimes be added by C library Cflags via
+                # pkg-config.
+                # NOTE: -isystem and -I are not 100% equivalent, so this is just
+                # a workaround for the most common cases.
+                if arg.startswith('-isystem='):
+                    dcargs.append('-I=' + arg[9:])
+                else:
+                    dcargs.append('-I' + arg[8:])
+                continue
+            elif arg.startswith('-idirafter'):
+                # same as -isystem, but appends the path instead
+                if arg.startswith('-idirafter='):
+                    dcargs.append('-I=' + arg[11:])
+                else:
+                    dcargs.append('-I' + arg[10:])
+                continue
+            elif arg.startswith('-L'):
+                # The D linker expects library search paths in the form of -L=-L/path (the '=' is optional).
+                #
+                # This function receives a mix of arguments already prepended
+                # with -L for the D linker driver and other linker arguments.
+                # The arguments starting with -L can be:
+                #  - library search path (with or without a second -L)
+                #    - it can come from pkg-config (a single -L)
+                #    - or from the user passing linker flags (-L-L would be expected)
+                #  - arguments like "-L=-rpath" that expect a second argument (also prepended with -L)
+                #  - arguments like "-L=@rpath/xxx" without a second argument (on Apple platforms)
+                #  - arguments like "-L=/SUBSYSTEM:CONSOLE" (for the Windows linker)
+                #
+                # The logic that follows tries to detect all these cases (some may be missing)
+                # in order to prepend a -L only for the library search paths with a single -L
+
+                if arg.startswith('-L='):
+                    suffix = arg[3:]
+                else:
+                    suffix = arg[2:]
+
+                if link_expect_arg:
+                    # flags like rpath and soname expect a path or filename respectively,
+                    # we must not alter it (i.e. prefixing with -L for a lib search path)
+                    dcargs.append(arg)
+                    link_expect_arg = False
+                    continue
+
+                if suffix in link_flags_with_arg:
+                    link_expect_arg = True
+
+                if suffix.startswith('-') or suffix.startswith('@'):
+                    # this is not a search path
+                    dcargs.append(arg)
+                    continue
+
+                # linker flag such as -L=/DEBUG must pass through
+                if info.is_windows() and link_id == 'link' and suffix.startswith('/'):
+                    dcargs.append(arg)
+                    continue
+
+                # Make sure static library files are passed properly to the linker.
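+                # e.g. '-L=/usr/lib/libfoo.a' is passed through unchanged
+                # rather than being treated as a library search path.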
+ if arg.endswith('.a') or arg.endswith('.lib'): + if len(suffix) > 0 and not suffix.startswith('-'): + dcargs.append('-L=' + suffix) + continue + + dcargs.append('-L=' + arg) + continue + elif not arg.startswith('-') and arg.endswith(('.a', '.lib')): + # ensure static libraries are passed through to the linker + dcargs.append('-L=' + arg) + continue + else: + dcargs.append(arg) + + return dcargs + + @classmethod + def translate_arg_to_windows(cls, arg: str) -> T.List[str]: + args: T.List[str] = [] + if arg.startswith('-Wl,'): + # Translate linker arguments here. + linkargs = arg[arg.index(',') + 1:].split(',') + for la in linkargs: + if la.startswith('--out-implib='): + # Import library name + args.append('-L=/IMPLIB:' + la[13:].strip()) + elif arg.startswith('-mscrtlib='): + args.append(arg) + mscrtlib = arg[10:].lower() + if cls is LLVMDCompiler: + # Default crt libraries for LDC2 must be excluded for other + # selected crt options. + if mscrtlib != 'libcmt': + args.append('-L=/NODEFAULTLIB:libcmt') + args.append('-L=/NODEFAULTLIB:libvcruntime') + + # Fixes missing definitions for printf-functions in VS2017 + if mscrtlib.startswith('msvcrt'): + args.append('-L=/DEFAULTLIB:legacy_stdio_definitions.lib') + + return args + + @classmethod + def _translate_arg_to_osx(cls, arg: str) -> T.List[str]: + args: T.List[str] = [] + if arg.startswith('-install_name'): + args.append('-L=' + arg) + return args + + @classmethod + def _unix_args_to_native(cls, args: T.List[str], info: MachineInfo, link_id: str = '') -> T.List[str]: + return cls._translate_args_to_nongnu(args, info, link_id) + + def get_debug_args(self, is_debug: bool) -> T.List[str]: + ddebug_args = [] + if is_debug: + ddebug_args = [d_feature_args[self.id]['debug']] + + return clike_debug_args[is_debug] + ddebug_args + + def _get_crt_args(self, crt_val: str, buildtype: str) -> T.List[str]: + if not self.info.is_windows(): + return [] + + if crt_val in self.mscrt_args: + return self.mscrt_args[crt_val] + assert crt_val in {'from_buildtype', 'static_from_buildtype'} + + dbg = 'mdd' + rel = 'md' + if crt_val == 'static_from_buildtype': + dbg = 'mtd' + rel = 'mt' + + # Match what build type flags used to do. 
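+        # For example, with crt_val='from_buildtype':
+        #   'debug'          -> self.mscrt_args['mdd'] == ['-mscrtlib=msvcrtd']
+        #   'debugoptimized' -> self.mscrt_args['md']  == ['-mscrtlib=msvcrt']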
+ if buildtype == 'plain': + return [] + elif buildtype == 'debug': + return self.mscrt_args[dbg] + elif buildtype == 'debugoptimized': + return self.mscrt_args[rel] + elif buildtype == 'release': + return self.mscrt_args[rel] + elif buildtype == 'minsize': + return self.mscrt_args[rel] + else: + assert buildtype == 'custom' + raise EnvironmentException('Requested C runtime based on buildtype, but buildtype is "custom".') + + def get_soname_args(self, env: 'Environment', prefix: str, shlib_name: str, + suffix: str, soversion: str, + darwin_versions: T.Tuple[str, str]) -> T.List[str]: + sargs = super().get_soname_args(env, prefix, shlib_name, suffix, + soversion, darwin_versions) + + # LDC and DMD actually do use a linker, but they proxy all of that with + # their own arguments + soargs: T.List[str] = [] + if self.linker.id.startswith('ld.'): + for arg in sargs: + a, b = arg.split(',', maxsplit=1) + soargs.append(a) + soargs.append(self.LINKER_PREFIX + b) + return soargs + elif self.linker.id.startswith('ld64'): + for arg in sargs: + if not arg.startswith(self.LINKER_PREFIX): + soargs.append(self.LINKER_PREFIX + arg) + else: + soargs.append(arg) + return soargs + else: + return sargs + + def get_allow_undefined_link_args(self) -> T.List[str]: + args = self.linker.get_allow_undefined_args() + if self.info.is_darwin(): + # On macOS we're passing these options to the C compiler, but + # they're linker options and need -Wl, so clang/gcc knows what to + # do with them. I'm assuming, but don't know for certain, that + # ldc/dmd do some kind of mapping internally for arguments they + # understand, but pass arguments they don't understand directly. + args = [a.replace('-L=', '-Xcc=-Wl,') for a in args] + return args + + +class DCompilerArgs(CompilerArgs): + prepend_prefixes = ('-I', '-L') + dedup2_prefixes = ('-I', ) + + +class DCompiler(Compiler): + mscrt_args = { + 'none': ['-mscrtlib='], + 'md': ['-mscrtlib=msvcrt'], + 'mdd': ['-mscrtlib=msvcrtd'], + 'mt': ['-mscrtlib=libcmt'], + 'mtd': ['-mscrtlib=libcmtd'], + } + + language = 'd' + + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, + info: 'MachineInfo', arch: str, *, + exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None, + is_cross: bool = False): + super().__init__([], exelist, version, for_machine, info, linker=linker, + full_version=full_version, is_cross=is_cross) + self.arch = arch + self.exe_wrapper = exe_wrapper + + def sanity_check(self, work_dir: str, environment: 'Environment') -> None: + source_name = os.path.join(work_dir, 'sanity.d') + output_name = os.path.join(work_dir, 'dtest') + with open(source_name, 'w', encoding='utf-8') as ofile: + ofile.write('''void main() { }''') + pc = subprocess.Popen(self.exelist + self.get_output_args(output_name) + self._get_target_arch_args() + [source_name], cwd=work_dir) + pc.wait() + if pc.returncode != 0: + raise EnvironmentException('D compiler %s cannot compile programs.' % self.name_string()) + if self.is_cross: + if self.exe_wrapper is None: + # Can't check if the binaries run so we have to assume they do + return + cmdlist = self.exe_wrapper.get_command() + [output_name] + else: + cmdlist = [output_name] + if subprocess.call(cmdlist) != 0: + raise EnvironmentException('Executables created by D compiler %s are not runnable.' 
% self.name_string()) + + def needs_static_linker(self) -> bool: + return True + + def get_depfile_suffix(self) -> str: + return 'deps' + + def get_pic_args(self) -> T.List[str]: + if self.info.is_windows(): + return [] + return ['-fPIC'] + + def get_feature_args(self, kwargs: T.Dict[str, T.Any], build_to_src: str) -> T.List[str]: + # TODO: using a TypeDict here would improve this + res: T.List[str] = [] + # get_feature_args can be called multiple times for the same target when there is generated source + # so we have to copy the kwargs (target.d_features) dict before popping from it + kwargs = kwargs.copy() + if 'unittest' in kwargs: + unittest = kwargs.pop('unittest') + unittest_arg = d_feature_args[self.id]['unittest'] + if not unittest_arg: + raise EnvironmentException('D compiler %s does not support the "unittest" feature.' % self.name_string()) + if unittest: + res.append(unittest_arg) + + if 'debug' in kwargs: + debug_level = -1 + debugs = kwargs.pop('debug') + if not isinstance(debugs, list): + debugs = [debugs] + + debug_arg = d_feature_args[self.id]['debug'] + if not debug_arg: + raise EnvironmentException('D compiler %s does not support conditional debug identifiers.' % self.name_string()) + + # Parse all debug identifiers and the largest debug level identifier + for d in debugs: + if isinstance(d, int): + if d > debug_level: + debug_level = d + elif isinstance(d, str) and d.isdigit(): + if int(d) > debug_level: + debug_level = int(d) + else: + res.append(f'{debug_arg}={d}') + + if debug_level >= 0: + res.append(f'{debug_arg}={debug_level}') + + if 'versions' in kwargs: + version_level = -1 + versions = kwargs.pop('versions') + if not isinstance(versions, list): + versions = [versions] + + version_arg = d_feature_args[self.id]['version'] + if not version_arg: + raise EnvironmentException('D compiler %s does not support conditional version identifiers.' % self.name_string()) + + # Parse all version identifiers and the largest version level identifier + for v in versions: + if isinstance(v, int): + if v > version_level: + version_level = v + elif isinstance(v, str) and v.isdigit(): + if int(v) > version_level: + version_level = int(v) + else: + res.append(f'{version_arg}={v}') + + if version_level >= 0: + res.append(f'{version_arg}={version_level}') + + if 'import_dirs' in kwargs: + import_dirs = kwargs.pop('import_dirs') + if not isinstance(import_dirs, list): + import_dirs = [import_dirs] + + import_dir_arg = d_feature_args[self.id]['import_dir'] + if not import_dir_arg: + raise EnvironmentException('D compiler %s does not support the "string import directories" feature.' % self.name_string()) + for idir_obj in import_dirs: + basedir = idir_obj.get_curdir() + for idir in idir_obj.get_incdirs(): + bldtreedir = os.path.join(basedir, idir) + # Avoid superfluous '/.' at the end of paths when d is '.' 
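+                    # e.g. basedir='sub', idir='.': use 'sub' rather than 'sub/.'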
+                    if idir not in ('', '.'):
+                        expdir = bldtreedir
+                    else:
+                        expdir = basedir
+                    srctreedir = os.path.join(build_to_src, expdir)
+                    res.append(f'{import_dir_arg}{srctreedir}')
+                    res.append(f'{import_dir_arg}{bldtreedir}')
+
+        if kwargs:
+            raise EnvironmentException('Unknown D compiler feature(s) selected: %s' % ', '.join(kwargs.keys()))
+
+        return res
+
+    def get_buildtype_linker_args(self, buildtype: str) -> T.List[str]:
+        if buildtype != 'plain':
+            return self._get_target_arch_args()
+        return []
+
+    def compiler_args(self, args: T.Optional[T.Iterable[str]] = None) -> DCompilerArgs:
+        return DCompilerArgs(self, args)
+
+    def has_multi_arguments(self, args: T.List[str], env: 'Environment') -> T.Tuple[bool, bool]:
+        return self.compiles('int i;\n', env, extra_args=args)
+
+    def _get_target_arch_args(self) -> T.List[str]:
+        # LDC2 on Windows targets the current OS architecture by default, but
+        # it should follow the target specified by the MSVC toolchain.
+        if self.info.is_windows():
+            if self.arch == 'x86_64':
+                return ['-m64']
+            return ['-m32']
+        return []
+
+    def get_crt_compile_args(self, crt_val: str, buildtype: str) -> T.List[str]:
+        return []
+
+    def get_crt_link_args(self, crt_val: str, buildtype: str) -> T.List[str]:
+        return []
+
+    def _get_compile_extra_args(self, extra_args: T.Union[T.List[str], T.Callable[[CompileCheckMode], T.List[str]], None] = None) -> T.List[str]:
+        args = self._get_target_arch_args()
+        if extra_args:
+            if callable(extra_args):
+                extra_args = extra_args(CompileCheckMode.COMPILE)
+            if isinstance(extra_args, list):
+                args.extend(extra_args)
+            elif isinstance(extra_args, str):
+                args.append(extra_args)
+        return args
+
+    def run(self, code: 'mesonlib.FileOrString', env: 'Environment', *,
+            extra_args: T.Union[T.List[str], T.Callable[[CompileCheckMode], T.List[str]], None] = None,
+            dependencies: T.Optional[T.List['Dependency']] = None) -> compilers.RunResult:
+        need_exe_wrapper = env.need_exe_wrapper(self.for_machine)
+        if need_exe_wrapper and self.exe_wrapper is None:
+            raise compilers.CrossNoRunException('Can not run test applications in this cross environment.')
+        extra_args = self._get_compile_extra_args(extra_args)
+        with self._build_wrapper(code, env, extra_args, dependencies, mode=CompileCheckMode.LINK, want_output=True) as p:
+            if p.returncode != 0:
+                mlog.debug(f'Could not compile test file {p.input_name}: {p.returncode}\n')
+                return compilers.RunResult(False)
+            if need_exe_wrapper:
+                cmdlist = self.exe_wrapper.get_command() + [p.output_name]
+            else:
+                cmdlist = [p.output_name]
+        try:
+            pe, so, se = mesonlib.Popen_safe(cmdlist)
+        except Exception as e:
+            mlog.debug(f'Could not run: {cmdlist} (error: {e})\n')
+            return compilers.RunResult(False)
+
+        mlog.debug('Program stdout:\n')
+        mlog.debug(so)
+        mlog.debug('Program stderr:\n')
+        mlog.debug(se)
+        return compilers.RunResult(True, pe.returncode, so, se)
+
+    def sizeof(self, typename: str, prefix: str, env: 'Environment', *,
+               extra_args: T.Union[None, T.List[str], T.Callable[[CompileCheckMode], T.List[str]]] = None,
+               dependencies: T.Optional[T.List['Dependency']] = None) -> T.Tuple[int, bool]:
+        if extra_args is None:
+            extra_args = []
+        t = f'''
+            import std.stdio : writeln;
+            {prefix}
+            void main() {{
+                writeln(({typename}).sizeof);
+            }}
+            '''
+        res = self.cached_run(t, env, extra_args=extra_args,
+                              dependencies=dependencies)
+        if not res.compiled:
+            return -1, False
+        if res.returncode != 0:
+            raise mesonlib.EnvironmentException('Could not run sizeof test binary.')
+        return int(res.stdout), res.cached
+
+    def
alignment(self, typename: str, prefix: str, env: 'Environment', *,
+                  extra_args: T.Optional[T.List[str]] = None,
+                  dependencies: T.Optional[T.List['Dependency']] = None) -> T.Tuple[int, bool]:
+        if extra_args is None:
+            extra_args = []
+        t = f'''
+            import std.stdio : writeln;
+            {prefix}
+            void main() {{
+                writeln(({typename}).alignof);
+            }}
+            '''
+        res = self.run(t, env, extra_args=extra_args,
+                       dependencies=dependencies)
+        if not res.compiled:
+            raise mesonlib.EnvironmentException('Could not compile alignment test.')
+        if res.returncode != 0:
+            raise mesonlib.EnvironmentException('Could not run alignment test binary.')
+        align = int(res.stdout)
+        if align == 0:
+            raise mesonlib.EnvironmentException(f'Could not determine alignment of {typename}. Sorry. You might want to file a bug.')
+        return align, res.cached
+
+    def has_header(self, hname: str, prefix: str, env: 'Environment', *,
+                   extra_args: T.Union[None, T.List[str], T.Callable[['CompileCheckMode'], T.List[str]]] = None,
+                   dependencies: T.Optional[T.List['Dependency']] = None,
+                   disable_cache: bool = False) -> T.Tuple[bool, bool]:
+
+        extra_args = self._get_compile_extra_args(extra_args)
+        code = f'''{prefix}
+        import {hname};
+        '''
+        return self.compiles(code, env, extra_args=extra_args,
+                             dependencies=dependencies, mode=CompileCheckMode.COMPILE, disable_cache=disable_cache)
+
+class GnuDCompiler(GnuCompiler, DCompiler):
+
+    # we mostly want DCompiler, but that gives us the Compiler.LINKER_PREFIX instead
+    LINKER_PREFIX = GnuCompiler.LINKER_PREFIX
+    id = 'gcc'
+
+    def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice,
+                 info: 'MachineInfo', arch: str, *,
+                 exe_wrapper: T.Optional['ExternalProgram'] = None,
+                 linker: T.Optional['DynamicLinker'] = None,
+                 full_version: T.Optional[str] = None,
+                 is_cross: bool = False):
+        DCompiler.__init__(self, exelist, version, for_machine, info, arch,
+                           exe_wrapper=exe_wrapper, linker=linker,
+                           full_version=full_version, is_cross=is_cross)
+        GnuCompiler.__init__(self, {})
+        default_warn_args = ['-Wall', '-Wdeprecated']
+        self.warn_args = {'0': [],
+                          '1': default_warn_args,
+                          '2': default_warn_args + ['-Wextra'],
+                          '3': default_warn_args + ['-Wextra', '-Wpedantic'],
+                          'everything': (default_warn_args + ['-Wextra', '-Wpedantic'] +
+                                         self.supported_warn_args(gnu_common_warning_args))}
+
+        self.base_options = {
+            OptionKey(o) for o in [
+             'b_colorout', 'b_sanitize', 'b_staticpic', 'b_vscrt',
+             'b_coverage', 'b_pgo', 'b_ndebug']}
+
+        self._has_color_support = version_compare(self.version, '>=4.9')
+        # dependencies were implemented before, but broken - support was fixed in GCC 7.1+
+        # (and some backported versions)
+        self._has_deps_support = version_compare(self.version, '>=7.1')
+
+    def get_colorout_args(self, colortype: str) -> T.List[str]:
+        if self._has_color_support:
+            return super().get_colorout_args(colortype)
+        return []
+
+    def get_dependency_gen_args(self, outtarget: str, outfile: str) -> T.List[str]:
+        if self._has_deps_support:
+            return super().get_dependency_gen_args(outtarget, outfile)
+        return []
+
+    def get_warn_args(self, level: str) -> T.List[str]:
+        return self.warn_args[level]
+
+    def get_buildtype_args(self, buildtype: str) -> T.List[str]:
+        return d_gdc_buildtype_args[buildtype]
+
+    def compute_parameters_with_absolute_paths(self, parameter_list: T.List[str],
+                                               build_dir: str) -> T.List[str]:
+        for idx, i in enumerate(parameter_list):
+            if i[:2] == '-I' or i[:2] == '-L':
+                parameter_list[idx] = i[:2] + os.path.normpath(os.path.join(build_dir, i[2:]))
+
+        return
parameter_list + + def get_allow_undefined_link_args(self) -> T.List[str]: + return self.linker.get_allow_undefined_args() + + def get_linker_always_args(self) -> T.List[str]: + args = super().get_linker_always_args() + if self.info.is_windows(): + return args + return args + ['-shared-libphobos'] + + def get_assert_args(self, disable: bool) -> T.List[str]: + if disable: + return ['-frelease'] + return [] + +# LDC uses the DMD frontend code to parse and analyse the code. +# It then uses LLVM for the binary code generation and optimizations. +# This function retrieves the dmd frontend version, which determines +# the common features between LDC and DMD. +# We need the complete version text because the match is not on first line +# of version_output +def find_ldc_dmd_frontend_version(version_output: T.Optional[str]) -> T.Optional[str]: + if version_output is None: + return None + version_regex = re.search(r'DMD v(\d+\.\d+\.\d+)', version_output) + if version_regex: + return version_regex.group(1) + return None + +class LLVMDCompiler(DmdLikeCompilerMixin, DCompiler): + + id = 'llvm' + + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, + info: 'MachineInfo', arch: str, *, + exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None, + is_cross: bool = False, version_output: T.Optional[str] = None): + DCompiler.__init__(self, exelist, version, for_machine, info, arch, + exe_wrapper=exe_wrapper, linker=linker, + full_version=full_version, is_cross=is_cross) + DmdLikeCompilerMixin.__init__(self, dmd_frontend_version=find_ldc_dmd_frontend_version(version_output)) + self.base_options = {OptionKey(o) for o in ['b_coverage', 'b_colorout', 'b_vscrt', 'b_ndebug']} + + def get_colorout_args(self, colortype: str) -> T.List[str]: + if colortype == 'always': + return ['-enable-color'] + return [] + + def get_warn_args(self, level: str) -> T.List[str]: + if level in {'2', '3'}: + return ['-wi', '-dw'] + elif level == '1': + return ['-wi'] + return [] + + def get_buildtype_args(self, buildtype: str) -> T.List[str]: + if buildtype != 'plain': + return self._get_target_arch_args() + d_ldc_buildtype_args[buildtype] + return d_ldc_buildtype_args[buildtype] + + def get_pic_args(self) -> T.List[str]: + return ['-relocation-model=pic'] + + def get_crt_link_args(self, crt_val: str, buildtype: str) -> T.List[str]: + return self._get_crt_args(crt_val, buildtype) + + def unix_args_to_native(self, args: T.List[str]) -> T.List[str]: + return self._unix_args_to_native(args, self.info, self.linker.id) + + def get_optimization_args(self, optimization_level: str) -> T.List[str]: + return ldc_optimization_args[optimization_level] + + @classmethod + def use_linker_args(cls, linker: str, version: str) -> T.List[str]: + return [f'-linker={linker}'] + + def get_linker_always_args(self) -> T.List[str]: + args = super().get_linker_always_args() + if self.info.is_windows(): + return args + return args + ['-link-defaultlib-shared'] + + def get_assert_args(self, disable: bool) -> T.List[str]: + if disable: + return ['--release'] + return [] + + def rsp_file_syntax(self) -> RSPFileSyntax: + # We use `mesonlib.is_windows` here because we want to know what the + # build machine is, not the host machine. This really means we would + # have the Environment not the MachineInfo in the compiler. 
+        return RSPFileSyntax.MSVC if is_windows() else RSPFileSyntax.GCC
+
+
+class DmdDCompiler(DmdLikeCompilerMixin, DCompiler):
+
+    id = 'dmd'
+
+    def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice,
+                 info: 'MachineInfo', arch: str, *,
+                 exe_wrapper: T.Optional['ExternalProgram'] = None,
+                 linker: T.Optional['DynamicLinker'] = None,
+                 full_version: T.Optional[str] = None,
+                 is_cross: bool = False):
+        DCompiler.__init__(self, exelist, version, for_machine, info, arch,
+                           exe_wrapper=exe_wrapper, linker=linker,
+                           full_version=full_version, is_cross=is_cross)
+        DmdLikeCompilerMixin.__init__(self, version)
+        self.base_options = {OptionKey(o) for o in ['b_coverage', 'b_colorout', 'b_vscrt', 'b_ndebug']}
+
+    def get_colorout_args(self, colortype: str) -> T.List[str]:
+        if colortype == 'always':
+            return ['-color=on']
+        return []
+
+    def get_buildtype_args(self, buildtype: str) -> T.List[str]:
+        if buildtype != 'plain':
+            return self._get_target_arch_args() + d_dmd_buildtype_args[buildtype]
+        return d_dmd_buildtype_args[buildtype]
+
+    def get_std_exe_link_args(self) -> T.List[str]:
+        if self.info.is_windows():
+            # DMD links against the D runtime only when a main symbol is found,
+            # so these need to be inserted when linking static D libraries.
+            if self.arch == 'x86_64':
+                return ['phobos64.lib']
+            elif self.arch == 'x86_mscoff':
+                return ['phobos32mscoff.lib']
+            return ['phobos.lib']
+        return []
+
+    def get_std_shared_lib_link_args(self) -> T.List[str]:
+        libname = 'libphobos2.so'
+        if self.info.is_windows():
+            if self.arch == 'x86_64':
+                libname = 'phobos64.lib'
+            elif self.arch == 'x86_mscoff':
+                libname = 'phobos32mscoff.lib'
+            else:
+                libname = 'phobos.lib'
+        return ['-shared', '-defaultlib=' + libname]
+
+    def _get_target_arch_args(self) -> T.List[str]:
+        # DMD32 and DMD64 on 64-bit Windows default to 32-bit (OMF).
+        # Force the target to 64-bit in order to stay consistent
+        # across the different platforms.
+        if self.info.is_windows():
+            if self.arch == 'x86_64':
+                return ['-m64']
+            elif self.arch == 'x86_mscoff':
+                return ['-m32mscoff']
+            return ['-m32']
+        return []
+
+    def get_crt_compile_args(self, crt_val: str, buildtype: str) -> T.List[str]:
+        return self._get_crt_args(crt_val, buildtype)
+
+    def unix_args_to_native(self, args: T.List[str]) -> T.List[str]:
+        return self._unix_args_to_native(args, self.info, self.linker.id)
+
+    def get_optimization_args(self, optimization_level: str) -> T.List[str]:
+        return dmd_optimization_args[optimization_level]
+
+    def can_linker_accept_rsp(self) -> bool:
+        return False
+
+    def get_linker_always_args(self) -> T.List[str]:
+        args = super().get_linker_always_args()
+        if self.info.is_windows():
+            return args
+        return args + ['-defaultlib=phobos2', '-debuglib=phobos2']
+
+    def get_assert_args(self, disable: bool) -> T.List[str]:
+        if disable:
+            return ['-release']
+        return []
+
+    def rsp_file_syntax(self) -> RSPFileSyntax:
+        return RSPFileSyntax.MSVC
diff --git a/vendored-meson/meson/mesonbuild/compilers/detect.py b/vendored-meson/meson/mesonbuild/compilers/detect.py
new file mode 100644
index 000000000000..210ec4d4065a
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/compilers/detect.py
@@ -0,0 +1,1381 @@
+# Copyright 2012-2022 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+from ..mesonlib import (
+    MesonException, EnvironmentException, MachineChoice, join_args,
+    search_version, is_windows, Popen_safe, Popen_safe_logged, windows_proof_rm,
+)
+from ..envconfig import BinaryTable
+from .. import mlog
+
+from ..linkers import guess_win_linker, guess_nix_linker
+
+import subprocess
+import platform
+import re
+import shutil
+import tempfile
+import os
+import typing as T
+
+if T.TYPE_CHECKING:
+    from .compilers import Compiler
+    from .c import CCompiler
+    from .cpp import CPPCompiler
+    from .fortran import FortranCompiler
+    from .rust import RustCompiler
+    from ..linkers.linkers import StaticLinker, DynamicLinker
+    from ..environment import Environment
+    from ..programs import ExternalProgram
+
+
+# Default compilers and linkers
+# =============================
+
+defaults: T.Dict[str, T.List[str]] = {}
+
+# List of potential compilers.
+if is_windows():
+    # Intel C and C++ compiler is icl on Windows, but icc and icpc elsewhere.
+    # Search for icl before cl, since Intel "helpfully" provides a
+    # cl.exe that returns *exactly the same thing* that Microsoft's
+    # cl.exe does, and if icl is present, it's almost certainly what
+    # you want.
+    defaults['c'] = ['icl', 'cl', 'cc', 'gcc', 'clang', 'clang-cl', 'pgcc']
+    # There is currently no pgc++ for Windows, only for Mac and Linux.
+    defaults['cpp'] = ['icl', 'cl', 'c++', 'g++', 'clang++', 'clang-cl']
+    defaults['fortran'] = ['ifort', 'gfortran', 'flang', 'pgfortran', 'g95']
+    # Clang and clang++ are valid, but currently unsupported.
+    defaults['objc'] = ['cc', 'gcc']
+    defaults['objcpp'] = ['c++', 'g++']
+    defaults['cs'] = ['csc', 'mcs']
+else:
+    if platform.machine().lower() == 'e2k':
+        defaults['c'] = ['cc', 'gcc', 'lcc', 'clang']
+        defaults['cpp'] = ['c++', 'g++', 'l++', 'clang++']
+        defaults['objc'] = ['clang']
+        defaults['objcpp'] = ['clang++']
+    else:
+        defaults['c'] = ['cc', 'gcc', 'clang', 'nvc', 'pgcc', 'icc', 'icx']
+        defaults['cpp'] = ['c++', 'g++', 'clang++', 'nvc++', 'pgc++', 'icpc', 'icpx']
+        defaults['objc'] = ['cc', 'gcc', 'clang']
+        defaults['objcpp'] = ['c++', 'g++', 'clang++']
+    defaults['fortran'] = ['gfortran', 'flang', 'nvfortran', 'pgfortran', 'ifort', 'ifx', 'g95']
+    defaults['cs'] = ['mcs', 'csc']
+defaults['d'] = ['ldc2', 'ldc', 'gdc', 'dmd']
+defaults['java'] = ['javac']
+defaults['cuda'] = ['nvcc']
+defaults['rust'] = ['rustc']
+defaults['swift'] = ['swiftc']
+defaults['vala'] = ['valac']
+defaults['cython'] = ['cython', 'cython3']  # Official name is cython, but Debian renamed it to cython3.
+defaults['static_linker'] = ['ar', 'gar'] +defaults['strip'] = ['strip'] +defaults['vs_static_linker'] = ['lib'] +defaults['clang_cl_static_linker'] = ['llvm-lib'] +defaults['cuda_static_linker'] = ['nvlink'] +defaults['gcc_static_linker'] = ['gcc-ar'] +defaults['clang_static_linker'] = ['llvm-ar'] +defaults['nasm'] = ['nasm', 'yasm'] + + +def compiler_from_language(env: 'Environment', lang: str, for_machine: MachineChoice) -> T.Optional[Compiler]: + lang_map: T.Dict[str, T.Callable[['Environment', MachineChoice], Compiler]] = { + 'c': detect_c_compiler, + 'cpp': detect_cpp_compiler, + 'objc': detect_objc_compiler, + 'cuda': detect_cuda_compiler, + 'objcpp': detect_objcpp_compiler, + 'java': detect_java_compiler, + 'cs': detect_cs_compiler, + 'vala': detect_vala_compiler, + 'd': detect_d_compiler, + 'rust': detect_rust_compiler, + 'fortran': detect_fortran_compiler, + 'swift': detect_swift_compiler, + 'cython': detect_cython_compiler, + 'nasm': detect_nasm_compiler, + 'masm': detect_masm_compiler, + } + return lang_map[lang](env, for_machine) if lang in lang_map else None + +def detect_compiler_for(env: 'Environment', lang: str, for_machine: MachineChoice, skip_sanity_check: bool) -> T.Optional[Compiler]: + comp = compiler_from_language(env, lang, for_machine) + if comp is None: + return comp + assert comp.for_machine == for_machine + env.coredata.process_new_compiler(lang, comp, env) + if not skip_sanity_check: + comp.sanity_check(env.get_scratch_dir(), env) + env.coredata.compilers[comp.for_machine][lang] = comp + return comp + + +# Helpers +# ======= + +def _get_compilers(env: 'Environment', lang: str, for_machine: MachineChoice) -> T.Tuple[T.List[T.List[str]], T.List[str], T.Optional['ExternalProgram']]: + ''' + The list of compilers is detected in the exact same way for + C, C++, ObjC, ObjC++, Fortran, CS so consolidate it here. + ''' + value = env.lookup_binary_entry(for_machine, lang) + if value is not None: + comp, ccache = BinaryTable.parse_entry(value) + # Return value has to be a list of compiler 'choices' + compilers = [comp] + else: + if not env.machines.matches_build_machine(for_machine): + raise EnvironmentException(f'{lang!r} compiler binary not defined in cross or native file') + compilers = [[x] for x in defaults[lang]] + ccache = BinaryTable.detect_compiler_cache() + + if env.machines.matches_build_machine(for_machine): + exe_wrap: T.Optional[ExternalProgram] = None + else: + exe_wrap = env.get_exe_wrapper() + + return compilers, ccache, exe_wrap + +def _handle_exceptions( + exceptions: T.Mapping[str, T.Union[Exception, str]], + binaries: T.List[T.List[str]], + bintype: str = 'compiler') -> T.NoReturn: + errmsg = f'Unknown {bintype}(s): {binaries}' + if exceptions: + errmsg += '\nThe following exception(s) were encountered:' + for c, e in exceptions.items(): + errmsg += f'\nRunning `{c}` gave "{e}"' + raise EnvironmentException(errmsg) + + +# Linker specific +# =============== + +def detect_static_linker(env: 'Environment', compiler: Compiler) -> StaticLinker: + from . 
import d
+    from ..linkers import linkers
+    linker = env.lookup_binary_entry(compiler.for_machine, 'ar')
+    if linker is not None:
+        trials = [linker]
+    else:
+        default_linkers = [[l] for l in defaults['static_linker']]
+        if compiler.language == 'cuda':
+            trials = [defaults['cuda_static_linker']] + default_linkers
+        elif compiler.get_argument_syntax() == 'msvc':
+            trials = [defaults['vs_static_linker'], defaults['clang_cl_static_linker']]
+        elif compiler.id == 'gcc':
+            # Use gcc-ar if available; needed for LTO
+            trials = [defaults['gcc_static_linker']] + default_linkers
+        elif compiler.id == 'clang':
+            # Use llvm-ar if available; needed for LTO
+            trials = [defaults['clang_static_linker']] + default_linkers
+        elif compiler.language == 'd':
+            # Prefer static linkers over linkers used by D compilers
+            if is_windows():
+                trials = [defaults['vs_static_linker'], defaults['clang_cl_static_linker'], compiler.get_linker_exelist()]
+            else:
+                trials = default_linkers
+        elif compiler.id == 'intel-cl' and compiler.language == 'c':  # why not cpp? Is this a bug?
+            # Intel has its own linker that acts like Microsoft's lib
+            trials = [['xilib']]
+        elif is_windows() and compiler.id == 'pgi':  # this handles cpp / nvidia HPC, in addition to just c/fortran
+            trials = [['ar']]  # For PGI on Windows, "ar" is just a wrapper calling link/lib.
+        elif is_windows() and compiler.id == 'nasm':
+            # This may well be LINK.EXE if it's under an MSVC environment
+            trials = [defaults['vs_static_linker'], defaults['clang_cl_static_linker']] + default_linkers
+        else:
+            trials = default_linkers
+    popen_exceptions = {}
+    for linker in trials:
+        linker_name = os.path.basename(linker[0])
+
+        if any(os.path.basename(x) in {'lib', 'lib.exe', 'llvm-lib', 'llvm-lib.exe', 'xilib', 'xilib.exe'} for x in linker):
+            arg = '/?'
+        elif linker_name in {'ar2000', 'ar2000.exe', 'ar430', 'ar430.exe', 'armar', 'armar.exe'}:
+            arg = '?'
+        else:
+            arg = '--version'
+        try:
+            p, out, err = Popen_safe_logged(linker + [arg], msg='Detecting linker via')
+        except OSError as e:
+            popen_exceptions[join_args(linker + [arg])] = e
+            continue
+        if "xilib: executing 'lib'" in err:
+            return linkers.IntelVisualStudioLinker(linker, getattr(compiler, 'machine', None))
+        if '/OUT:' in out.upper() or '/OUT:' in err.upper():
+            return linkers.VisualStudioLinker(linker, getattr(compiler, 'machine', None))
+        if 'ar-Error-Unknown switch: --version' in err:
+            return linkers.PGIStaticLinker(linker)
+        if p.returncode == 0 and 'armar' in linker_name:
+            return linkers.ArmarLinker(linker)
+        if 'DMD32 D Compiler' in out or 'DMD64 D Compiler' in out:
+            assert isinstance(compiler, d.DCompiler)
+            return linkers.DLinker(linker, compiler.arch)
+        if 'LDC - the LLVM D compiler' in out:
+            assert isinstance(compiler, d.DCompiler)
+            return linkers.DLinker(linker, compiler.arch, rsp_syntax=compiler.rsp_file_syntax())
+        if 'GDC' in out and ' based on D ' in out:
+            assert isinstance(compiler, d.DCompiler)
+            return linkers.DLinker(linker, compiler.arch)
+        if err.startswith('Renesas') and 'rlink' in linker_name:
+            return linkers.CcrxLinker(linker)
+        if out.startswith('GNU ar') and 'xc16-ar' in linker_name:
+            return linkers.Xc16Linker(linker)
+        if 'Texas Instruments Incorporated' in out:
+            if 'ar2000' in linker_name:
+                return linkers.C2000Linker(linker)
+            else:
+                return linkers.TILinker(linker)
+        if out.startswith('The CompCert'):
+            return linkers.CompCertLinker(linker)
+        if out.strip().startswith('Metrowerks') or out.strip().startswith('Freescale'):
+            if 'ARM' in out:
+                return linkers.MetrowerksStaticLinkerARM(linker)
+            else:
+                return linkers.MetrowerksStaticLinkerEmbeddedPowerPC(linker)
+        if p.returncode == 0:
+            return linkers.ArLinker(compiler.for_machine, linker)
+        if p.returncode == 1 and err.startswith('usage'):  # OSX
+            return linkers.AppleArLinker(compiler.for_machine, linker)
+        if p.returncode == 1 and err.startswith('Usage'):  # AIX
+            return linkers.AIXArLinker(linker)
+        if p.returncode == 1 and err.startswith('ar: bad option: --'):  # Solaris
+            return linkers.ArLinker(compiler.for_machine, linker)
+    _handle_exceptions(popen_exceptions, trials, 'linker')
+    raise EnvironmentException('Unreachable code (exception to make mypy happy)')
+
+
+# Compilers
+# =========
+
+
+def _detect_c_or_cpp_compiler(env: 'Environment', lang: str, for_machine: MachineChoice, *, override_compiler: T.Optional[T.List[str]] = None) -> Compiler:
+    """Shared implementation for finding the C or C++ compiler to use.
+
+    The override_compiler option is provided to allow compilers which use
+    the compiler (GCC or Clang usually) as their shared linker, to find
+    the linker they need.
+    """
+    from . import c, cpp
+    from ..linkers import linkers
+    popen_exceptions: T.Dict[str, T.Union[Exception, str]] = {}
+    compilers, ccache, exe_wrap = _get_compilers(env, lang, for_machine)
+    if override_compiler is not None:
+        compilers = [override_compiler]
+    is_cross = env.is_cross_build(for_machine)
+    info = env.machines[for_machine]
+    cls: T.Union[T.Type[CCompiler], T.Type[CPPCompiler]]
+    lnk: T.Union[T.Type[StaticLinker], T.Type[DynamicLinker]]
+
+    for compiler in compilers:
+        if isinstance(compiler, str):
+            compiler = [compiler]
+        compiler_name = os.path.basename(compiler[0])
+
+        if any(os.path.basename(x) in {'cl', 'cl.exe', 'clang-cl', 'clang-cl.exe'} for x in compiler):
+            # Watcom C provides its own cl.exe clone that mimics an older
+            # version of Microsoft's compiler.
Since Watcom's cl.exe is + # just a wrapper, we skip using it if we detect its presence + # so as not to confuse Meson when configuring for MSVC. + # + # Additionally the help text of Watcom's cl.exe is paged, and + # the binary will not exit without human intervention. In + # practice, Meson will block waiting for Watcom's cl.exe to + # exit, which requires user input and thus will never exit. + if 'WATCOM' in os.environ: + def sanitize(p: str) -> str: + return os.path.normcase(os.path.abspath(p)) + + watcom_cls = [sanitize(os.path.join(os.environ['WATCOM'], 'BINNT', 'cl')), + sanitize(os.path.join(os.environ['WATCOM'], 'BINNT', 'cl.exe')), + sanitize(os.path.join(os.environ['WATCOM'], 'BINNT64', 'cl')), + sanitize(os.path.join(os.environ['WATCOM'], 'BINNT64', 'cl.exe'))] + found_cl = sanitize(shutil.which('cl')) + if found_cl in watcom_cls: + mlog.debug('Skipping unsupported cl.exe clone at:', found_cl) + continue + arg = '/?' + elif 'armcc' in compiler_name: + arg = '--vsn' + elif 'ccrx' in compiler_name: + arg = '-v' + elif 'xc16' in compiler_name: + arg = '--version' + elif 'ccomp' in compiler_name: + arg = '-version' + elif compiler_name in {'cl2000', 'cl2000.exe', 'cl430', 'cl430.exe', 'armcl', 'armcl.exe'}: + # TI compiler + arg = '-version' + elif compiler_name in {'icl', 'icl.exe'}: + # if you pass anything to icl you get stuck in a pager + arg = '' + else: + arg = '--version' + + cmd = compiler + [arg] + try: + p, out, err = Popen_safe_logged(cmd, msg='Detecting compiler via') + except OSError as e: + popen_exceptions[join_args(cmd)] = e + continue + + if 'ccrx' in compiler_name: + out = err + + full_version = out.split('\n', 1)[0] + version = search_version(out) + + guess_gcc_or_lcc: T.Optional[str] = None + if 'Free Software Foundation' in out or out.startswith('xt-'): + guess_gcc_or_lcc = 'gcc' + if 'e2k' in out and 'lcc' in out: + guess_gcc_or_lcc = 'lcc' + if 'Microchip Technology' in out: + # this output has "Free Software Foundation" in its version + guess_gcc_or_lcc = None + + if guess_gcc_or_lcc: + defines = _get_gnu_compiler_defines(compiler) + if not defines: + popen_exceptions[join_args(compiler)] = 'no pre-processor defines' + continue + + if guess_gcc_or_lcc == 'lcc': + version = _get_lcc_version_from_defines(defines) + cls = c.ElbrusCCompiler if lang == 'c' else cpp.ElbrusCPPCompiler + else: + version = _get_gnu_version_from_defines(defines) + cls = c.GnuCCompiler if lang == 'c' else cpp.GnuCPPCompiler + + linker = guess_nix_linker(env, compiler, cls, version, for_machine) + + return cls( + ccache, compiler, version, for_machine, is_cross, + info, exe_wrap, defines=defines, full_version=full_version, + linker=linker) + + if 'Emscripten' in out: + cls = c.EmscriptenCCompiler if lang == 'c' else cpp.EmscriptenCPPCompiler + env.coredata.add_lang_args(cls.language, cls, for_machine, env) + + # emcc requires a file input in order to pass arguments to the + # linker. It'll exit with an error code, but still print the + # linker version. + with tempfile.NamedTemporaryFile(suffix='.c') as f: + cmd = compiler + [cls.LINKER_PREFIX + "--version", f.name] + _, o, _ = Popen_safe(cmd) + + linker = linkers.WASMDynamicLinker( + compiler, for_machine, cls.LINKER_PREFIX, + [], version=search_version(o)) + return cls( + ccache, compiler, version, for_machine, is_cross, info, + exe_wrap, linker=linker, full_version=full_version) + + if 'Arm C/C++/Fortran Compiler' in out: + arm_ver_match = re.search(r'version (\d+)\.(\d+)\.?(\d+)? 
\(build number (\d+)\)', out) + assert arm_ver_match is not None, 'for mypy' # because mypy *should* be complaining that this could be None + version = '.'.join([x for x in arm_ver_match.groups() if x is not None]) + if lang == 'c': + cls = c.ArmLtdClangCCompiler + elif lang == 'cpp': + cls = cpp.ArmLtdClangCPPCompiler + linker = guess_nix_linker(env, compiler, cls, version, for_machine) + return cls( + ccache, compiler, version, for_machine, is_cross, info, + exe_wrap, linker=linker) + if 'armclang' in out: + # The compiler version is not present in the first line of output, + # instead it is present in second line, startswith 'Component:'. + # So, searching for the 'Component' in out although we know it is + # present in second line, as we are not sure about the + # output format in future versions + arm_ver_match = re.search('.*Component.*', out) + if arm_ver_match is None: + popen_exceptions[join_args(compiler)] = 'version string not found' + continue + arm_ver_str = arm_ver_match.group(0) + # Override previous values + version = search_version(arm_ver_str) + full_version = arm_ver_str + cls = c.ArmclangCCompiler if lang == 'c' else cpp.ArmclangCPPCompiler + linker = linkers.ArmClangDynamicLinker(for_machine, version=version) + env.coredata.add_lang_args(cls.language, cls, for_machine, env) + return cls( + ccache, compiler, version, for_machine, is_cross, info, + exe_wrap, full_version=full_version, linker=linker) + if 'CL.EXE COMPATIBILITY' in out: + # if this is clang-cl masquerading as cl, detect it as cl, not + # clang + arg = '--version' + try: + p, out, err = Popen_safe(compiler + [arg]) + except OSError as e: + popen_exceptions[join_args(compiler + [arg])] = e + version = search_version(out) + match = re.search('^Target: (.*?)-', out, re.MULTILINE) + if match: + target = match.group(1) + else: + target = 'unknown target' + cls = c.ClangClCCompiler if lang == 'c' else cpp.ClangClCPPCompiler + linker = guess_win_linker(env, ['lld-link'], cls, version, for_machine) + return cls( + compiler, version, for_machine, is_cross, info, target, + exe_wrap, linker=linker) + if 'clang' in out or 'Clang' in out: + linker = None + + defines = _get_clang_compiler_defines(compiler) + + # Even if the for_machine is darwin, we could be using vanilla + # clang. 
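+            # (Xcode's clang typically reports "Apple clang version ...",
+            # while vanilla LLVM clang reports "clang version ...", hence
+            # the substring check below.)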
+ if 'Apple' in out: + cls = c.AppleClangCCompiler if lang == 'c' else cpp.AppleClangCPPCompiler + else: + cls = c.ClangCCompiler if lang == 'c' else cpp.ClangCPPCompiler + + if 'windows' in out or env.machines[for_machine].is_windows(): + # If we're in a MINGW context this actually will use a gnu + # style ld, but for clang on "real" windows we'll use + # either link.exe or lld-link.exe + try: + linker = guess_win_linker(env, compiler, cls, version, for_machine, invoked_directly=False) + except MesonException: + pass + if linker is None: + linker = guess_nix_linker(env, compiler, cls, version, for_machine) + + return cls( + ccache, compiler, version, for_machine, is_cross, info, + exe_wrap, defines=defines, full_version=full_version, linker=linker) + + if 'Intel(R) C++ Intel(R)' in err: + version = search_version(err) + target = 'x86' if 'IA-32' in err else 'x86_64' + cls = c.IntelClCCompiler if lang == 'c' else cpp.IntelClCPPCompiler + env.coredata.add_lang_args(cls.language, cls, for_machine, env) + linker = linkers.XilinkDynamicLinker(for_machine, [], version=version) + return cls( + compiler, version, for_machine, is_cross, info, target, + exe_wrap, linker=linker) + if 'Intel(R) oneAPI DPC++/C++ Compiler for applications' in err: + version = search_version(err) + target = 'x86' if 'IA-32' in err else 'x86_64' + cls = c.IntelLLVMClCCompiler if lang == 'c' else cpp.IntelLLVMClCPPCompiler + env.coredata.add_lang_args(cls.language, cls, for_machine, env) + linker = linkers.XilinkDynamicLinker(for_machine, [], version=version) + return cls( + compiler, version, for_machine, is_cross, info, target, + exe_wrap, linker=linker) + if 'Microsoft' in out or 'Microsoft' in err: + # Latest versions of Visual Studio print version + # number to stderr but earlier ones print version + # on stdout. Why? Lord only knows. + # Check both outputs to figure out version. + for lookat in [err, out]: + version = search_version(lookat) + if version != 'unknown version': + break + else: + raise EnvironmentException(f'Failed to detect MSVC compiler version: stderr was\n{err!r}') + cl_signature = lookat.split('\n', maxsplit=1)[0] + match = re.search(r'.*(x86|x64|ARM|ARM64)([^_A-Za-z0-9]|$)', cl_signature) + if match: + target = match.group(1) + else: + m = f'Failed to detect MSVC compiler target architecture: \'cl /?\' output is\n{cl_signature}' + raise EnvironmentException(m) + cls = c.VisualStudioCCompiler if lang == 'c' else cpp.VisualStudioCPPCompiler + linker = guess_win_linker(env, ['link'], cls, version, for_machine) + # As of this writing, CCache does not support MSVC but sccache does. 
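+            # i.e. a ['sccache'] wrapper is kept, while ['ccache'] is
+            # silently dropped.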
+            if 'sccache' not in ccache:
+                ccache = []
+            return cls(
+                ccache, compiler, version, for_machine, is_cross, info, target,
+                exe_wrap, full_version=cl_signature, linker=linker)
+        if 'PGI Compilers' in out:
+            cls = c.PGICCompiler if lang == 'c' else cpp.PGICPPCompiler
+            env.coredata.add_lang_args(cls.language, cls, for_machine, env)
+            linker = linkers.PGIDynamicLinker(compiler, for_machine, cls.LINKER_PREFIX, [], version=version)
+            return cls(
+                ccache, compiler, version, for_machine, is_cross,
+                info, exe_wrap, linker=linker)
+        if 'NVIDIA Compilers and Tools' in out:
+            cls = c.NvidiaHPC_CCompiler if lang == 'c' else cpp.NvidiaHPC_CPPCompiler
+            env.coredata.add_lang_args(cls.language, cls, for_machine, env)
+            linker = linkers.NvidiaHPC_DynamicLinker(compiler, for_machine, cls.LINKER_PREFIX, [], version=version)
+            return cls(
+                ccache, compiler, version, for_machine, is_cross,
+                info, exe_wrap, linker=linker)
+        if '(ICC)' in out:
+            cls = c.IntelCCompiler if lang == 'c' else cpp.IntelCPPCompiler
+            l = guess_nix_linker(env, compiler, cls, version, for_machine)
+            return cls(
+                ccache, compiler, version, for_machine, is_cross, info,
+                exe_wrap, full_version=full_version, linker=l)
+        if 'Intel(R) oneAPI' in out:
+            cls = c.IntelLLVMCCompiler if lang == 'c' else cpp.IntelLLVMCPPCompiler
+            l = guess_nix_linker(env, compiler, cls, version, for_machine)
+            return cls(
+                ccache, compiler, version, for_machine, is_cross, info,
+                exe_wrap, full_version=full_version, linker=l)
+        if 'TMS320C2000 C/C++' in out or 'MSP430 C/C++' in out or 'TI ARM C/C++ Compiler' in out:
+            if 'TMS320C2000 C/C++' in out:
+                cls = c.C2000CCompiler if lang == 'c' else cpp.C2000CPPCompiler
+                lnk = linkers.C2000DynamicLinker
+            else:
+                cls = c.TICCompiler if lang == 'c' else cpp.TICPPCompiler
+                lnk = linkers.TIDynamicLinker
+
+            env.coredata.add_lang_args(cls.language, cls, for_machine, env)
+            linker = lnk(compiler, for_machine, version=version)
+            return cls(
+                ccache, compiler, version, for_machine, is_cross, info,
+                exe_wrap, full_version=full_version, linker=linker)
+        if 'ARM' in out and not ('Metrowerks' in out or 'Freescale' in out):
+            cls = c.ArmCCompiler if lang == 'c' else cpp.ArmCPPCompiler
+            env.coredata.add_lang_args(cls.language, cls, for_machine, env)
+            linker = linkers.ArmDynamicLinker(for_machine, version=version)
+            return cls(
+                ccache, compiler, version, for_machine, is_cross,
+                info, exe_wrap, full_version=full_version, linker=linker)
+        if 'RX Family' in out:
+            cls = c.CcrxCCompiler if lang == 'c' else cpp.CcrxCPPCompiler
+            env.coredata.add_lang_args(cls.language, cls, for_machine, env)
+            linker = linkers.CcrxDynamicLinker(for_machine, version=version)
+            return cls(
+                ccache, compiler, version, for_machine, is_cross, info,
+                exe_wrap, full_version=full_version, linker=linker)
+
+        if 'Microchip Technology' in out:
+            cls = c.Xc16CCompiler
+            env.coredata.add_lang_args(cls.language, cls, for_machine, env)
+            linker = linkers.Xc16DynamicLinker(for_machine, version=version)
+            return cls(
+                ccache, compiler, version, for_machine, is_cross, info,
+                exe_wrap, full_version=full_version, linker=linker)
+
+        if 'CompCert' in out:
+            cls = c.CompCertCCompiler
+            env.coredata.add_lang_args(cls.language, cls, for_machine, env)
+            linker = linkers.CompCertDynamicLinker(for_machine, version=version)
+            return cls(
+                ccache, compiler, version, for_machine, is_cross, info,
+                exe_wrap, full_version=full_version, linker=linker)
+
+        if 'Metrowerks C/C++' in out or 'Freescale C/C++' in out:
+            if 'ARM' in out:
+                cls = c.MetrowerksCCompilerARM if lang == 'c' else cpp.MetrowerksCPPCompilerARM
+                lnk = linkers.MetrowerksLinkerARM
+            else:
+                cls = c.MetrowerksCCompilerEmbeddedPowerPC if lang == 'c' else cpp.MetrowerksCPPCompilerEmbeddedPowerPC
+                lnk = linkers.MetrowerksLinkerEmbeddedPowerPC
+
+            mwcc_ver_match = re.search(r'Version (\d+)\.(\d+)\.?(\d+)? build (\d+)', out)
+            assert mwcc_ver_match is not None, 'for mypy'  # because mypy *should* be complaining that this could be None
+            compiler_version = '.'.join(x for x in mwcc_ver_match.groups() if x is not None)
+
+            env.coredata.add_lang_args(cls.language, cls, for_machine, env)
+            ld = env.lookup_binary_entry(for_machine, cls.language + '_ld')
+
+            if ld is not None:
+                _, o_ld, _ = Popen_safe(ld + ['--version'])
+
+                mwld_ver_match = re.search(r'Version (\d+)\.(\d+)\.?(\d+)? build (\d+)', o_ld)
+                assert mwld_ver_match is not None, 'for mypy'  # because mypy *should* be complaining that this could be None
+                linker_version = '.'.join(x for x in mwld_ver_match.groups() if x is not None)
+
+                linker = lnk(ld, for_machine, version=linker_version)
+            else:
+                raise EnvironmentException(f'Failed to detect linker for {cls.id!r} compiler. Please update your cross file(s).')
+
+            return cls(
+                ccache, compiler, compiler_version, for_machine, is_cross, info,
+                exe_wrap, full_version=full_version, linker=linker)
+
+    _handle_exceptions(popen_exceptions, compilers)
+    raise EnvironmentException(f'Unknown compiler {compilers}')
+
+def detect_c_compiler(env: 'Environment', for_machine: MachineChoice) -> Compiler:
+    return _detect_c_or_cpp_compiler(env, 'c', for_machine)
+
+def detect_cpp_compiler(env: 'Environment', for_machine: MachineChoice) -> Compiler:
+    return _detect_c_or_cpp_compiler(env, 'cpp', for_machine)
+
+def detect_cuda_compiler(env: 'Environment', for_machine: MachineChoice) -> Compiler:
+    from .cuda import CudaCompiler
+    from ..linkers.linkers import CudaLinker
+    popen_exceptions = {}
+    is_cross = env.is_cross_build(for_machine)
+    compilers, ccache, exe_wrap = _get_compilers(env, 'cuda', for_machine)
+    info = env.machines[for_machine]
+    for compiler in compilers:
+        arg = '--version'
+        try:
+            p, out, err = Popen_safe_logged(compiler + [arg], msg='Detecting compiler via')
+        except OSError as e:
+            popen_exceptions[join_args(compiler + [arg])] = e
+            continue
+        # Example nvcc printout:
+        #
+        #     nvcc: NVIDIA (R) Cuda compiler driver
+        #     Copyright (c) 2005-2018 NVIDIA Corporation
+        #     Built on Sat_Aug_25_21:08:01_CDT_2018
+        #     Cuda compilation tools, release 10.0, V10.0.130
+        #
+        # search_version() first finds the "10.0" after "release",
+        # rather than the more precise "10.0.130" after "V".
+        # The patch version number is occasionally important; For
+        # instance, on Linux,
+        # - CUDA Toolkit 8.0.44 requires NVIDIA Driver 367.48
+        # - CUDA Toolkit 8.0.61 requires NVIDIA Driver 375.26
+        # Luckily, the "V" also makes it very simple to extract
+        # the full version:
+        version = out.strip().rsplit('V', maxsplit=1)[-1]
+        cpp_compiler = detect_cpp_compiler(env, for_machine)
+        cls = CudaCompiler
+        env.coredata.add_lang_args(cls.language, cls, for_machine, env)
+        linker = CudaLinker(compiler, for_machine, CudaCompiler.LINKER_PREFIX, [], version=CudaLinker.parse_version())
+        return cls(ccache, compiler, version, for_machine, is_cross, exe_wrap, host_compiler=cpp_compiler, info=info, linker=linker)
+    raise EnvironmentException(f'Could not find suitable CUDA compiler: "{"; ".join([" ".join(c) for c in compilers])}"')
+
+def detect_fortran_compiler(env: 'Environment', for_machine: MachineChoice) -> Compiler:
+    from .
import fortran + from ..linkers import linkers + popen_exceptions: T.Dict[str, T.Union[Exception, str]] = {} + compilers, ccache, exe_wrap = _get_compilers(env, 'fortran', for_machine) + is_cross = env.is_cross_build(for_machine) + info = env.machines[for_machine] + cls: T.Type[FortranCompiler] + for compiler in compilers: + for arg in ['--version', '-V']: + try: + p, out, err = Popen_safe_logged(compiler + [arg], msg='Detecting compiler via') + except OSError as e: + popen_exceptions[join_args(compiler + [arg])] = e + continue + + version = search_version(out) + full_version = out.split('\n', 1)[0] + + guess_gcc_or_lcc: T.Optional[str] = None + if 'GNU Fortran' in out: + guess_gcc_or_lcc = 'gcc' + if 'e2k' in out and 'lcc' in out: + guess_gcc_or_lcc = 'lcc' + + if guess_gcc_or_lcc: + defines = _get_gnu_compiler_defines(compiler) + if not defines: + popen_exceptions[join_args(compiler)] = 'no pre-processor defines' + continue + if guess_gcc_or_lcc == 'lcc': + version = _get_lcc_version_from_defines(defines) + cls = fortran.ElbrusFortranCompiler + linker = guess_nix_linker(env, compiler, cls, version, for_machine) + return cls( + compiler, version, for_machine, is_cross, info, + exe_wrap, defines, full_version=full_version, linker=linker) + else: + version = _get_gnu_version_from_defines(defines) + cls = fortran.GnuFortranCompiler + linker = guess_nix_linker(env, compiler, cls, version, for_machine) + return cls( + compiler, version, for_machine, is_cross, info, + exe_wrap, defines, full_version=full_version, linker=linker) + + if 'Arm C/C++/Fortran Compiler' in out: + cls = fortran.ArmLtdFlangFortranCompiler + arm_ver_match = re.search(r'version (\d+)\.(\d+)\.?(\d+)? \(build number (\d+)\)', out) + assert arm_ver_match is not None, 'for mypy' # because mypy *should* be complaining that this could be None + version = '.'.join([x for x in arm_ver_match.groups() if x is not None]) + linker = guess_nix_linker(env, compiler, cls, version, for_machine) + return cls( + compiler, version, for_machine, is_cross, info, + exe_wrap, linker=linker) + if 'G95' in out: + cls = fortran.G95FortranCompiler + linker = guess_nix_linker(env, compiler, cls, version, for_machine) + return cls( + compiler, version, for_machine, is_cross, info, + exe_wrap, full_version=full_version, linker=linker) + + if 'Sun Fortran' in err: + version = search_version(err) + cls = fortran.SunFortranCompiler + linker = guess_nix_linker(env, compiler, cls, version, for_machine) + return cls( + compiler, version, for_machine, is_cross, info, + exe_wrap, full_version=full_version, linker=linker) + + if 'Intel(R) Fortran Compiler for applications' in err: + version = search_version(err) + target = 'x86' if 'IA-32' in err else 'x86_64' + cls = fortran.IntelLLVMClFortranCompiler + env.coredata.add_lang_args(cls.language, cls, for_machine, env) + linker = linkers.XilinkDynamicLinker(for_machine, [], version=version) + return cls( + compiler, version, for_machine, is_cross, info, + target, exe_wrap, linker=linker) + + if 'Intel(R) Visual Fortran' in err or 'Intel(R) Fortran' in err: + version = search_version(err) + target = 'x86' if 'IA-32' in err else 'x86_64' + cls = fortran.IntelClFortranCompiler + env.coredata.add_lang_args(cls.language, cls, for_machine, env) + linker = linkers.XilinkDynamicLinker(for_machine, [], version=version) + return cls( + compiler, version, for_machine, is_cross, info, + target, exe_wrap, linker=linker) + + if 'ifort (IFORT)' in out: + cls = fortran.IntelFortranCompiler + linker = guess_nix_linker(env, 
compiler, cls, version, for_machine) + return cls( + compiler, version, for_machine, is_cross, info, + exe_wrap, full_version=full_version, linker=linker) + + if 'ifx (IFORT)' in out or 'ifx (IFX)' in out: + cls = fortran.IntelLLVMFortranCompiler + linker = guess_nix_linker(env, compiler, cls, version, for_machine) + return cls( + compiler, version, for_machine, is_cross, info, + exe_wrap, full_version=full_version, linker=linker) + + if 'PathScale EKOPath(tm)' in err: + return fortran.PathScaleFortranCompiler( + compiler, version, for_machine, is_cross, info, + exe_wrap, full_version=full_version) + + if 'PGI Compilers' in out: + cls = fortran.PGIFortranCompiler + env.coredata.add_lang_args(cls.language, cls, for_machine, env) + linker = linkers.PGIDynamicLinker(compiler, for_machine, + cls.LINKER_PREFIX, [], version=version) + return cls( + compiler, version, for_machine, is_cross, info, exe_wrap, + full_version=full_version, linker=linker) + + if 'NVIDIA Compilers and Tools' in out: + cls = fortran.NvidiaHPC_FortranCompiler + env.coredata.add_lang_args(cls.language, cls, for_machine, env) + linker = linkers.PGIDynamicLinker(compiler, for_machine, + cls.LINKER_PREFIX, [], version=version) + return cls( + compiler, version, for_machine, is_cross, info, exe_wrap, + full_version=full_version, linker=linker) + + if 'flang' in out or 'clang' in out: + cls = fortran.FlangFortranCompiler + linker = guess_nix_linker(env, + compiler, cls, version, for_machine) + return cls( + compiler, version, for_machine, is_cross, info, + exe_wrap, full_version=full_version, linker=linker) + + if 'Open64 Compiler Suite' in err: + cls = fortran.Open64FortranCompiler + linker = guess_nix_linker(env, + compiler, cls, version, for_machine) + return cls( + compiler, version, for_machine, is_cross, info, + exe_wrap, full_version=full_version, linker=linker) + + if 'NAG Fortran' in err: + full_version = err.split('\n', 1)[0] + version = full_version.split()[-1] + cls = fortran.NAGFortranCompiler + env.coredata.add_lang_args(cls.language, cls, for_machine, env) + linker = linkers.NAGDynamicLinker( + compiler, for_machine, cls.LINKER_PREFIX, [], + version=version) + return cls( + compiler, version, for_machine, is_cross, info, + exe_wrap, full_version=full_version, linker=linker) + + _handle_exceptions(popen_exceptions, compilers) + raise EnvironmentException('Unreachable code (exception to make mypy happy)') + +def detect_objc_compiler(env: 'Environment', for_machine: MachineChoice) -> 'Compiler': + return _detect_objc_or_objcpp_compiler(env, 'objc', for_machine) + +def detect_objcpp_compiler(env: 'Environment', for_machine: MachineChoice) -> 'Compiler': + return _detect_objc_or_objcpp_compiler(env, 'objcpp', for_machine) + +def _detect_objc_or_objcpp_compiler(env: 'Environment', lang: str, for_machine: MachineChoice) -> 'Compiler': + from . 
import objc, objcpp + popen_exceptions: T.Dict[str, T.Union[Exception, str]] = {} + compilers, ccache, exe_wrap = _get_compilers(env, lang, for_machine) + is_cross = env.is_cross_build(for_machine) + info = env.machines[for_machine] + comp: T.Union[T.Type[objc.ObjCCompiler], T.Type[objcpp.ObjCPPCompiler]] + + for compiler in compilers: + arg = ['--version'] + try: + p, out, err = Popen_safe_logged(compiler + arg, msg='Detecting compiler via') + except OSError as e: + popen_exceptions[join_args(compiler + arg)] = e + continue + version = search_version(out) + if 'Free Software Foundation' in out: + defines = _get_gnu_compiler_defines(compiler) + if not defines: + popen_exceptions[join_args(compiler)] = 'no pre-processor defines' + continue + version = _get_gnu_version_from_defines(defines) + comp = objc.GnuObjCCompiler if lang == 'objc' else objcpp.GnuObjCPPCompiler + linker = guess_nix_linker(env, compiler, comp, version, for_machine) + return comp( + ccache, compiler, version, for_machine, is_cross, info, + exe_wrap, defines, linker=linker) + if 'clang' in out: + linker = None + defines = _get_clang_compiler_defines(compiler) + if not defines: + popen_exceptions[join_args(compiler)] = 'no pre-processor defines' + continue + if 'Apple' in out: + comp = objc.AppleClangObjCCompiler if lang == 'objc' else objcpp.AppleClangObjCPPCompiler + else: + comp = objc.ClangObjCCompiler if lang == 'objc' else objcpp.ClangObjCPPCompiler + if 'windows' in out or env.machines[for_machine].is_windows(): + # If we're in a MINGW context this actually will use a gnu style ld + try: + linker = guess_win_linker(env, compiler, comp, version, for_machine) + except MesonException: + pass + + if not linker: + linker = guess_nix_linker(env, compiler, comp, version, for_machine) + return comp( + ccache, compiler, version, for_machine, + is_cross, info, exe_wrap, linker=linker, defines=defines) + _handle_exceptions(popen_exceptions, compilers) + raise EnvironmentException('Unreachable code (exception to make mypy happy)') + +def detect_java_compiler(env: 'Environment', for_machine: MachineChoice) -> Compiler: + from .java import JavaCompiler + exelist = env.lookup_binary_entry(for_machine, 'java') + info = env.machines[for_machine] + if exelist is None: + # TODO support fallback + exelist = [defaults['java'][0]] + + try: + p, out, err = Popen_safe_logged(exelist + ['-version'], msg='Detecting compiler via') + except OSError: + raise EnvironmentException('Could not execute Java compiler: {}'.format(join_args(exelist))) + if 'javac' in out or 'javac' in err: + version = search_version(err if 'javac' in err else out) + if not version or version == 'unknown version': + parts = (err if 'javac' in err else out).split() + if len(parts) > 1: + version = parts[1] + comp_class = JavaCompiler + env.coredata.add_lang_args(comp_class.language, comp_class, for_machine, env) + return comp_class(exelist, version, for_machine, info) + raise EnvironmentException('Unknown compiler: ' + join_args(exelist)) + +def detect_cs_compiler(env: 'Environment', for_machine: MachineChoice) -> Compiler: + from . 
import cs + compilers, ccache, exe_wrap = _get_compilers(env, 'cs', for_machine) + popen_exceptions = {} + info = env.machines[for_machine] + for comp in compilers: + try: + p, out, err = Popen_safe_logged(comp + ['--version'], msg='Detecting compiler via') + except OSError as e: + popen_exceptions[join_args(comp + ['--version'])] = e + continue + + version = search_version(out) + cls: T.Type[cs.CsCompiler] + if 'Mono' in out: + cls = cs.MonoCompiler + elif "Visual C#" in out: + cls = cs.VisualStudioCsCompiler + else: + continue + env.coredata.add_lang_args(cls.language, cls, for_machine, env) + return cls(comp, version, for_machine, info) + + _handle_exceptions(popen_exceptions, compilers) + raise EnvironmentException('Unreachable code (exception to make mypy happy)') + +def detect_cython_compiler(env: 'Environment', for_machine: MachineChoice) -> Compiler: + """Search for a cython compiler.""" + from .cython import CythonCompiler + compilers, _, _ = _get_compilers(env, 'cython', MachineChoice.BUILD) + is_cross = env.is_cross_build(for_machine) + info = env.machines[for_machine] + + popen_exceptions: T.Dict[str, Exception] = {} + for comp in compilers: + try: + _, out, err = Popen_safe_logged(comp + ['-V'], msg='Detecting compiler via') + except OSError as e: + popen_exceptions[join_args(comp + ['-V'])] = e + continue + + version: T.Optional[str] = None + # 3.0 + if 'Cython' in out: + version = search_version(out) + # older + elif 'Cython' in err: + version = search_version(err) + if version is not None: + comp_class = CythonCompiler + env.coredata.add_lang_args(comp_class.language, comp_class, for_machine, env) + return comp_class([], comp, version, for_machine, info, is_cross=is_cross) + _handle_exceptions(popen_exceptions, compilers) + raise EnvironmentException('Unreachable code (exception to make mypy happy)') + +def detect_vala_compiler(env: 'Environment', for_machine: MachineChoice) -> Compiler: + from .vala import ValaCompiler + exelist = env.lookup_binary_entry(MachineChoice.BUILD, 'vala') + is_cross = env.is_cross_build(for_machine) + info = env.machines[for_machine] + if exelist is None: + # TODO support fallback + exelist = [defaults['vala'][0]] + + try: + p, out = Popen_safe_logged(exelist + ['--version'], msg='Detecting compiler via')[0:2] + except OSError: + raise EnvironmentException('Could not execute Vala compiler: {}'.format(join_args(exelist))) + version = search_version(out) + if 'Vala' in out: + comp_class = ValaCompiler + env.coredata.add_lang_args(comp_class.language, comp_class, for_machine, env) + return comp_class(exelist, version, for_machine, is_cross, info) + raise EnvironmentException('Unknown compiler: ' + join_args(exelist)) + +def detect_rust_compiler(env: 'Environment', for_machine: MachineChoice) -> RustCompiler: + from . 
import rust
+    from ..linkers import linkers
+    popen_exceptions = {}  # type: T.Dict[str, Exception]
+    compilers, _, exe_wrap = _get_compilers(env, 'rust', for_machine)
+    is_cross = env.is_cross_build(for_machine)
+    info = env.machines[for_machine]
+
+    cc = detect_c_compiler(env, for_machine)
+    is_link_exe = isinstance(cc.linker, linkers.VisualStudioLikeLinkerMixin)
+    override = env.lookup_binary_entry(for_machine, 'rust_ld')
+
+    for compiler in compilers:
+        arg = ['--version']
+        try:
+            out = Popen_safe_logged(compiler + arg, msg='Detecting compiler via')[1]
+        except OSError as e:
+            popen_exceptions[join_args(compiler + arg)] = e
+            continue
+
+        version = search_version(out)
+        cls: T.Type[RustCompiler] = rust.RustCompiler
+
+        # Clippy is a wrapper around rustc, but it doesn't have rustc in its
+        # output. We can otherwise treat it as rustc.
+        if 'clippy' in out:
+            # clippy returns its own version and not the rustc version by
+            # default so try harder here to get the correct version.
+            # Also replace the whole output with the rustc output in
+            # case this is later used for other purposes.
+            arg = ['--rustc', '--version']
+            try:
+                out = Popen_safe(compiler + arg)[1]
+            except OSError as e:
+                popen_exceptions[join_args(compiler + arg)] = e
+                continue
+            version = search_version(out)
+
+            cls = rust.ClippyRustCompiler
+
+        if 'rustc' in out:
+            # On Linux and mac rustc will invoke gcc (clang for mac
+            # presumably), and it can do this on windows too, for dynamic
+            # linking. This means the easiest way to figure out what linker
+            # to use is to just get the value of the C compiler and use that
+            # as the basis of the rust linker.
+            # However, there are two things we need to change: if CC is not
+            # the default, use that; and second, add the necessary arguments
+            # to rust to use -fuse-ld
+
+            if any(a.startswith('linker=') for a in compiler):
+                mlog.warning(
+                    'Please do not put -C linker= in your compiler '
+                    'command, set rust_ld=command in your cross file '
+                    'or use the RUSTC_LD environment variable, otherwise meson '
+                    'will override your selection.')
+
+            compiler = compiler.copy()  # avoid mutating the original list
+
+            if override is None:
+                extra_args: T.Dict[str, T.Union[str, bool]] = {}
+                always_args: T.List[str] = []
+                if is_link_exe:
+                    compiler.extend(cls.use_linker_args(cc.linker.exelist[0], ''))
+                    extra_args['direct'] = True
+                    extra_args['machine'] = cc.linker.machine
+                else:
+                    exelist = cc.linker.exelist + cc.linker.get_always_args()
+                    if 'ccache' in exelist[0]:
+                        del exelist[0]
+                    c = exelist.pop(0)
+                    compiler.extend(cls.use_linker_args(c, ''))
+
+                    # Also ensure that we pass any extra arguments to the linker
+                    for l in exelist:
+                        compiler.extend(['-C', f'link-arg={l}'])
+
+                # This trickery with type() gets us the class of the linker
+                # so we can initialize a new copy for the Rust Compiler
+                # TODO rewrite this without type: ignore
+                assert cc.linker is not None, 'for mypy'
+                if is_link_exe:
+                    linker = type(cc.linker)(for_machine, always_args, exelist=cc.linker.exelist,  # type: ignore
+                                             version=cc.linker.version, **extra_args)  # type: ignore
+                else:
+                    linker = type(cc.linker)(compiler, for_machine, cc.LINKER_PREFIX,
+                                             always_args=always_args, version=cc.linker.version,
+                                             **extra_args)
+            elif 'link' in override[0]:
+                linker = guess_win_linker(env,
+                                          override, cls, version, for_machine, use_linker_prefix=False)
+                # rustc takes linker arguments without a prefix, and
+                # inserts the correct prefix itself.
+                assert isinstance(linker, linkers.VisualStudioLikeLinkerMixin)
+                linker.direct = True
+                compiler.extend(cls.use_linker_args(linker.exelist[0], ''))
+            else:
+                # On linux and macos rust will invoke the c compiler for
+                # linking, on windows it will use lld-link or link.exe.
+                # We will simply ask for the C compiler that corresponds to
+                # it, and use that.
+                cc = _detect_c_or_cpp_compiler(env, 'c', for_machine, override_compiler=override)
+                linker = cc.linker
+
+                # Of course, we're not going to use any of that, we just
+                # need it to get the proper arguments to pass to rustc
+                c = linker.exelist[1] if linker.exelist[0].endswith('ccache') else linker.exelist[0]
+                compiler.extend(cls.use_linker_args(c, ''))
+
+            env.coredata.add_lang_args(cls.language, cls, for_machine, env)
+            return cls(
+                compiler, version, for_machine, is_cross, info, exe_wrap,
+                linker=linker)
+
+    _handle_exceptions(popen_exceptions, compilers)
+    raise EnvironmentException('Unreachable code (exception to make mypy happy)')
+
+def detect_d_compiler(env: 'Environment', for_machine: MachineChoice) -> Compiler:
+    from . import c, d
+    info = env.machines[for_machine]
+
+    # Detect the target architecture, required for proper architecture handling on Windows.
+    # MSVC compiler is required for correct platform detection.
+    c_compiler = {'c': detect_c_compiler(env, for_machine)}
+    is_msvc = isinstance(c_compiler['c'], c.VisualStudioCCompiler)
+    if not is_msvc:
+        c_compiler = {}
+
+    # Import here to avoid circular imports
+    from ..environment import detect_cpu_family
+    arch = detect_cpu_family(c_compiler)
+    if is_msvc and arch == 'x86':
+        arch = 'x86_mscoff'
+
+    popen_exceptions = {}
+    is_cross = env.is_cross_build(for_machine)
+    compilers, ccache, exe_wrap = _get_compilers(env, 'd', for_machine)
+    cls: T.Type[d.DCompiler]
+    for exelist in compilers:
+        # Search for a D compiler.
+        # We prefer LDC over GDC unless overridden with the DC
+        # environment variable because LDC has a much more
+        # up-to-date language version at the time of writing (2016).
+        if os.path.basename(exelist[-1]).startswith(('ldmd', 'gdmd')):
+            raise EnvironmentException(
+                f'Meson does not support {exelist[-1]} as it is only a DMD frontend for another compiler. '
+                'Please provide a valid value for DC or unset it so that Meson can resolve the compiler by itself.')
+        try:
+            p, out = Popen_safe(exelist + ['--version'])[0:2]
+        except OSError as e:
+            popen_exceptions[join_args(exelist + ['--version'])] = e
+            continue
+        version = search_version(out)
+        full_version = out.split('\n', 1)[0]
+
+        if 'LLVM D compiler' in out:
+            cls = d.LLVMDCompiler
+            # LDC seems to require a file
+            # We cannot use NamedTemporaryFile on windows, it's documented
+            # not to work for our uses. So, just use mkstemp and only have
+            # one path for simplicity.
+            o, f = tempfile.mkstemp('.d')
+            os.close(o)
+
+            try:
+                if info.is_windows() or info.is_cygwin():
+                    objfile = os.path.basename(f)[:-1] + 'obj'
+                    linker = guess_win_linker(env,
+                                              exelist,
+                                              cls, full_version, for_machine,
+                                              use_linker_prefix=True, invoked_directly=False,
+                                              extra_args=[f])
+                else:
+                    # LDC writes an object file to the current working directory.
+                    # Clean it up.
+                    objfile = os.path.basename(f)[:-1] + 'o'
+                    linker = guess_nix_linker(env,
+                                              exelist, cls, full_version, for_machine,
+                                              extra_args=[f])
+            finally:
+                windows_proof_rm(f)
+                windows_proof_rm(objfile)
+
+            return cls(
+                exelist, version, for_machine, info, arch,
+                full_version=full_version, linker=linker, version_output=out)
+        elif 'gdc' in out:
+            cls = d.GnuDCompiler
+            linker = guess_nix_linker(env, exelist, cls, version, for_machine)
+            return cls(
+                exelist, version, for_machine, info, arch,
+                exe_wrapper=exe_wrap, is_cross=is_cross,
+                full_version=full_version, linker=linker)
+        elif 'The D Language Foundation' in out or 'Digital Mars' in out:
+            cls = d.DmdDCompiler
+            # DMD seems to require a file
+            # We cannot use NamedTemporaryFile on windows, it's documented
+            # not to work for our uses. So, just use mkstemp and only have
+            # one path for simplicity.
+            o, f = tempfile.mkstemp('.d')
+            os.close(o)
+
+            # DMD has different detection logic for x86 and x86_64
+            arch_arg = '-m64' if arch == 'x86_64' else '-m32'
+
+            try:
+                if info.is_windows() or info.is_cygwin():
+                    objfile = os.path.basename(f)[:-1] + 'obj'
+                    linker = guess_win_linker(env,
+                                              exelist, cls, full_version, for_machine,
+                                              invoked_directly=False, extra_args=[f, arch_arg])
+                else:
+                    objfile = os.path.basename(f)[:-1] + 'o'
+                    linker = guess_nix_linker(env,
+                                              exelist, cls, full_version, for_machine,
+                                              extra_args=[f, arch_arg])
+            finally:
+                windows_proof_rm(f)
+                windows_proof_rm(objfile)
+
+            return cls(
+                exelist, version, for_machine, info, arch,
+                full_version=full_version, linker=linker)
+        raise EnvironmentException('Unknown compiler: ' + join_args(exelist))
+
+    _handle_exceptions(popen_exceptions, compilers)
+    raise EnvironmentException('Unreachable code (exception to make mypy happy)')
+
+def detect_swift_compiler(env: 'Environment', for_machine: MachineChoice) -> Compiler:
+    from .swift import SwiftCompiler
+    exelist = env.lookup_binary_entry(for_machine, 'swift')
+    is_cross = env.is_cross_build(for_machine)
+    info = env.machines[for_machine]
+    if exelist is None:
+        # TODO support fallback
+        exelist = [defaults['swift'][0]]
+
+    try:
+        p, _, err = Popen_safe_logged(exelist + ['-v'], msg='Detecting compiler via')
+    except OSError:
+        raise EnvironmentException('Could not execute Swift compiler: {}'.format(join_args(exelist)))
+    version = search_version(err)
+    if 'Swift' in err:
+        # As of 5.0.1 swiftc *requires* a file to check the linker:
+        with tempfile.NamedTemporaryFile(suffix='.swift') as f:
+            cls = SwiftCompiler
+            linker = guess_nix_linker(env,
+                                      exelist, cls, version, for_machine,
+                                      extra_args=[f.name])
+        return cls(
+            exelist, version, for_machine, is_cross, info, linker=linker)
+
+    raise EnvironmentException('Unknown compiler: ' + join_args(exelist))
+
+def detect_nasm_compiler(env: 'Environment', for_machine: MachineChoice) -> Compiler:
+    from .asm import NasmCompiler, YasmCompiler, MetrowerksAsmCompilerARM, MetrowerksAsmCompilerEmbeddedPowerPC
+    compilers, _, _ = _get_compilers(env, 'nasm', for_machine)
+    is_cross = env.is_cross_build(for_machine)
+
+    # We need a C compiler to properly detect the machine info and linker
+    cc = detect_c_compiler(env, for_machine)
+    if not is_cross:
+        from ..environment import detect_machine_info
+        info = detect_machine_info({'c': cc})
+    else:
+        info = env.machines[for_machine]
+
+    popen_exceptions: T.Dict[str, Exception] = {}
+    for comp in compilers:
+        if comp == ['nasm'] and is_windows() and not shutil.which(comp[0]):
+            # nasm is not in PATH on Windows by default
+            default_path = os.path.join(os.environ['ProgramFiles'], 'NASM')
+            comp[0] = shutil.which(comp[0], path=default_path) or comp[0]
+        try:
+            output = Popen_safe_logged(comp + ['--version'], msg='Detecting compiler via')[1]
+        except OSError as e:
+            popen_exceptions[' '.join(comp + ['--version'])] = e
+            continue
+
+        version = search_version(output)
+        if 'NASM' in output:
+            comp_class = NasmCompiler
+            env.coredata.add_lang_args(comp_class.language, comp_class, for_machine, env)
+            return comp_class([], comp, version, for_machine, info, cc.linker, is_cross=is_cross)
+        elif 'yasm' in output:
+            comp_class = YasmCompiler
+            env.coredata.add_lang_args(comp_class.language, comp_class, for_machine, env)
+            return comp_class([], comp, version, for_machine, info, cc.linker, is_cross=is_cross)
+        elif 'Metrowerks' in output or 'Freescale' in output:
+            if 'ARM' in output:
+                comp_class_mwasmarm = MetrowerksAsmCompilerARM
+                env.coredata.add_lang_args(comp_class_mwasmarm.language, comp_class_mwasmarm, for_machine, env)
+                return comp_class_mwasmarm([], comp, version, for_machine, info, cc.linker, is_cross=is_cross)
+            else:
+                comp_class_mwasmeppc = MetrowerksAsmCompilerEmbeddedPowerPC
+                env.coredata.add_lang_args(comp_class_mwasmeppc.language, comp_class_mwasmeppc, for_machine, env)
+                return comp_class_mwasmeppc([], comp, version, for_machine, info, cc.linker, is_cross=is_cross)
+
+    _handle_exceptions(popen_exceptions, compilers)
+    raise EnvironmentException('Unreachable code (exception to make mypy happy)')
+
+def detect_masm_compiler(env: 'Environment', for_machine: MachineChoice) -> Compiler:
+    # We need a C compiler to properly detect the machine info and linker
+    is_cross = env.is_cross_build(for_machine)
+    cc = detect_c_compiler(env, for_machine)
+    if not is_cross:
+        from ..environment import detect_machine_info
+        info = detect_machine_info({'c': cc})
+    else:
+        info = env.machines[for_machine]
+
+    from .asm import MasmCompiler, MasmARMCompiler
+    comp_class: T.Type[Compiler]
+    if info.cpu_family == 'x86':
+        comp = ['ml']
+        comp_class = MasmCompiler
+        arg = '/?'
+    elif info.cpu_family == 'x86_64':
+        comp = ['ml64']
+        comp_class = MasmCompiler
+        arg = '/?'
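+    # Note: armasm/armasm64 (used for the ARM families below) print usage with '-h' rather than ml's '/?'.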
+ elif info.cpu_family == 'arm': + comp = ['armasm'] + comp_class = MasmARMCompiler + arg = '-h' + elif info.cpu_family == 'aarch64': + comp = ['armasm64'] + comp_class = MasmARMCompiler + arg = '-h' + else: + raise EnvironmentException(f'Platform {info.cpu_family} not supported by MASM') + + popen_exceptions: T.Dict[str, Exception] = {} + try: + output = Popen_safe(comp + [arg])[2] + version = search_version(output) + env.coredata.add_lang_args(comp_class.language, comp_class, for_machine, env) + return comp_class([], comp, version, for_machine, info, cc.linker, is_cross=is_cross) + except OSError as e: + popen_exceptions[' '.join(comp + [arg])] = e + _handle_exceptions(popen_exceptions, [comp]) + raise EnvironmentException('Unreachable code (exception to make mypy happy)') + +# GNU/Clang defines and version +# ============================= + +def _get_gnu_compiler_defines(compiler: T.List[str]) -> T.Dict[str, str]: + """ + Detect GNU compiler platform type (Apple, MinGW, Unix) + """ + # Arguments to output compiler pre-processor defines to stdout + # gcc, g++, and gfortran all support these arguments + args = compiler + ['-E', '-dM', '-'] + mlog.debug(f'Running command: {join_args(args)}') + p, output, error = Popen_safe(args, write='', stdin=subprocess.PIPE) + if p.returncode != 0: + raise EnvironmentException('Unable to detect GNU compiler type:\n' + f'Compiler stdout:\n{output}\n-----\n' + f'Compiler stderr:\n{error}\n-----\n') + # Parse several lines of the type: + # `#define ___SOME_DEF some_value` + # and extract `___SOME_DEF` + defines: T.Dict[str, str] = {} + for line in output.split('\n'): + if not line: + continue + d, *rest = line.split(' ', 2) + if d != '#define': + continue + if len(rest) == 1: + defines[rest[0]] = '' + if len(rest) == 2: + defines[rest[0]] = rest[1] + return defines + +def _get_clang_compiler_defines(compiler: T.List[str]) -> T.Dict[str, str]: + """ + Get the list of Clang pre-processor defines + """ + args = compiler + ['-E', '-dM', '-'] + mlog.debug(f'Running command: {join_args(args)}') + p, output, error = Popen_safe(args, write='', stdin=subprocess.PIPE) + if p.returncode != 0: + raise EnvironmentException('Unable to get clang pre-processor defines:\n' + f'Compiler stdout:\n{output}\n-----\n' + f'Compiler stderr:\n{error}\n-----\n') + defines: T.Dict[str, str] = {} + for line in output.split('\n'): + if not line: + continue + d, *rest = line.split(' ', 2) + if d != '#define': + continue + if len(rest) == 1: + defines[rest[0]] = '' + if len(rest) == 2: + defines[rest[0]] = rest[1] + return defines + +def _get_gnu_version_from_defines(defines: T.Dict[str, str]) -> str: + dot = '.' + major = defines.get('__GNUC__', '0') + minor = defines.get('__GNUC_MINOR__', '0') + patch = defines.get('__GNUC_PATCHLEVEL__', '0') + return dot.join((major, minor, patch)) + +def _get_lcc_version_from_defines(defines: T.Dict[str, str]) -> str: + dot = '.' 
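+    # __LCC__ packs the generation digit and the major version together, e.g. a
+    # value of '125' is split below into generation '1' and major '25'.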
+ generation_and_major = defines.get('__LCC__', '100') + generation = generation_and_major[:1] + major = generation_and_major[1:] + minor = defines.get('__LCC_MINOR__', '0') + return dot.join((generation, major, minor)) diff --git a/vendored-meson/meson/mesonbuild/compilers/fortran.py b/vendored-meson/meson/mesonbuild/compilers/fortran.py new file mode 100644 index 000000000000..a80fdff5f44c --- /dev/null +++ b/vendored-meson/meson/mesonbuild/compilers/fortran.py @@ -0,0 +1,546 @@ +# Copyright 2012-2017 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from __future__ import annotations + +import typing as T +import os + +from .. import coredata +from .compilers import ( + clike_debug_args, + Compiler, + CompileCheckMode, +) +from .mixins.clike import CLikeCompiler +from .mixins.gnu import ( + GnuCompiler, gnulike_buildtype_args, gnu_optimization_args +) +from .mixins.intel import IntelGnuLikeCompiler, IntelVisualStudioLikeCompiler +from .mixins.clang import ClangCompiler +from .mixins.elbrus import ElbrusCompiler +from .mixins.pgi import PGICompiler + +from mesonbuild.mesonlib import ( + version_compare, MesonException, + LibType, OptionKey, +) + +if T.TYPE_CHECKING: + from ..coredata import MutableKeyedOptionDictType, KeyedOptionDictType + from ..dependencies import Dependency + from ..envconfig import MachineInfo + from ..environment import Environment + from ..linkers.linkers import DynamicLinker + from ..mesonlib import MachineChoice + from ..programs import ExternalProgram + + +class FortranCompiler(CLikeCompiler, Compiler): + + language = 'fortran' + + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool, + info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): + Compiler.__init__(self, [], exelist, version, for_machine, info, + is_cross=is_cross, full_version=full_version, linker=linker) + CLikeCompiler.__init__(self, exe_wrapper) + + def has_function(self, funcname: str, prefix: str, env: 'Environment', *, + extra_args: T.Optional[T.List[str]] = None, + dependencies: T.Optional[T.List['Dependency']] = None) -> T.Tuple[bool, bool]: + raise MesonException('Fortran does not have "has_function" capability.\n' + 'It is better to test if a Fortran capability is working like:\n\n' + "meson.get_compiler('fortran').links('block; end block; end program')\n\n" + 'that example is to see if the compiler has Fortran 2008 Block element.') + + def _get_basic_compiler_args(self, env: 'Environment', mode: CompileCheckMode) -> T.Tuple[T.List[str], T.List[str]]: + cargs = env.coredata.get_external_args(self.for_machine, self.language) + largs = env.coredata.get_external_link_args(self.for_machine, self.language) + return cargs, largs + + def sanity_check(self, work_dir: str, environment: 'Environment') -> None: + source_name = 'sanitycheckf.f90' + code = 'program main; print *, "Fortran compilation is working."; end program\n' + return 
self._sanity_check_impl(work_dir, environment, source_name, code) + + def get_buildtype_args(self, buildtype: str) -> T.List[str]: + return gnulike_buildtype_args[buildtype] + + def get_optimization_args(self, optimization_level: str) -> T.List[str]: + return gnu_optimization_args[optimization_level] + + def get_debug_args(self, is_debug: bool) -> T.List[str]: + return clike_debug_args[is_debug] + + def get_preprocess_only_args(self) -> T.List[str]: + return ['-cpp'] + super().get_preprocess_only_args() + + def get_module_incdir_args(self) -> T.Tuple[str, ...]: + return ('-I', ) + + def get_module_outdir_args(self, path: str) -> T.List[str]: + return ['-module', path] + + def compute_parameters_with_absolute_paths(self, parameter_list: T.List[str], + build_dir: str) -> T.List[str]: + for idx, i in enumerate(parameter_list): + if i[:2] == '-I' or i[:2] == '-L': + parameter_list[idx] = i[:2] + os.path.normpath(os.path.join(build_dir, i[2:])) + + return parameter_list + + def module_name_to_filename(self, module_name: str) -> str: + if '_' in module_name: # submodule + s = module_name.lower() + if self.id in {'gcc', 'intel', 'intel-cl'}: + filename = s.replace('_', '@') + '.smod' + elif self.id in {'pgi', 'flang'}: + filename = s.replace('_', '-') + '.mod' + else: + filename = s + '.mod' + else: # module + filename = module_name.lower() + '.mod' + + return filename + + def find_library(self, libname: str, env: 'Environment', extra_dirs: T.List[str], + libtype: LibType = LibType.PREFER_SHARED, lib_prefix_warning: bool = True) -> T.Optional[T.List[str]]: + code = 'stop; end program' + return self._find_library_impl(libname, env, extra_dirs, code, libtype, lib_prefix_warning) + + def has_multi_arguments(self, args: T.List[str], env: 'Environment') -> T.Tuple[bool, bool]: + return self._has_multi_arguments(args, env, 'stop; end program') + + def has_multi_link_arguments(self, args: T.List[str], env: 'Environment') -> T.Tuple[bool, bool]: + return self._has_multi_link_arguments(args, env, 'stop; end program') + + def get_options(self) -> 'MutableKeyedOptionDictType': + opts = super().get_options() + key = OptionKey('std', machine=self.for_machine, lang=self.language) + opts.update({ + key: coredata.UserComboOption( + 'Fortran language standard to use', + ['none'], + 'none', + ), + }) + return opts + + +class GnuFortranCompiler(GnuCompiler, FortranCompiler): + + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool, + info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None, + defines: T.Optional[T.Dict[str, str]] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): + FortranCompiler.__init__(self, exelist, version, for_machine, + is_cross, info, exe_wrapper, linker=linker, + full_version=full_version) + GnuCompiler.__init__(self, defines) + default_warn_args = ['-Wall'] + self.warn_args = {'0': [], + '1': default_warn_args, + '2': default_warn_args + ['-Wextra'], + '3': default_warn_args + ['-Wextra', '-Wpedantic', '-fimplicit-none'], + 'everything': default_warn_args + ['-Wextra', '-Wpedantic', '-fimplicit-none']} + + def get_options(self) -> 'MutableKeyedOptionDictType': + opts = FortranCompiler.get_options(self) + fortran_stds = ['legacy', 'f95', 'f2003'] + if version_compare(self.version, '>=4.4.0'): + fortran_stds += ['f2008'] + if version_compare(self.version, '>=8.0.0'): + fortran_stds += ['f2018'] + key = OptionKey('std', machine=self.for_machine, lang=self.language) + opts[key].choices = 
['none'] + fortran_stds + return opts + + def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]: + args: T.List[str] = [] + key = OptionKey('std', machine=self.for_machine, lang=self.language) + std = options[key] + if std.value != 'none': + args.append('-std=' + std.value) + return args + + def get_dependency_gen_args(self, outtarget: str, outfile: str) -> T.List[str]: + # Disabled until this is fixed: + # https://gcc.gnu.org/bugzilla/show_bug.cgi?id=62162 + # return ['-cpp', '-MD', '-MQ', outtarget] + return [] + + def get_module_outdir_args(self, path: str) -> T.List[str]: + return ['-J' + path] + + def language_stdlib_only_link_flags(self, env: 'Environment') -> T.List[str]: + # We need to apply the search prefix here, as these link arguments may + # be passed to a different compiler with a different set of default + # search paths, such as when using Clang for C/C++ and gfortran for + # fortran, + search_dirs: T.List[str] = [] + for d in self.get_compiler_dirs(env, 'libraries'): + search_dirs.append(f'-L{d}') + return search_dirs + ['-lgfortran', '-lm'] + + def has_header(self, hname: str, prefix: str, env: 'Environment', *, + extra_args: T.Union[None, T.List[str], T.Callable[['CompileCheckMode'], T.List[str]]] = None, + dependencies: T.Optional[T.List['Dependency']] = None, + disable_cache: bool = False) -> T.Tuple[bool, bool]: + ''' + Derived from mixins/clike.py:has_header, but without C-style usage of + __has_include which breaks with GCC-Fortran 10: + https://github.com/mesonbuild/meson/issues/7017 + ''' + code = f'{prefix}\n#include <{hname}>' + return self.compiles(code, env, extra_args=extra_args, + dependencies=dependencies, mode=CompileCheckMode.PREPROCESS, disable_cache=disable_cache) + + +class ElbrusFortranCompiler(ElbrusCompiler, FortranCompiler): + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool, + info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None, + defines: T.Optional[T.Dict[str, str]] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): + FortranCompiler.__init__(self, exelist, version, for_machine, is_cross, + info, exe_wrapper, linker=linker, full_version=full_version) + ElbrusCompiler.__init__(self) + + def get_options(self) -> 'MutableKeyedOptionDictType': + opts = FortranCompiler.get_options(self) + fortran_stds = ['f95', 'f2003', 'f2008', 'gnu', 'legacy', 'f2008ts'] + key = OptionKey('std', machine=self.for_machine, lang=self.language) + opts[key].choices = ['none'] + fortran_stds + return opts + + def get_module_outdir_args(self, path: str) -> T.List[str]: + return ['-J' + path] + + +class G95FortranCompiler(FortranCompiler): + + LINKER_PREFIX = '-Wl,' + id = 'g95' + + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool, + info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): + FortranCompiler.__init__(self, exelist, version, for_machine, + is_cross, info, exe_wrapper, linker=linker, + full_version=full_version) + default_warn_args = ['-Wall'] + self.warn_args = {'0': [], + '1': default_warn_args, + '2': default_warn_args + ['-Wextra'], + '3': default_warn_args + ['-Wextra', '-pedantic'], + 'everything': default_warn_args + ['-Wextra', '-pedantic']} + + def get_module_outdir_args(self, path: str) -> T.List[str]: + return ['-fmod=' + path] + + def get_no_warn_args(self) -> 
T.List[str]: + # FIXME: Confirm that there's no compiler option to disable all warnings + return [] + + +class SunFortranCompiler(FortranCompiler): + + LINKER_PREFIX = '-Wl,' + id = 'sun' + + def get_dependency_gen_args(self, outtarget: str, outfile: str) -> T.List[str]: + return ['-fpp'] + + def get_always_args(self) -> T.List[str]: + return [] + + def get_warn_args(self, level: str) -> T.List[str]: + return [] + + def get_module_incdir_args(self) -> T.Tuple[str, ...]: + return ('-M', ) + + def get_module_outdir_args(self, path: str) -> T.List[str]: + return ['-moddir=' + path] + + def openmp_flags(self) -> T.List[str]: + return ['-xopenmp'] + + +class IntelFortranCompiler(IntelGnuLikeCompiler, FortranCompiler): + + file_suffixes = ('f90', 'f', 'for', 'ftn', 'fpp', ) + id = 'intel' + + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool, + info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): + FortranCompiler.__init__(self, exelist, version, for_machine, + is_cross, info, exe_wrapper, linker=linker, + full_version=full_version) + # FIXME: Add support for OS X and Windows in detect_fortran_compiler so + # we are sent the type of compiler + IntelGnuLikeCompiler.__init__(self) + default_warn_args = ['-warn', 'general', '-warn', 'truncated_source'] + self.warn_args = {'0': [], + '1': default_warn_args, + '2': default_warn_args + ['-warn', 'unused'], + '3': ['-warn', 'all'], + 'everything': ['-warn', 'all']} + + def get_options(self) -> 'MutableKeyedOptionDictType': + opts = FortranCompiler.get_options(self) + key = OptionKey('std', machine=self.for_machine, lang=self.language) + opts[key].choices = ['none', 'legacy', 'f95', 'f2003', 'f2008', 'f2018'] + return opts + + def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]: + args: T.List[str] = [] + key = OptionKey('std', machine=self.for_machine, lang=self.language) + std = options[key] + stds = {'legacy': 'none', 'f95': 'f95', 'f2003': 'f03', 'f2008': 'f08', 'f2018': 'f18'} + if std.value != 'none': + args.append('-stand=' + stds[std.value]) + return args + + def get_preprocess_only_args(self) -> T.List[str]: + return ['-cpp', '-EP'] + + def language_stdlib_only_link_flags(self, env: 'Environment') -> T.List[str]: + # TODO: needs default search path added + return ['-lifcore', '-limf'] + + def get_dependency_gen_args(self, outtarget: str, outfile: str) -> T.List[str]: + return ['-gen-dep=' + outtarget, '-gen-depformat=make'] + + +class IntelLLVMFortranCompiler(IntelFortranCompiler): + + id = 'intel-llvm' + + +class IntelClFortranCompiler(IntelVisualStudioLikeCompiler, FortranCompiler): + + file_suffixes = ('f90', 'f', 'for', 'ftn', 'fpp', ) + always_args = ['/nologo'] + + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, + is_cross: bool, info: 'MachineInfo', target: str, + exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): + FortranCompiler.__init__(self, exelist, version, for_machine, + is_cross, info, exe_wrapper, linker=linker, + full_version=full_version) + IntelVisualStudioLikeCompiler.__init__(self, target) + + default_warn_args = ['/warn:general', '/warn:truncated_source'] + self.warn_args = {'0': [], + '1': default_warn_args, + '2': default_warn_args + ['/warn:unused'], + '3': ['/warn:all'], + 'everything': ['/warn:all']} + + def 
get_options(self) -> 'MutableKeyedOptionDictType': + opts = FortranCompiler.get_options(self) + key = OptionKey('std', machine=self.for_machine, lang=self.language) + opts[key].choices = ['none', 'legacy', 'f95', 'f2003', 'f2008', 'f2018'] + return opts + + def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]: + args: T.List[str] = [] + key = OptionKey('std', machine=self.for_machine, lang=self.language) + std = options[key] + stds = {'legacy': 'none', 'f95': 'f95', 'f2003': 'f03', 'f2008': 'f08', 'f2018': 'f18'} + if std.value != 'none': + args.append('/stand:' + stds[std.value]) + return args + + def get_module_outdir_args(self, path: str) -> T.List[str]: + return ['/module:' + path] + + +class IntelLLVMClFortranCompiler(IntelClFortranCompiler): + + id = 'intel-llvm-cl' + +class PathScaleFortranCompiler(FortranCompiler): + + id = 'pathscale' + + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool, + info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): + FortranCompiler.__init__(self, exelist, version, for_machine, + is_cross, info, exe_wrapper, linker=linker, + full_version=full_version) + default_warn_args = ['-fullwarn'] + self.warn_args = {'0': [], + '1': default_warn_args, + '2': default_warn_args, + '3': default_warn_args, + 'everything': default_warn_args} + + def openmp_flags(self) -> T.List[str]: + return ['-mp'] + + +class PGIFortranCompiler(PGICompiler, FortranCompiler): + + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool, + info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): + FortranCompiler.__init__(self, exelist, version, for_machine, + is_cross, info, exe_wrapper, linker=linker, + full_version=full_version) + PGICompiler.__init__(self) + + default_warn_args = ['-Minform=inform'] + self.warn_args = {'0': [], + '1': default_warn_args, + '2': default_warn_args, + '3': default_warn_args + ['-Mdclchk'], + 'everything': default_warn_args + ['-Mdclchk']} + + def language_stdlib_only_link_flags(self, env: 'Environment') -> T.List[str]: + # TODO: needs default search path added + return ['-lpgf90rtl', '-lpgf90', '-lpgf90_rpm1', '-lpgf902', + '-lpgf90rtl', '-lpgftnrtl', '-lrt'] + + +class NvidiaHPC_FortranCompiler(PGICompiler, FortranCompiler): + + id = 'nvidia_hpc' + + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool, + info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): + FortranCompiler.__init__(self, exelist, version, for_machine, + is_cross, info, exe_wrapper, linker=linker, + full_version=full_version) + PGICompiler.__init__(self) + + default_warn_args = ['-Minform=inform'] + self.warn_args = {'0': [], + '1': default_warn_args, + '2': default_warn_args, + '3': default_warn_args + ['-Mdclchk'], + 'everything': default_warn_args + ['-Mdclchk']} + + +class FlangFortranCompiler(ClangCompiler, FortranCompiler): + + id = 'flang' + + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool, + info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): + 
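+        # Flang is Clang-based; the Clang mixin below is initialized with an empty defines dict.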
FortranCompiler.__init__(self, exelist, version, for_machine, + is_cross, info, exe_wrapper, linker=linker, + full_version=full_version) + ClangCompiler.__init__(self, {}) + default_warn_args = ['-Minform=inform'] + self.warn_args = {'0': [], + '1': default_warn_args, + '2': default_warn_args, + '3': default_warn_args, + 'everything': default_warn_args} + + def language_stdlib_only_link_flags(self, env: 'Environment') -> T.List[str]: + # We need to apply the search prefix here, as these link arguments may + # be passed to a different compiler with a different set of default + # search paths, such as when using Clang for C/C++ and gfortran for + # fortran, + # XXX: Untested.... + search_dirs: T.List[str] = [] + for d in self.get_compiler_dirs(env, 'libraries'): + search_dirs.append(f'-L{d}') + return search_dirs + ['-lflang', '-lpgmath'] + +class ArmLtdFlangFortranCompiler(FlangFortranCompiler): + + id = 'armltdflang' + +class Open64FortranCompiler(FortranCompiler): + + id = 'open64' + + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool, + info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): + FortranCompiler.__init__(self, exelist, version, for_machine, + is_cross, info, exe_wrapper, linker=linker, + full_version=full_version) + default_warn_args = ['-fullwarn'] + self.warn_args = {'0': [], + '1': default_warn_args, + '2': default_warn_args, + '3': default_warn_args, + 'everything': default_warn_args} + + def openmp_flags(self) -> T.List[str]: + return ['-mp'] + + +class NAGFortranCompiler(FortranCompiler): + + id = 'nagfor' + + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, is_cross: bool, + info: 'MachineInfo', exe_wrapper: T.Optional['ExternalProgram'] = None, + linker: T.Optional['DynamicLinker'] = None, + full_version: T.Optional[str] = None): + FortranCompiler.__init__(self, exelist, version, for_machine, + is_cross, info, exe_wrapper, linker=linker, + full_version=full_version) + # Warnings are on by default; -w disables (by category): + self.warn_args = { + '0': ['-w=all'], + '1': [], + '2': [], + '3': [], + 'everything': [], + } + + def get_always_args(self) -> T.List[str]: + return self.get_nagfor_quiet(self.version) + + def get_module_outdir_args(self, path: str) -> T.List[str]: + return ['-mdir', path] + + @staticmethod + def get_nagfor_quiet(version: str) -> T.List[str]: + return ['-quiet'] if version_compare(version, '>=7100') else [] + + def get_pic_args(self) -> T.List[str]: + return ['-PIC'] + + def get_preprocess_only_args(self) -> T.List[str]: + return ['-fpp'] + + def get_std_exe_link_args(self) -> T.List[str]: + return self.get_always_args() + + def openmp_flags(self) -> T.List[str]: + return ['-openmp'] diff --git a/vendored-meson/meson/mesonbuild/compilers/java.py b/vendored-meson/meson/mesonbuild/compilers/java.py new file mode 100644 index 000000000000..42cf0a159ce0 --- /dev/null +++ b/vendored-meson/meson/mesonbuild/compilers/java.py @@ -0,0 +1,125 @@ +# Copyright 2012-2017 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from __future__ import annotations + +import os +import os.path +import shutil +import subprocess +import textwrap +import typing as T + +from ..mesonlib import EnvironmentException +from .compilers import Compiler, java_buildtype_args +from .mixins.islinker import BasicLinkerIsCompilerMixin + +if T.TYPE_CHECKING: + from ..envconfig import MachineInfo + from ..environment import Environment + from ..mesonlib import MachineChoice + +class JavaCompiler(BasicLinkerIsCompilerMixin, Compiler): + + language = 'java' + id = 'unknown' + + _WARNING_LEVELS: T.Dict[str, T.List[str]] = { + '0': ['-nowarn'], + '1': ['-Xlint:all'], + '2': ['-Xlint:all', '-Xdoclint:all'], + '3': ['-Xlint:all', '-Xdoclint:all'], + } + + def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice, + info: 'MachineInfo', full_version: T.Optional[str] = None): + super().__init__([], exelist, version, for_machine, info, full_version=full_version) + self.javarunner = 'java' + + def get_warn_args(self, level: str) -> T.List[str]: + return self._WARNING_LEVELS[level] + + def get_werror_args(self) -> T.List[str]: + return ['-Werror'] + + def get_no_warn_args(self) -> T.List[str]: + return ['-nowarn'] + + def get_output_args(self, outputname: str) -> T.List[str]: + if outputname == '': + outputname = './' + return ['-d', outputname, '-s', outputname] + + def get_pic_args(self) -> T.List[str]: + return [] + + def get_pch_use_args(self, pch_dir: str, header: str) -> T.List[str]: + return [] + + def get_pch_name(self, name: str) -> str: + return '' + + def get_buildtype_args(self, buildtype: str) -> T.List[str]: + return java_buildtype_args[buildtype] + + def compute_parameters_with_absolute_paths(self, parameter_list: T.List[str], + build_dir: str) -> T.List[str]: + for idx, i in enumerate(parameter_list): + if i in {'-cp', '-classpath', '-sourcepath'} and idx + 1 < len(parameter_list): + path_list = parameter_list[idx + 1].split(os.pathsep) + path_list = [os.path.normpath(os.path.join(build_dir, x)) for x in path_list] + parameter_list[idx + 1] = os.pathsep.join(path_list) + + return parameter_list + + def sanity_check(self, work_dir: str, environment: 'Environment') -> None: + src = 'SanityCheck.java' + obj = 'SanityCheck' + source_name = os.path.join(work_dir, src) + with open(source_name, 'w', encoding='utf-8') as ofile: + ofile.write(textwrap.dedent( + '''class SanityCheck { + public static void main(String[] args) { + int i; + } + } + ''')) + pc = subprocess.Popen(self.exelist + [src], cwd=work_dir) + pc.wait() + if pc.returncode != 0: + raise EnvironmentException(f'Java compiler {self.name_string()} cannot compile programs.') + runner = shutil.which(self.javarunner) + if runner: + cmdlist = [runner, obj] + pe = subprocess.Popen(cmdlist, cwd=work_dir) + pe.wait() + if pe.returncode != 0: + raise EnvironmentException(f'Executables created by Java compiler {self.name_string()} are not runnable.') + else: + m = "Java Virtual Machine wasn't found, but it's needed by Meson. 
" \ + "Please install a JRE.\nIf you have specific needs where this " \ + "requirement doesn't make sense, please open a bug at " \ + "https://github.com/mesonbuild/meson/issues/new and tell us " \ + "all about it." + raise EnvironmentException(m) + + def needs_static_linker(self) -> bool: + return False + + def get_optimization_args(self, optimization_level: str) -> T.List[str]: + return [] + + def get_debug_args(self, is_debug: bool) -> T.List[str]: + if is_debug: + return ['-g'] + return ['-g:none'] diff --git a/vendored-meson/meson/mesonbuild/compilers/mixins/__init__.py b/vendored-meson/meson/mesonbuild/compilers/mixins/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/vendored-meson/meson/mesonbuild/compilers/mixins/arm.py b/vendored-meson/meson/mesonbuild/compilers/mixins/arm.py new file mode 100644 index 000000000000..7c533273923c --- /dev/null +++ b/vendored-meson/meson/mesonbuild/compilers/mixins/arm.py @@ -0,0 +1,201 @@ +# Copyright 2012-2020 Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from __future__ import annotations + +"""Representations specific to the arm family of compilers.""" + +import os +import typing as T + +from ... import mesonlib +from ...linkers.linkers import ArmClangDynamicLinker +from ...mesonlib import OptionKey +from ..compilers import clike_debug_args +from .clang import clang_color_args + +if T.TYPE_CHECKING: + from ...environment import Environment + from ...compilers.compilers import Compiler +else: + # This is a bit clever, for mypy we pretend that these mixins descend from + # Compiler, so we get all of the methods and attributes defined for us, but + # for runtime we make them descend from object (which all classes normally + # do). 
This gives up DRYer type checking, with no runtime impact + Compiler = object + +arm_buildtype_args = { + 'plain': [], + 'debug': [], + 'debugoptimized': [], + 'release': [], + 'minsize': [], + 'custom': [], +} # type: T.Dict[str, T.List[str]] + +arm_optimization_args = { + 'plain': [], + '0': ['-O0'], + 'g': ['-g'], + '1': ['-O1'], + '2': [], # Compiler defaults to -O2 + '3': ['-O3', '-Otime'], + 's': ['-O3'], # Compiler defaults to -Ospace +} # type: T.Dict[str, T.List[str]] + +armclang_buildtype_args = { + 'plain': [], + 'debug': [], + 'debugoptimized': [], + 'release': [], + 'minsize': [], + 'custom': [], +} # type: T.Dict[str, T.List[str]] + +armclang_optimization_args = { + 'plain': [], + '0': [], # Compiler defaults to -O0 + 'g': ['-g'], + '1': ['-O1'], + '2': ['-O2'], + '3': ['-O3'], + 's': ['-Oz'] +} # type: T.Dict[str, T.List[str]] + + +class ArmCompiler(Compiler): + + """Functionality that is common to all ARM family compilers.""" + + id = 'arm' + + def __init__(self) -> None: + if not self.is_cross: + raise mesonlib.EnvironmentException('armcc supports only cross-compilation.') + default_warn_args = [] # type: T.List[str] + self.warn_args = {'0': [], + '1': default_warn_args, + '2': default_warn_args + [], + '3': default_warn_args + [], + 'everything': default_warn_args + []} # type: T.Dict[str, T.List[str]] + # Assembly + self.can_compile_suffixes.add('s') + self.can_compile_suffixes.add('sx') + + def get_pic_args(self) -> T.List[str]: + # FIXME: Add /ropi, /rwpi, /fpic etc. qualifiers to --apcs + return [] + + def get_buildtype_args(self, buildtype: str) -> T.List[str]: + return arm_buildtype_args[buildtype] + + # Override CCompiler.get_always_args + def get_always_args(self) -> T.List[str]: + return [] + + def get_dependency_gen_args(self, outtarget: str, outfile: str) -> T.List[str]: + return ['--depend_target', outtarget, '--depend', outfile, '--depend_single_line'] + + def get_pch_use_args(self, pch_dir: str, header: str) -> T.List[str]: + # FIXME: Add required arguments + # NOTE from armcc user guide: + # "Support for Precompiled Header (PCH) files is deprecated from ARM Compiler 5.05 + # onwards on all platforms. Note that ARM Compiler on Windows 8 never supported + # PCH files." + return [] + + def get_pch_suffix(self) -> str: + # NOTE from armcc user guide: + # "Support for Precompiled Header (PCH) files is deprecated from ARM Compiler 5.05 + # onwards on all platforms. Note that ARM Compiler on Windows 8 never supported + # PCH files." + return 'pch' + + def thread_flags(self, env: 'Environment') -> T.List[str]: + return [] + + def get_coverage_args(self) -> T.List[str]: + return [] + + def get_optimization_args(self, optimization_level: str) -> T.List[str]: + return arm_optimization_args[optimization_level] + + def get_debug_args(self, is_debug: bool) -> T.List[str]: + return clike_debug_args[is_debug] + + def compute_parameters_with_absolute_paths(self, parameter_list: T.List[str], build_dir: str) -> T.List[str]: + for idx, i in enumerate(parameter_list): + if i[:2] == '-I' or i[:2] == '-L': + parameter_list[idx] = i[:2] + os.path.normpath(os.path.join(build_dir, i[2:])) + + return parameter_list + + +class ArmclangCompiler(Compiler): + ''' + This is the Keil armclang. 
+ ''' + + id = 'armclang' + + def __init__(self) -> None: + if not self.is_cross: + raise mesonlib.EnvironmentException('armclang supports only cross-compilation.') + # Check whether 'armlink' is available in path + if not isinstance(self.linker, ArmClangDynamicLinker): + raise mesonlib.EnvironmentException(f'Unsupported Linker {self.linker.exelist}, must be armlink') + if not mesonlib.version_compare(self.version, '==' + self.linker.version): + raise mesonlib.EnvironmentException('armlink version does not match with compiler version') + self.base_options = { + OptionKey(o) for o in + ['b_pch', 'b_lto', 'b_pgo', 'b_sanitize', 'b_coverage', + 'b_ndebug', 'b_staticpic', 'b_colorout']} + # Assembly + self.can_compile_suffixes.add('s') + self.can_compile_suffixes.add('sx') + + def get_pic_args(self) -> T.List[str]: + # PIC support is not enabled by default for ARM, + # if users want to use it, they need to add the required arguments explicitly + return [] + + def get_colorout_args(self, colortype: str) -> T.List[str]: + return clang_color_args[colortype][:] + + def get_buildtype_args(self, buildtype: str) -> T.List[str]: + return armclang_buildtype_args[buildtype] + + def get_pch_suffix(self) -> str: + return 'gch' + + def get_pch_use_args(self, pch_dir: str, header: str) -> T.List[str]: + # Workaround for Clang bug http://llvm.org/bugs/show_bug.cgi?id=15136 + # This flag is internal to Clang (or at least not documented on the man page) + # so it might change semantics at any time. + return ['-include-pch', os.path.join(pch_dir, self.get_pch_name(header))] + + def get_dependency_gen_args(self, outtarget: str, outfile: str) -> T.List[str]: + return ['-MD', '-MT', outtarget, '-MF', outfile] + + def get_optimization_args(self, optimization_level: str) -> T.List[str]: + return armclang_optimization_args[optimization_level] + + def get_debug_args(self, is_debug: bool) -> T.List[str]: + return clike_debug_args[is_debug] + + def compute_parameters_with_absolute_paths(self, parameter_list: T.List[str], build_dir: str) -> T.List[str]: + for idx, i in enumerate(parameter_list): + if i[:2] == '-I' or i[:2] == '-L': + parameter_list[idx] = i[:2] + os.path.normpath(os.path.join(build_dir, i[2:])) + + return parameter_list diff --git a/vendored-meson/meson/mesonbuild/compilers/mixins/ccrx.py b/vendored-meson/meson/mesonbuild/compilers/mixins/ccrx.py new file mode 100644 index 000000000000..71c103338e25 --- /dev/null +++ b/vendored-meson/meson/mesonbuild/compilers/mixins/ccrx.py @@ -0,0 +1,135 @@ +# Copyright 2012-2019 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+from __future__ import annotations + +"""Representations specific to the Renesas CC-RX compiler family.""" + +import os +import typing as T + +from ...mesonlib import EnvironmentException + +if T.TYPE_CHECKING: + from ...envconfig import MachineInfo + from ...environment import Environment + from ...compilers.compilers import Compiler +else: + # This is a bit clever, for mypy we pretend that these mixins descend from + # Compiler, so we get all of the methods and attributes defined for us, but + # for runtime we make them descend from object (which all classes normally + # do). This gives up DRYer type checking, with no runtime impact + Compiler = object + +ccrx_buildtype_args: T.Dict[str, T.List[str]] = { + 'plain': [], + 'debug': [], + 'debugoptimized': [], + 'release': [], + 'minsize': [], + 'custom': [], +} + +ccrx_optimization_args: T.Dict[str, T.List[str]] = { + '0': ['-optimize=0'], + 'g': ['-optimize=0'], + '1': ['-optimize=1'], + '2': ['-optimize=2'], + '3': ['-optimize=max'], + 's': ['-optimize=2', '-size'] +} + +ccrx_debug_args: T.Dict[bool, T.List[str]] = { + False: [], + True: ['-debug'] +} + + +class CcrxCompiler(Compiler): + + if T.TYPE_CHECKING: + is_cross = True + can_compile_suffixes: T.Set[str] = set() + + id = 'ccrx' + + def __init__(self) -> None: + if not self.is_cross: + raise EnvironmentException('ccrx supports only cross-compilation.') + # Assembly + self.can_compile_suffixes.add('src') + default_warn_args: T.List[str] = [] + self.warn_args: T.Dict[str, T.List[str]] = { + '0': [], + '1': default_warn_args, + '2': default_warn_args + [], + '3': default_warn_args + [], + 'everything': default_warn_args + []} + + def get_pic_args(self) -> T.List[str]: + # PIC support is not enabled by default for CCRX, + # if users want to use it, they need to add the required arguments explicitly + return [] + + def get_buildtype_args(self, buildtype: str) -> T.List[str]: + return ccrx_buildtype_args[buildtype] + + def get_pch_suffix(self) -> str: + return 'pch' + + def get_pch_use_args(self, pch_dir: str, header: str) -> T.List[str]: + return [] + + def thread_flags(self, env: 'Environment') -> T.List[str]: + return [] + + def get_coverage_args(self) -> T.List[str]: + return [] + + def get_no_stdinc_args(self) -> T.List[str]: + return [] + + def get_no_stdlib_link_args(self) -> T.List[str]: + return [] + + def get_optimization_args(self, optimization_level: str) -> T.List[str]: + return ccrx_optimization_args[optimization_level] + + def get_debug_args(self, is_debug: bool) -> T.List[str]: + return ccrx_debug_args[is_debug] + + @classmethod + def _unix_args_to_native(cls, args: T.List[str], info: MachineInfo) -> T.List[str]: + result: T.List[str] = [] + for i in args: + if i.startswith('-D'): + i = '-define=' + i[2:] + if i.startswith('-I'): + i = '-include=' + i[2:] + if i.startswith('-Wl,-rpath='): + continue + elif i == '--print-search-dirs': + continue + elif i.startswith('-L'): + continue + elif not i.startswith('-lib=') and i.endswith(('.a', '.lib')): + i = '-lib=' + i + result.append(i) + return result + + def compute_parameters_with_absolute_paths(self, parameter_list: T.List[str], build_dir: str) -> T.List[str]: + for idx, i in enumerate(parameter_list): + if i[:9] == '-include=': + parameter_list[idx] = i[:9] + os.path.normpath(os.path.join(build_dir, i[9:])) + + return parameter_list diff --git a/vendored-meson/meson/mesonbuild/compilers/mixins/clang.py b/vendored-meson/meson/mesonbuild/compilers/mixins/clang.py new file mode 100644 index 000000000000..24f24a8df0db --- 
/dev/null +++ b/vendored-meson/meson/mesonbuild/compilers/mixins/clang.py @@ -0,0 +1,180 @@ +# Copyright 2019-2022 The meson development team +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from __future__ import annotations + +"""Abstractions for the LLVM/Clang compiler family.""" + +import os +import shutil +import typing as T + +from ... import mesonlib +from ...linkers.linkers import AppleDynamicLinker, ClangClDynamicLinker, LLVMDynamicLinker, GnuGoldDynamicLinker, \ + MoldDynamicLinker +from ...mesonlib import OptionKey +from ..compilers import CompileCheckMode +from .gnu import GnuLikeCompiler + +if T.TYPE_CHECKING: + from ...environment import Environment + from ...dependencies import Dependency # noqa: F401 + +clang_color_args: T.Dict[str, T.List[str]] = { + 'auto': ['-fcolor-diagnostics'], + 'always': ['-fcolor-diagnostics'], + 'never': ['-fno-color-diagnostics'], +} + +clang_optimization_args: T.Dict[str, T.List[str]] = { + 'plain': [], + '0': ['-O0'], + 'g': ['-Og'], + '1': ['-O1'], + '2': ['-O2'], + '3': ['-O3'], + 's': ['-Oz'], +} + +class ClangCompiler(GnuLikeCompiler): + + id = 'clang' + + def __init__(self, defines: T.Optional[T.Dict[str, str]]): + super().__init__() + self.defines = defines or {} + self.base_options.update( + {OptionKey('b_colorout'), OptionKey('b_lto_threads'), OptionKey('b_lto_mode'), OptionKey('b_thinlto_cache'), + OptionKey('b_thinlto_cache_dir')}) + + # TODO: this really should be part of the linker base_options, but + # linkers don't have base_options. + if isinstance(self.linker, AppleDynamicLinker): + self.base_options.add(OptionKey('b_bitcode')) + # All Clang backends can also do LLVM IR + self.can_compile_suffixes.add('ll') + + def get_colorout_args(self, colortype: str) -> T.List[str]: + return clang_color_args[colortype][:] + + def has_builtin_define(self, define: str) -> bool: + return define in self.defines + + def get_builtin_define(self, define: str) -> T.Optional[str]: + return self.defines.get(define) + + def get_optimization_args(self, optimization_level: str) -> T.List[str]: + return clang_optimization_args[optimization_level] + + def get_pch_suffix(self) -> str: + return 'pch' + + def get_pch_use_args(self, pch_dir: str, header: str) -> T.List[str]: + # Workaround for Clang bug http://llvm.org/bugs/show_bug.cgi?id=15136 + # This flag is internal to Clang (or at least not documented on the man page) + # so it might change semantics at any time. + return ['-include-pch', os.path.join(pch_dir, self.get_pch_name(header))] + + def get_compiler_check_args(self, mode: CompileCheckMode) -> T.List[str]: + # Clang is different than GCC, it will return True when a symbol isn't + # defined in a header. Specifically this seems to have something to do + # with functions that may be in a header on some systems, but not all of + # them. `strlcat` specifically with can trigger this. 
+        myargs: T.List[str] = ['-Werror=implicit-function-declaration']
+        if mode is CompileCheckMode.COMPILE:
+            myargs.extend(['-Werror=unknown-warning-option', '-Werror=unused-command-line-argument'])
+            if mesonlib.version_compare(self.version, '>=3.6.0'):
+                myargs.append('-Werror=ignored-optimization-argument')
+        return super().get_compiler_check_args(mode) + myargs
+
+    def has_function(self, funcname: str, prefix: str, env: 'Environment', *,
+                     extra_args: T.Optional[T.List[str]] = None,
+                     dependencies: T.Optional[T.List['Dependency']] = None) -> T.Tuple[bool, bool]:
+        if extra_args is None:
+            extra_args = []
+        # Starting with XCode 8, we need to pass this to force linker
+        # visibility to obey OS X/iOS/tvOS minimum version targets with
+        # -mmacosx-version-min, -miphoneos-version-min, -mtvos-version-min etc.
+        # https://github.com/Homebrew/homebrew-core/issues/3727
+        # TODO: this really should be communicated by the linker
+        if isinstance(self.linker, AppleDynamicLinker) and mesonlib.version_compare(self.version, '>=8.0'):
+            extra_args.append('-Wl,-no_weak_imports')
+        return super().has_function(funcname, prefix, env, extra_args=extra_args,
+                                    dependencies=dependencies)
+
+    def openmp_flags(self) -> T.List[str]:
+        if mesonlib.version_compare(self.version, '>=3.8.0'):
+            return ['-fopenmp']
+        elif mesonlib.version_compare(self.version, '>=3.7.0'):
+            return ['-fopenmp=libomp']
+        else:
+            # Shouldn't work, but it'll be checked explicitly in the OpenMP dependency.
+            return []
+
+    @classmethod
+    def use_linker_args(cls, linker: str, version: str) -> T.List[str]:
+        # Clang additionally can use a linker specified as a path, which GCC
+        # (and other gcc-like compilers) cannot. This is because clang (being
+        # llvm based) is retargetable, while GCC is not.
+
+        # qcld: Qualcomm Snapdragon linker, based on LLVM
+        if linker == 'qcld':
+            return ['-fuse-ld=qcld']
+        if linker == 'mold':
+            return ['-fuse-ld=mold']
+
+        if shutil.which(linker):
+            return [f'-fuse-ld={linker}']
+        return super().use_linker_args(linker, version)
+
+    def get_has_func_attribute_extra_args(self, name: str) -> T.List[str]:
+        # Clang only warns about unknown or ignored attributes, so force an
+        # error.
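+        # (Unknown attributes are otherwise only diagnosed as -Wattributes
+        # warnings, so without this every __attribute__ probe would appear
+        # to succeed.)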
+ return ['-Werror=attributes'] + + def get_coverage_link_args(self) -> T.List[str]: + return ['--coverage'] + + def get_lto_compile_args(self, *, threads: int = 0, mode: str = 'default') -> T.List[str]: + args: T.List[str] = [] + if mode == 'thin': + # ThinLTO requires the use of gold, lld, ld64, lld-link or mold 1.1+ + if isinstance(self.linker, (MoldDynamicLinker)): + # https://github.com/rui314/mold/commit/46995bcfc3e3113133620bf16445c5f13cd76a18 + if not mesonlib.version_compare(self.linker.version, '>=1.1'): + raise mesonlib.MesonException("LLVM's ThinLTO requires mold 1.1+") + elif not isinstance(self.linker, (AppleDynamicLinker, ClangClDynamicLinker, LLVMDynamicLinker, GnuGoldDynamicLinker)): + raise mesonlib.MesonException(f"LLVM's ThinLTO only works with gold, lld, lld-link, ld64 or mold, not {self.linker.id}") + args.append(f'-flto={mode}') + else: + assert mode == 'default', 'someone forgot to wire something up' + args.extend(super().get_lto_compile_args(threads=threads)) + return args + + def get_lto_link_args(self, *, threads: int = 0, mode: str = 'default', + thinlto_cache_dir: T.Optional[str] = None) -> T.List[str]: + args = self.get_lto_compile_args(threads=threads, mode=mode) + if mode == 'thin' and thinlto_cache_dir is not None: + # We check for ThinLTO linker support above in get_lto_compile_args, and all of them support + # get_thinlto_cache_args as well + args.extend(self.linker.get_thinlto_cache_args(thinlto_cache_dir)) + # In clang -flto-jobs=0 means auto, and is the default if unspecified, just like in meson + if threads > 0: + if not mesonlib.version_compare(self.version, '>=4.0.0'): + raise mesonlib.MesonException('clang support for LTO threads requires clang >=4.0') + args.append(f'-flto-jobs={threads}') + return args diff --git a/vendored-meson/meson/mesonbuild/compilers/mixins/clike.py b/vendored-meson/meson/mesonbuild/compilers/mixins/clike.py new file mode 100644 index 000000000000..da361856ca72 --- /dev/null +++ b/vendored-meson/meson/mesonbuild/compilers/mixins/clike.py @@ -0,0 +1,1353 @@ +# Copyright 2012-2022 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from __future__ import annotations + + +"""Mixin classes to be shared between C and C++ compilers. + +Without this we'll end up with awful diamond inheritance problems. The goal +of this is to have mixin's, which are classes that are designed *not* to be +standalone, they only work through inheritance. +""" + +import collections +import functools +import glob +import itertools +import os +import re +import subprocess +import copy +import typing as T +from pathlib import Path + +from ... import arglist +from ... import mesonlib +from ... import mlog +from ...linkers.linkers import GnuLikeDynamicLinkerMixin, SolarisDynamicLinker, CompCertDynamicLinker +from ...mesonlib import LibType +from ...coredata import OptionKey +from .. 
import compilers +from ..compilers import CompileCheckMode +from .visualstudio import VisualStudioLikeCompiler + +if T.TYPE_CHECKING: + from ...dependencies import Dependency + from ..._typing import ImmutableListProtocol + from ...environment import Environment + from ...compilers.compilers import Compiler + from ...programs import ExternalProgram +else: + # This is a bit clever, for mypy we pretend that these mixins descend from + # Compiler, so we get all of the methods and attributes defined for us, but + # for runtime we make them descend from object (which all classes normally + # do). This gives up DRYer type checking, with no runtime impact + Compiler = object + +GROUP_FLAGS = re.compile(r'''\.so (?:\.[0-9]+)? (?:\.[0-9]+)? (?:\.[0-9]+)?$ | + ^(?:-Wl,)?-l | + \.a$''', re.X) + +class CLikeCompilerArgs(arglist.CompilerArgs): + prepend_prefixes = ('-I', '-L') + dedup2_prefixes = ('-I', '-isystem', '-L', '-D', '-U') + + # NOTE: not thorough. A list of potential corner cases can be found in + # https://github.com/mesonbuild/meson/pull/4593#pullrequestreview-182016038 + dedup1_prefixes = ('-l', '-Wl,-l', '-Wl,--export-dynamic') + dedup1_suffixes = ('.lib', '.dll', '.so', '.dylib', '.a') + dedup1_args = ('-c', '-S', '-E', '-pipe', '-pthread') + + def to_native(self, copy: bool = False) -> T.List[str]: + # This seems to be allowed, but could never work? + assert isinstance(self.compiler, compilers.Compiler), 'How did you get here' + + # Check if we need to add --start/end-group for circular dependencies + # between static libraries, and for recursively searching for symbols + # needed by static libraries that are provided by object files or + # shared libraries. + self.flush_pre_post() + if copy: + new = self.copy() + else: + new = self + # This covers all ld.bfd, ld.gold, ld.gold, and xild on Linux, which + # all act like (or are) gnu ld + # TODO: this could probably be added to the DynamicLinker instead + if isinstance(self.compiler.linker, (GnuLikeDynamicLinkerMixin, SolarisDynamicLinker, CompCertDynamicLinker)): + group_start = -1 + group_end = -1 + for i, each in enumerate(new): + if not GROUP_FLAGS.search(each): + continue + group_end = i + if group_start < 0: + # First occurrence of a library + group_start = i + if group_start >= 0: + # Last occurrence of a library + new.insert(group_end + 1, '-Wl,--end-group') + new.insert(group_start, '-Wl,--start-group') + # Remove system/default include paths added with -isystem + default_dirs = self.compiler.get_default_include_dirs() + if default_dirs: + real_default_dirs = [self._cached_realpath(i) for i in default_dirs] + bad_idx_list: T.List[int] = [] + for i, each in enumerate(new): + if not each.startswith('-isystem'): + continue + + # Remove the -isystem and the path if the path is a default path + if (each == '-isystem' and + i < (len(new) - 1) and + self._cached_realpath(new[i + 1]) in real_default_dirs): + bad_idx_list += [i, i + 1] + elif each.startswith('-isystem=') and self._cached_realpath(each[9:]) in real_default_dirs: + bad_idx_list += [i] + elif self._cached_realpath(each[8:]) in real_default_dirs: + bad_idx_list += [i] + for i in reversed(bad_idx_list): + new.pop(i) + return self.compiler.unix_args_to_native(new._container) + + @staticmethod + @functools.lru_cache(maxsize=None) + def _cached_realpath(arg: str) -> str: + return os.path.realpath(arg) + + def __repr__(self) -> str: + self.flush_pre_post() + return f'CLikeCompilerArgs({self.compiler!r}, {self._container!r})' + + +class CLikeCompiler(Compiler): + + """Shared bits 
for the C and CPP Compilers.""" + + if T.TYPE_CHECKING: + warn_args: T.Dict[str, T.List[str]] = {} + + # TODO: Replace this manual cache with functools.lru_cache + find_library_cache: T.Dict[T.Tuple[T.Tuple[str, ...], str, T.Tuple[str, ...], str, LibType], T.Optional[T.List[str]]] = {} + find_framework_cache: T.Dict[T.Tuple[T.Tuple[str, ...], str, T.Tuple[str, ...], bool], T.Optional[T.List[str]]] = {} + internal_libs = arglist.UNIXY_COMPILER_INTERNAL_LIBS + + def __init__(self, exe_wrapper: T.Optional['ExternalProgram'] = None): + # If a child ObjC or CPP class has already set it, don't set it ourselves + self.can_compile_suffixes.add('h') + # If the exe wrapper was not found, pretend it wasn't set so that the + # sanity check is skipped and compiler checks use fallbacks. + if not exe_wrapper or not exe_wrapper.found() or not exe_wrapper.get_command(): + self.exe_wrapper = None + else: + self.exe_wrapper = exe_wrapper + # Lazy initialized in get_preprocessor() + self.preprocessor: T.Optional[Compiler] = None + + def compiler_args(self, args: T.Optional[T.Iterable[str]] = None) -> CLikeCompilerArgs: + # This is correct, mypy just doesn't understand co-operative inheritance + return CLikeCompilerArgs(self, args) + + def needs_static_linker(self) -> bool: + return True # When compiling static libraries, so yes. + + def get_always_args(self) -> T.List[str]: + ''' + Args that are always-on for all C compilers other than MSVC + ''' + return self.get_largefile_args() + + def get_no_stdinc_args(self) -> T.List[str]: + return ['-nostdinc'] + + def get_no_stdlib_link_args(self) -> T.List[str]: + return ['-nostdlib'] + + def get_warn_args(self, level: str) -> T.List[str]: + # TODO: this should be an enum + return self.warn_args[level] + + def get_no_warn_args(self) -> T.List[str]: + # Almost every compiler uses this for disabling warnings + return ['-w'] + + def get_depfile_suffix(self) -> str: + return 'd' + + def get_preprocess_only_args(self) -> T.List[str]: + return ['-E', '-P'] + + def get_compile_only_args(self) -> T.List[str]: + return ['-c'] + + def get_no_optimization_args(self) -> T.List[str]: + return ['-O0'] + + def get_output_args(self, outputname: str) -> T.List[str]: + return ['-o', outputname] + + def get_werror_args(self) -> T.List[str]: + return ['-Werror'] + + def get_include_args(self, path: str, is_system: bool) -> T.List[str]: + if path == '': + path = '.' + if is_system: + return ['-isystem', path] + return ['-I' + path] + + def get_compiler_dirs(self, env: 'Environment', name: str) -> T.List[str]: + ''' + Get dirs from the compiler, either `libraries:` or `programs:` + ''' + return [] + + @functools.lru_cache() + def _get_library_dirs(self, env: 'Environment', + elf_class: T.Optional[int] = None) -> 'ImmutableListProtocol[str]': + # TODO: replace elf_class with enum + dirs = self.get_compiler_dirs(env, 'libraries') + if elf_class is None or elf_class == 0: + return dirs + + # if we do have an elf class for 32-bit or 64-bit, we want to check that + # the directory in question contains libraries of the appropriate class. Since + # system directories aren't mixed, we only need to check one file for each + # directory and go by that. If we can't check the file for some reason, assume + # the compiler knows what it's doing, and accept the directory anyway. 
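+        # Background for the header check below: an ELF file starts with the
+        # magic bytes 0x7f 'E' 'L' 'F', followed by the EI_CLASS byte at
+        # offset 4, which is 1 for 32-bit objects and 2 for 64-bit objects,
+        # so reading five bytes is enough to classify a library.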
+ retval: T.List[str] = [] + for d in dirs: + files = [f for f in os.listdir(d) if f.endswith('.so') and os.path.isfile(os.path.join(d, f))] + # if no files, accept directory and move on + if not files: + retval.append(d) + continue + + for f in files: + file_to_check = os.path.join(d, f) + try: + with open(file_to_check, 'rb') as fd: + header = fd.read(5) + # if file is not an ELF file, it's weird, but accept dir + # if it is elf, and the class matches, accept dir + if header[1:4] != b'ELF' or int(header[4]) == elf_class: + retval.append(d) + # at this point, it's an ELF file which doesn't match the + # appropriate elf_class, so skip this one + # stop scanning after the first successful read + break + except OSError: + # Skip the file if we can't read it + pass + + return retval + + def get_library_dirs(self, env: 'Environment', + elf_class: T.Optional[int] = None) -> T.List[str]: + """Wrap the lru_cache so that we return a new copy and don't allow + mutation of the cached value. + """ + return self._get_library_dirs(env, elf_class).copy() + + @functools.lru_cache() + def _get_program_dirs(self, env: 'Environment') -> 'ImmutableListProtocol[str]': + ''' + Programs used by the compiler. Also where toolchain DLLs such as + libstdc++-6.dll are found with MinGW. + ''' + return self.get_compiler_dirs(env, 'programs') + + def get_program_dirs(self, env: 'Environment') -> T.List[str]: + return self._get_program_dirs(env).copy() + + def get_pic_args(self) -> T.List[str]: + return ['-fPIC'] + + def get_pch_use_args(self, pch_dir: str, header: str) -> T.List[str]: + return ['-include', os.path.basename(header)] + + def get_pch_name(self, name: str) -> str: + return os.path.basename(name) + '.' + self.get_pch_suffix() + + def get_default_include_dirs(self) -> T.List[str]: + return [] + + def gen_export_dynamic_link_args(self, env: 'Environment') -> T.List[str]: + return self.linker.export_dynamic_args(env) + + def gen_import_library_args(self, implibname: str) -> T.List[str]: + return self.linker.import_library_args(implibname) + + def _sanity_check_impl(self, work_dir: str, environment: 'Environment', + sname: str, code: str) -> None: + mlog.debug('Sanity testing ' + self.get_display_language() + ' compiler:', mesonlib.join_args(self.exelist)) + mlog.debug(f'Is cross compiler: {self.is_cross!s}.') + + source_name = os.path.join(work_dir, sname) + binname = sname.rsplit('.', 1)[0] + mode = CompileCheckMode.LINK + if self.is_cross: + binname += '_cross' + if self.exe_wrapper is None: + # Linking cross built C/C++ apps is painful. You can't really + # tell if you should use -nostdlib or not and for example + # on OSX the compiler binary is the same but you need + # a ton of compiler flags to differentiate between + # arm and x86_64. So just compile. + mode = CompileCheckMode.COMPILE + cargs, largs = self._get_basic_compiler_args(environment, mode) + extra_flags = cargs + self.linker_to_compiler_args(largs) + + # Is a valid executable output for all toolchains and platforms + binname += '.exe' + # Write binary check source + binary_name = os.path.join(work_dir, binname) + with open(source_name, 'w', encoding='utf-8') as ofile: + ofile.write(code) + # Compile sanity check + # NOTE: extra_flags must be added at the end. 
On MSVC, it might contain a '/link' argument + # after which all further arguments will be passed directly to the linker + cmdlist = self.exelist + [sname] + self.get_output_args(binname) + extra_flags + pc, stdo, stde = mesonlib.Popen_safe(cmdlist, cwd=work_dir) + mlog.debug('Sanity check compiler command line:', mesonlib.join_args(cmdlist)) + mlog.debug('Sanity check compile stdout:') + mlog.debug(stdo) + mlog.debug('-----\nSanity check compile stderr:') + mlog.debug(stde) + mlog.debug('-----') + if pc.returncode != 0: + raise mesonlib.EnvironmentException(f'Compiler {self.name_string()} cannot compile programs.') + # Run sanity check + if self.is_cross: + if self.exe_wrapper is None: + # Can't check if the binaries run so we have to assume they do + return + cmdlist = self.exe_wrapper.get_command() + [binary_name] + else: + cmdlist = [binary_name] + mlog.debug('Running test binary command: ', mesonlib.join_args(cmdlist)) + try: + # fortran code writes to stdout + pe = subprocess.run(cmdlist, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL) + except Exception as e: + raise mesonlib.EnvironmentException(f'Could not invoke sanity test executable: {e!s}.') + if pe.returncode != 0: + raise mesonlib.EnvironmentException(f'Executables created by {self.language} compiler {self.name_string()} are not runnable.') + + def sanity_check(self, work_dir: str, environment: 'Environment') -> None: + code = 'int main(void) { int class=0; return class; }\n' + return self._sanity_check_impl(work_dir, environment, 'sanitycheckc.c', code) + + def check_header(self, hname: str, prefix: str, env: 'Environment', *, + extra_args: T.Union[None, T.List[str], T.Callable[['CompileCheckMode'], T.List[str]]] = None, + dependencies: T.Optional[T.List['Dependency']] = None) -> T.Tuple[bool, bool]: + code = f'''{prefix} + #include <{hname}>''' + return self.compiles(code, env, extra_args=extra_args, + dependencies=dependencies) + + def has_header(self, hname: str, prefix: str, env: 'Environment', *, + extra_args: T.Union[None, T.List[str], T.Callable[['CompileCheckMode'], T.List[str]]] = None, + dependencies: T.Optional[T.List['Dependency']] = None, + disable_cache: bool = False) -> T.Tuple[bool, bool]: + code = f'''{prefix} + #ifdef __has_include + #if !__has_include("{hname}") + #error "Header '{hname}' could not be found" + #endif + #else + #include <{hname}> + #endif''' + return self.compiles(code, env, extra_args=extra_args, + dependencies=dependencies, mode=CompileCheckMode.PREPROCESS, disable_cache=disable_cache) + + def has_header_symbol(self, hname: str, symbol: str, prefix: str, + env: 'Environment', *, + extra_args: T.Union[None, T.List[str], T.Callable[[CompileCheckMode], T.List[str]]] = None, + dependencies: T.Optional[T.List['Dependency']] = None) -> T.Tuple[bool, bool]: + t = f'''{prefix} + #include <{hname}> + int main(void) {{ + /* If it's not defined as a macro, try to use as a symbol */ + #ifndef {symbol} + {symbol}; + #endif + return 0; + }}''' + return self.compiles(t, env, extra_args=extra_args, + dependencies=dependencies) + + def _get_basic_compiler_args(self, env: 'Environment', mode: CompileCheckMode) -> T.Tuple[T.List[str], T.List[str]]: + cargs: T.List[str] = [] + largs: T.List[str] = [] + if mode is CompileCheckMode.LINK: + # Sometimes we need to manually select the CRT to use with MSVC. + # One example is when trying to do a compiler check that involves + # linking with static libraries since MSVC won't select a CRT for + # us in that case and will error out asking us to pick one. 
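+            # Illustrative example: with b_vscrt=mdd an MSVC-like compiler is
+            # handed /MDd (the debug DLL CRT) here; see get_crt_compile_args()
+            # for the authoritative mapping.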
+ try: + crt_val = env.coredata.options[OptionKey('b_vscrt')].value + buildtype = env.coredata.options[OptionKey('buildtype')].value + cargs += self.get_crt_compile_args(crt_val, buildtype) + except (KeyError, AttributeError): + pass + + # Add CFLAGS/CXXFLAGS/OBJCFLAGS/OBJCXXFLAGS and CPPFLAGS from the env + sys_args = env.coredata.get_external_args(self.for_machine, self.language) + if isinstance(sys_args, str): + sys_args = [sys_args] + # Apparently it is a thing to inject linker flags both + # via CFLAGS _and_ LDFLAGS, even though the former are + # also used during linking. These flags can break + # argument checks. Thanks, Autotools. + cleaned_sys_args = self.remove_linkerlike_args(sys_args) + cargs += cleaned_sys_args + + if mode is CompileCheckMode.LINK: + ld_value = env.lookup_binary_entry(self.for_machine, self.language + '_ld') + if ld_value is not None: + largs += self.use_linker_args(ld_value[0], self.version) + + # Add LDFLAGS from the env + sys_ld_args = env.coredata.get_external_link_args(self.for_machine, self.language) + # CFLAGS and CXXFLAGS go to both linking and compiling, but we want them + # to only appear on the command line once. Remove dupes. + largs += [x for x in sys_ld_args if x not in sys_args] + + cargs += self.get_compiler_args_for_mode(mode) + return cargs, largs + + def build_wrapper_args(self, env: 'Environment', + extra_args: T.Union[None, arglist.CompilerArgs, T.List[str], T.Callable[[CompileCheckMode], T.List[str]]], + dependencies: T.Optional[T.List['Dependency']], + mode: CompileCheckMode = CompileCheckMode.COMPILE) -> arglist.CompilerArgs: + # TODO: the caller should handle the listing of these arguments + if extra_args is None: + extra_args = [] + else: + # TODO: we want to do this in the caller + extra_args = mesonlib.listify(extra_args) + extra_args = mesonlib.listify([e(mode.value) if callable(e) else e for e in extra_args]) + + if dependencies is None: + dependencies = [] + elif not isinstance(dependencies, collections.abc.Iterable): + # TODO: we want to ensure the front end does the listifing here + dependencies = [dependencies] + # Collect compiler arguments + cargs: arglist.CompilerArgs = self.compiler_args() + largs: T.List[str] = [] + for d in dependencies: + # Add compile flags needed by dependencies + cargs += d.get_compile_args() + if mode is CompileCheckMode.LINK: + # Add link flags needed to find dependencies + largs += d.get_link_args() + + ca, la = self._get_basic_compiler_args(env, mode) + cargs += ca + largs += la + + cargs += self.get_compiler_check_args(mode) + + # on MSVC compiler and linker flags must be separated by the "/link" argument + # at this point, the '/link' argument may already be part of extra_args, otherwise, it is added here + if self.linker_to_compiler_args([]) == ['/link'] and largs != [] and '/link' not in extra_args: + extra_args += ['/link'] + + args = cargs + extra_args + largs + return args + + def run(self, code: 'mesonlib.FileOrString', env: 'Environment', *, + extra_args: T.Union[T.List[str], T.Callable[[CompileCheckMode], T.List[str]], None] = None, + dependencies: T.Optional[T.List['Dependency']] = None) -> compilers.RunResult: + need_exe_wrapper = env.need_exe_wrapper(self.for_machine) + if need_exe_wrapper and self.exe_wrapper is None: + raise compilers.CrossNoRunException('Can not run test applications in this cross environment.') + with self._build_wrapper(code, env, extra_args, dependencies, mode=CompileCheckMode.LINK, want_output=True) as p: + if p.returncode != 0: + mlog.debug(f'Could not 
compile test file {p.input_name}: {p.returncode}\n')
+                return compilers.RunResult(False)
+            if need_exe_wrapper:
+                cmdlist = self.exe_wrapper.get_command() + [p.output_name]
+            else:
+                cmdlist = [p.output_name]
+            try:
+                pe, so, se = mesonlib.Popen_safe(cmdlist)
+            except Exception as e:
+                mlog.debug(f'Could not run: {cmdlist} (error: {e})\n')
+                return compilers.RunResult(False)
+
+        mlog.debug('Program stdout:\n')
+        mlog.debug(so)
+        mlog.debug('Program stderr:\n')
+        mlog.debug(se)
+        return compilers.RunResult(True, pe.returncode, so, se)
+
+    def _compile_int(self, expression: str, prefix: str, env: 'Environment',
+                     extra_args: T.Union[None, T.List[str], T.Callable[[CompileCheckMode], T.List[str]]],
+                     dependencies: T.Optional[T.List['Dependency']]) -> bool:
+        t = f'''{prefix}
+        #include <stdio.h>
+        int main(void) {{ static int a[1-2*!({expression})]; a[0]=0; return 0; }}'''
+        return self.compiles(t, env, extra_args=extra_args,
+                             dependencies=dependencies)[0]
+
+    def cross_compute_int(self, expression: str, low: T.Optional[int], high: T.Optional[int],
+                          guess: T.Optional[int], prefix: str, env: 'Environment',
+                          extra_args: T.Union[None, T.List[str], T.Callable[[CompileCheckMode], T.List[str]]] = None,
+                          dependencies: T.Optional[T.List['Dependency']] = None) -> int:
+        # Try user's guess first
+        if isinstance(guess, int):
+            if self._compile_int(f'{expression} == {guess}', prefix, env, extra_args, dependencies):
+                return guess
+
+        # If no bounds are given, compute them in the limit of int32
+        maxint = 0x7fffffff
+        minint = -0x80000000
+        if not isinstance(low, int) or not isinstance(high, int):
+            if self._compile_int(f'{expression} >= 0', prefix, env, extra_args, dependencies):
+                low = cur = 0
+                while self._compile_int(f'{expression} > {cur}', prefix, env, extra_args, dependencies):
+                    low = cur + 1
+                    if low > maxint:
+                        raise mesonlib.EnvironmentException('Cross-compile check overflowed')
+                    cur = min(cur * 2 + 1, maxint)
+                high = cur
+            else:
+                high = cur = -1
+                while self._compile_int(f'{expression} < {cur}', prefix, env, extra_args, dependencies):
+                    high = cur - 1
+                    if high < minint:
+                        raise mesonlib.EnvironmentException('Cross-compile check overflowed')
+                    cur = max(cur * 2, minint)
+                low = cur
+        else:
+            # Sanity check limits given by user
+            if high < low:
+                raise mesonlib.EnvironmentException('high limit smaller than low limit')
+            condition = f'{expression} <= {high} && {expression} >= {low}'
+            if not self._compile_int(condition, prefix, env, extra_args, dependencies):
+                raise mesonlib.EnvironmentException('Value out of given range')
+
+        # Binary search
+        while low != high:
+            cur = low + int((high - low) / 2)
+            if self._compile_int(f'{expression} <= {cur}', prefix, env, extra_args, dependencies):
+                high = cur
+            else:
+                low = cur + 1
+
+        return low
+
+    def compute_int(self, expression: str, low: T.Optional[int], high: T.Optional[int],
+                    guess: T.Optional[int], prefix: str, env: 'Environment', *,
+                    extra_args: T.Union[None, T.List[str], T.Callable[[CompileCheckMode], T.List[str]]],
+                    dependencies: T.Optional[T.List['Dependency']] = None) -> int:
+        if extra_args is None:
+            extra_args = []
+        if self.is_cross:
+            return self.cross_compute_int(expression, low, high, guess, prefix, env, extra_args, dependencies)
+        t = f'''{prefix}
+        #include <stdio.h>
+        int main(void) {{
+            printf("%ld\\n", (long)({expression}));
+            return 0;
+        }}'''
+        res = self.run(t, env, extra_args=extra_args,
+                       dependencies=dependencies)
+        if not res.compiled:
+            return -1
+        if res.returncode != 0:
+            raise mesonlib.EnvironmentException('Could not run compute_int test binary.')
+        return int(res.stdout)
+
+    def cross_sizeof(self, typename: str, prefix: str, env: 'Environment', *,
+                     extra_args: T.Union[None, T.List[str], T.Callable[[CompileCheckMode], T.List[str]]] = None,
+                     dependencies: T.Optional[T.List['Dependency']] = None) -> int:
+        if extra_args is None:
+            extra_args = []
+        t = f'''{prefix}
+        #include <stdio.h>
+        int main(void) {{
+            {typename} something;
+            return 0;
+        }}'''
+        if not self.compiles(t, env, extra_args=extra_args,
+                             dependencies=dependencies)[0]:
+            return -1
+        return self.cross_compute_int(f'sizeof({typename})', None, None, None, prefix, env, extra_args, dependencies)
+
+    def sizeof(self, typename: str, prefix: str, env: 'Environment', *,
+               extra_args: T.Union[None, T.List[str], T.Callable[[CompileCheckMode], T.List[str]]] = None,
+               dependencies: T.Optional[T.List['Dependency']] = None) -> T.Tuple[int, bool]:
+        if extra_args is None:
+            extra_args = []
+        if self.is_cross:
+            r = self.cross_sizeof(typename, prefix, env, extra_args=extra_args,
+                                  dependencies=dependencies)
+            return r, False
+        t = f'''{prefix}
+        #include <stdio.h>
+        int main(void) {{
+            printf("%ld\\n", (long)(sizeof({typename})));
+            return 0;
+        }}'''
+        res = self.cached_run(t, env, extra_args=extra_args,
+                              dependencies=dependencies)
+        if not res.compiled:
+            return -1, False
+        if res.returncode != 0:
+            raise mesonlib.EnvironmentException('Could not run sizeof test binary.')
+        return int(res.stdout), res.cached
+
+    def cross_alignment(self, typename: str, prefix: str, env: 'Environment', *,
+                        extra_args: T.Optional[T.List[str]] = None,
+                        dependencies: T.Optional[T.List['Dependency']] = None) -> int:
+        if extra_args is None:
+            extra_args = []
+        t = f'''{prefix}
+        #include <stdio.h>
+        int main(void) {{
+            {typename} something;
+            return 0;
+        }}'''
+        if not self.compiles(t, env, extra_args=extra_args,
+                             dependencies=dependencies)[0]:
+            return -1
+        t = f'''{prefix}
+        #include <stddef.h>
+        struct tmp {{
+            char c;
+            {typename} target;
+        }};'''
+        return self.cross_compute_int('offsetof(struct tmp, target)', None, None, None, t, env, extra_args, dependencies)
+
+    def alignment(self, typename: str, prefix: str, env: 'Environment', *,
+                  extra_args: T.Optional[T.List[str]] = None,
+                  dependencies: T.Optional[T.List['Dependency']] = None) -> T.Tuple[int, bool]:
+        if extra_args is None:
+            extra_args = []
+        if self.is_cross:
+            r = self.cross_alignment(typename, prefix, env, extra_args=extra_args,
+                                     dependencies=dependencies)
+            return r, False
+        t = f'''{prefix}
+        #include <stdio.h>
+        #include <stddef.h>
+        struct tmp {{
+            char c;
+            {typename} target;
+        }};
+        int main(void) {{
+            printf("%d", (int)offsetof(struct tmp, target));
+            return 0;
+        }}'''
+        res = self.cached_run(t, env, extra_args=extra_args,
+                              dependencies=dependencies)
+        if not res.compiled:
+            raise mesonlib.EnvironmentException('Could not compile alignment test.')
+        if res.returncode != 0:
+            raise mesonlib.EnvironmentException('Could not run alignment test binary.')
+        align = int(res.stdout)
+        if align == 0:
+            raise mesonlib.EnvironmentException(f'Could not determine alignment of {typename}. Sorry. You might want to file a bug.')
+        return align, res.cached
+
+    def get_define(self, dname: str, prefix: str, env: 'Environment',
+                   extra_args: T.Union[T.List[str], T.Callable[[CompileCheckMode], T.List[str]]],
+                   dependencies: T.Optional[T.List['Dependency']],
+                   disable_cache: bool = False) -> T.Tuple[str, bool]:
+        delim = '"MESON_GET_DEFINE_DELIMITER"'
+        code = f'''
+        {prefix}
+        #ifndef {dname}
+        # define {dname}
+        #endif
+        {delim}\n{dname}'''
+        args = self.build_wrapper_args(env, extra_args, dependencies,
+                                       mode=CompileCheckMode.PREPROCESS).to_native()
+        func = functools.partial(self.cached_compile, code, env.coredata, extra_args=args, mode=CompileCheckMode.PREPROCESS)
+        if disable_cache:
+            func = functools.partial(self.compile, code, extra_args=args, mode=CompileCheckMode.PREPROCESS)
+        with func() as p:
+            cached = p.cached
+            if p.returncode != 0:
+                raise mesonlib.EnvironmentException(f'Could not get define {dname!r}')
+        # Get the preprocessed value after the delimiter,
+        # minus the extra newline at the end and
+        # merge string literals.
+        return self._concatenate_string_literals(p.stdout.split(delim + '\n')[-1][:-1]).strip(), cached
+
+    def get_return_value(self, fname: str, rtype: str, prefix: str,
+                         env: 'Environment', extra_args: T.Optional[T.List[str]],
+                         dependencies: T.Optional[T.List['Dependency']]) -> T.Union[str, int]:
+        # TODO: rtype should be an enum.
+        # TODO: maybe we can use overload to tell mypy when this will return int vs str?
+        if rtype == 'string':
+            fmt = '%s'
+            cast = '(char*)'
+        elif rtype == 'int':
+            fmt = '%lli'
+            cast = '(long long int)'
+        else:
+            raise AssertionError(f'BUG: Unknown return type {rtype!r}')
+        code = f'''{prefix}
+        #include <stdio.h>
+        int main(void) {{
+            printf ("{fmt}", {cast} {fname}());
+            return 0;
+        }}'''
+        res = self.run(code, env, extra_args=extra_args, dependencies=dependencies)
+        if not res.compiled:
+            raise mesonlib.EnvironmentException(f'Could not get return value of {fname}()')
+        if rtype == 'string':
+            return res.stdout
+        elif rtype == 'int':
+            try:
+                return int(res.stdout.strip())
+            except ValueError:
+                raise mesonlib.EnvironmentException(f'Return value of {fname}() is not an int')
+        assert False, 'Unreachable'
+
+    @staticmethod
+    def _no_prototype_templ() -> T.Tuple[str, str]:
+        """
+        Try to find the function without a prototype from a header by defining
+        our own dummy prototype and trying to link with the C library (and
+        whatever else the compiler links in by default). This is very similar
+        to the check performed by Autoconf for AC_CHECK_FUNCS.
+        """
+        # Define the symbol to something else since it is defined by the
+        # includes or defines listed by the user or by the compiler. This may
+        # include, for instance _GNU_SOURCE which must be defined before
+        # limits.h, which includes features.h
+        # Then, undef the symbol to get rid of it completely.
+        head = '''
+        #define {func} meson_disable_define_of_{func}
+        {prefix}
+        #include <limits.h>
+        #undef {func}
+        '''
+        # Override any GCC internal prototype and declare our own definition for
+        # the symbol. Use char because that's unlikely to be an actual return
+        # value for a function which ensures that we override the definition.
+        head += '''
+        #ifdef __cplusplus
+        extern "C"
+        #endif
+        char {func} (void);
+        '''
+        # The actual function call
+        main = '''
+        int main(void) {{
+            return {func} ();
+        }}'''
+        return head, main
+
+    @staticmethod
+    def _have_prototype_templ() -> T.Tuple[str, str]:
+        """
+        Returns a header and main() call that uses the headers listed by the
+        user for the function prototype while checking if a function exists.
+        """
+        # Add the 'prefix', aka defines, includes, etc that the user provides
+        # This may include, for instance _GNU_SOURCE which must be defined
+        # before limits.h, which includes features.h
+        head = '{prefix}\n#include <limits.h>\n'
+        # We don't know what the function takes or returns, so return it as an int.
+        # Just taking the address or comparing it to void is not enough because
+        # compilers are smart enough to optimize it away. The resulting binary
+        # is not run so we don't care what the return value is.
+        main = '''\nint main(void) {{
+            void *a = (void*) &{func};
+            long long b = (long long) a;
+            return (int) b;
+        }}'''
+        return head, main
+
+    def has_function(self, funcname: str, prefix: str, env: 'Environment', *,
+                     extra_args: T.Optional[T.List[str]] = None,
+                     dependencies: T.Optional[T.List['Dependency']] = None) -> T.Tuple[bool, bool]:
+        """Determine if a function exists.
+
+        First, this function looks for the symbol in the default libraries
+        provided by the compiler (stdlib + a few others usually). If that
+        fails, it checks if any of the headers specified in the prefix provide
+        an implementation of the function, and if that fails, it checks if it's
+        implemented as a compiler-builtin.
+        """
+        if extra_args is None:
+            extra_args = []
+
+        # Short-circuit if the check is already provided by the cross-info file
+        varname = 'has function ' + funcname
+        varname = varname.replace(' ', '_')
+        if self.is_cross:
+            val = env.properties.host.get(varname, None)
+            if val is not None:
+                if isinstance(val, bool):
+                    return val, False
+                raise mesonlib.EnvironmentException(f'Cross variable {varname} is not a boolean.')
+
+        # TODO: we really need a protocol for this,
+        #
+        # class StrProto(typing.Protocol):
+        #    def __str__(self) -> str: ...
+        fargs: T.Dict[str, T.Union[str, bool, int]] = {'prefix': prefix, 'func': funcname}
+
+        # glibc defines functions that are not available on Linux as stubs that
+        # fail with ENOSYS (such as e.g. lchmod). In this case we want to fail
+        # instead of detecting the stub as a valid symbol.
+        # We already included limits.h earlier to ensure that these are defined
+        # for stub functions.
+        stubs_fail = '''
+        #if defined __stub_{func} || defined __stub___{func}
+        fail fail fail this function is not going to work
+        #endif
+        '''
+
+        # If we have any includes in the prefix supplied by the user, assume
+        # that the user wants us to use the symbol prototype defined in those
+        # includes. If not, then try to do the Autoconf-style check with
+        # a dummy prototype definition of our own.
+        # This is needed when the linker determines symbol availability from an
+        # SDK based on the prototype in the header provided by the SDK.
+        # Ignoring this prototype would result in the symbol always being
+        # marked as available.
+        if '#include' in prefix:
+            head, main = self._have_prototype_templ()
+        else:
+            head, main = self._no_prototype_templ()
+        templ = head + stubs_fail + main
+
+        res, cached = self.links(templ.format(**fargs), env, extra_args=extra_args,
+                                 dependencies=dependencies)
+        if res:
+            return True, cached
+
+        # MSVC does not have compiler __builtin_-s.
+ if self.get_id() in {'msvc', 'intel-cl'}: + return False, False + + # Detect function as a built-in + # + # Some functions like alloca() are defined as compiler built-ins which + # are inlined by the compiler and you can't take their address, so we + # need to look for them differently. On nice compilers like clang, we + # can just directly use the __has_builtin() macro. + fargs['no_includes'] = '#include' not in prefix + is_builtin = funcname.startswith('__builtin_') + fargs['is_builtin'] = is_builtin + fargs['__builtin_'] = '' if is_builtin else '__builtin_' + t = '''{prefix} + int main(void) {{ + + /* With some toolchains (MSYS2/mingw for example) the compiler + * provides various builtins which are not really implemented and + * fall back to the stdlib where they aren't provided and fail at + * build/link time. In case the user provides a header, including + * the header didn't lead to the function being defined, and the + * function we are checking isn't a builtin itself we assume the + * builtin is not functional and we just error out. */ + #if !{no_includes:d} && !defined({func}) && !{is_builtin:d} + #error "No definition for {__builtin_}{func} found in the prefix" + #endif + + #ifdef __has_builtin + #if !__has_builtin({__builtin_}{func}) + #error "{__builtin_}{func} not found" + #endif + #elif ! defined({func}) + {__builtin_}{func}; + #endif + return 0; + }}''' + return self.links(t.format(**fargs), env, extra_args=extra_args, + dependencies=dependencies) + + def has_members(self, typename: str, membernames: T.List[str], + prefix: str, env: 'Environment', *, + extra_args: T.Union[None, T.List[str], T.Callable[[CompileCheckMode], T.List[str]]] = None, + dependencies: T.Optional[T.List['Dependency']] = None) -> T.Tuple[bool, bool]: + if extra_args is None: + extra_args = [] + # Create code that accesses all members + members = ''.join(f'foo.{member};\n' for member in membernames) + t = f'''{prefix} + void bar(void) {{ + {typename} foo; + {members} + }}''' + return self.compiles(t, env, extra_args=extra_args, + dependencies=dependencies) + + def has_type(self, typename: str, prefix: str, env: 'Environment', + extra_args: T.Union[T.List[str], T.Callable[[CompileCheckMode], T.List[str]]], *, + dependencies: T.Optional[T.List['Dependency']] = None) -> T.Tuple[bool, bool]: + t = f'''{prefix} + void bar(void) {{ + sizeof({typename}); + }}''' + return self.compiles(t, env, extra_args=extra_args, + dependencies=dependencies) + + def _symbols_have_underscore_prefix_searchbin(self, env: 'Environment') -> bool: + ''' + Check if symbols have underscore prefix by compiling a small test binary + and then searching the binary for the string, + ''' + symbol_name = b'meson_uscore_prefix' + code = '''#ifdef __cplusplus + extern "C" { + #endif + void ''' + symbol_name.decode() + ''' (void) {} + #ifdef __cplusplus + } + #endif + ''' + args = self.get_compiler_check_args(CompileCheckMode.COMPILE) + n = '_symbols_have_underscore_prefix_searchbin' + with self._build_wrapper(code, env, extra_args=args, mode=CompileCheckMode.COMPILE, want_output=True) as p: + if p.returncode != 0: + raise RuntimeError(f'BUG: Unable to compile {n!r} check: {p.stderr}') + if not os.path.isfile(p.output_name): + raise RuntimeError(f'BUG: Can\'t find compiled test code for {n!r} check') + with open(p.output_name, 'rb') as o: + for line in o: + # Check if the underscore form of the symbol is somewhere + # in the output file. 
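+                    # e.g. Mach-O and 32-bit Windows objects carry
+                    # b'_meson_uscore_prefix', while ELF objects carry the
+                    # bare b'meson_uscore_prefix'.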
+ if b'_' + symbol_name in line: + mlog.debug("Underscore prefix check found prefixed function in binary") + return True + # Else, check if the non-underscored form is present + elif symbol_name in line: + mlog.debug("Underscore prefix check found non-prefixed function in binary") + return False + raise RuntimeError(f'BUG: {n!r} check did not find symbol string in binary') + + def _symbols_have_underscore_prefix_define(self, env: 'Environment') -> T.Optional[bool]: + ''' + Check if symbols have underscore prefix by querying the + __USER_LABEL_PREFIX__ define that most compilers provide + for this. Return if functions have underscore prefix or None + if it was not possible to determine, like when the compiler + does not set the define or the define has an unexpected value. + ''' + delim = '"MESON_HAVE_UNDERSCORE_DELIMITER" ' + code = f''' + #ifndef __USER_LABEL_PREFIX__ + #define MESON_UNDERSCORE_PREFIX unsupported + #else + #define MESON_UNDERSCORE_PREFIX __USER_LABEL_PREFIX__ + #endif + {delim}MESON_UNDERSCORE_PREFIX + ''' + with self._build_wrapper(code, env, mode=CompileCheckMode.PREPROCESS, want_output=False) as p: + if p.returncode != 0: + raise RuntimeError(f'BUG: Unable to preprocess _symbols_have_underscore_prefix_define check: {p.stdout}') + symbol_prefix = p.stdout.partition(delim)[-1].rstrip() + + mlog.debug(f'Queried compiler for function prefix: __USER_LABEL_PREFIX__ is "{symbol_prefix!s}"') + if symbol_prefix == '_': + return True + elif symbol_prefix == '': + return False + else: + return None + + def _symbols_have_underscore_prefix_list(self, env: 'Environment') -> T.Optional[bool]: + ''' + Check if symbols have underscore prefix by consulting a hardcoded + list of cases where we know the results. + Return if functions have underscore prefix or None if unknown. + ''' + m = env.machines[self.for_machine] + # Darwin always uses the underscore prefix, not matter what + if m.is_darwin(): + return True + # Windows uses the underscore prefix on x86 (32bit) only + if m.is_windows() or m.is_cygwin(): + return m.cpu_family == 'x86' + return None + + def symbols_have_underscore_prefix(self, env: 'Environment') -> bool: + ''' + Check if the compiler prefixes an underscore to global C symbols + ''' + # First, try to query the compiler directly + result = self._symbols_have_underscore_prefix_define(env) + if result is not None: + return result + + # Else, try to consult a hardcoded list of cases we know + # absolutely have an underscore prefix + result = self._symbols_have_underscore_prefix_list(env) + if result is not None: + return result + + # As a last resort, try search in a compiled binary, which is the + # most unreliable way of checking this, see #5482 + return self._symbols_have_underscore_prefix_searchbin(env) + + def _get_patterns(self, env: 'Environment', prefixes: T.List[str], suffixes: T.List[str], shared: bool = False) -> T.List[str]: + patterns: T.List[str] = [] + for p in prefixes: + for s in suffixes: + patterns.append(p + '{}.' + s) + if shared and env.machines[self.for_machine].is_openbsd(): + # Shared libraries on OpenBSD can be named libfoo.so.X.Y: + # https://www.openbsd.org/faq/ports/specialtopics.html#SharedLibs + # + # This globbing is probably the best matching we can do since regex + # is expensive. 
It's wrong in many edge cases, but it will match + # correctly-named libraries and hopefully no one on OpenBSD names + # their files libfoo.so.9a.7b.1.0 + for p in prefixes: + patterns.append(p + '{}.so.[0-9]*.[0-9]*') + return patterns + + def get_library_naming(self, env: 'Environment', libtype: LibType, strict: bool = False) -> T.Tuple[str, ...]: + ''' + Get library prefixes and suffixes for the target platform ordered by + priority + ''' + stlibext = ['a'] + # We've always allowed libname to be both `foo` and `libfoo`, and now + # people depend on it. Also, some people use prebuilt `foo.so` instead + # of `libfoo.so` for unknown reasons, and may also want to create + # `foo.so` by setting name_prefix to '' + if strict and not isinstance(self, VisualStudioLikeCompiler): # lib prefix is not usually used with msvc + prefixes = ['lib'] + else: + prefixes = ['lib', ''] + # Library suffixes and prefixes + if env.machines[self.for_machine].is_darwin(): + shlibext = ['dylib', 'so'] + elif env.machines[self.for_machine].is_windows(): + # FIXME: .lib files can be import or static so we should read the + # file, figure out which one it is, and reject the wrong kind. + if isinstance(self, VisualStudioLikeCompiler): + shlibext = ['lib'] + else: + shlibext = ['dll.a', 'lib', 'dll'] + # Yep, static libraries can also be foo.lib + stlibext += ['lib'] + elif env.machines[self.for_machine].is_cygwin(): + shlibext = ['dll', 'dll.a'] + prefixes = ['cyg'] + prefixes + else: + # Linux/BSDs + shlibext = ['so'] + # Search priority + if libtype is LibType.PREFER_SHARED: + patterns = self._get_patterns(env, prefixes, shlibext, True) + patterns.extend([x for x in self._get_patterns(env, prefixes, stlibext, False) if x not in patterns]) + elif libtype is LibType.PREFER_STATIC: + patterns = self._get_patterns(env, prefixes, stlibext, False) + patterns.extend([x for x in self._get_patterns(env, prefixes, shlibext, True) if x not in patterns]) + elif libtype is LibType.SHARED: + patterns = self._get_patterns(env, prefixes, shlibext, True) + else: + assert libtype is LibType.STATIC + patterns = self._get_patterns(env, prefixes, stlibext, False) + return tuple(patterns) + + @staticmethod + def _sort_shlibs_openbsd(libs: T.List[str]) -> T.List[str]: + filtered: T.List[str] = [] + for lib in libs: + # Validate file as a shared library of type libfoo.so.X.Y + ret = lib.rsplit('.so.', maxsplit=1) + if len(ret) != 2: + continue + try: + float(ret[1]) + except ValueError: + continue + filtered.append(lib) + float_cmp = lambda x: float(x.rsplit('.so.', maxsplit=1)[1]) + return sorted(filtered, key=float_cmp, reverse=True) + + @classmethod + def _get_trials_from_pattern(cls, pattern: str, directory: str, libname: str) -> T.List[Path]: + f = Path(directory) / pattern.format(libname) + # Globbing for OpenBSD + if '*' in pattern: + # NOTE: globbing matches directories and broken symlinks + # so we have to do an isfile test on it later + return [Path(x) for x in cls._sort_shlibs_openbsd(glob.glob(str(f)))] + return [f] + + @staticmethod + def _get_file_from_list(env: 'Environment', paths: T.List[Path]) -> Path: + ''' + We just check whether the library exists. We can't do a link check + because the library might have unresolved symbols that require other + libraries. On macOS we check if the library matches our target + architecture. 
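+        (Illustrative example: a libfoo.dylib that only contains an arm64
+        slice is rejected when the host cpu_family is x86_64.)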
+ ''' + # If not building on macOS for Darwin, do a simple file check + if not env.machines.host.is_darwin() or not env.machines.build.is_darwin(): + for p in paths: + if p.is_file(): + return p + # Run `lipo` and check if the library supports the arch we want + for p in paths: + if not p.is_file(): + continue + archs = mesonlib.darwin_get_object_archs(str(p)) + if archs and env.machines.host.cpu_family in archs: + return p + else: + mlog.debug(f'Rejected {p}, supports {archs} but need {env.machines.host.cpu_family}') + return None + + @functools.lru_cache() + def output_is_64bit(self, env: 'Environment') -> bool: + ''' + returns true if the output produced is 64-bit, false if 32-bit + ''' + return self.sizeof('void *', '', env)[0] == 8 + + def _find_library_real(self, libname: str, env: 'Environment', extra_dirs: T.List[str], code: str, libtype: LibType, lib_prefix_warning: bool) -> T.Optional[T.List[str]]: + # First try if we can just add the library as -l. + # Gcc + co seem to prefer builtin lib dirs to -L dirs. + # Only try to find std libs if no extra dirs specified. + # The built-in search procedure will always favour .so and then always + # search for .a. This is only allowed if libtype is LibType.PREFER_SHARED + if ((not extra_dirs and libtype is LibType.PREFER_SHARED) or + libname in self.internal_libs): + cargs = ['-l' + libname] + largs = self.get_linker_always_args() + self.get_allow_undefined_link_args() + extra_args = cargs + self.linker_to_compiler_args(largs) + + if self.links(code, env, extra_args=extra_args, disable_cache=True)[0]: + return cargs + # Don't do a manual search for internal libs + if libname in self.internal_libs: + return None + # Not found or we want to use a specific libtype? Try to find the + # library file itself. + patterns = self.get_library_naming(env, libtype) + # try to detect if we are 64-bit or 32-bit. 
If we can't + # detect, we will just skip path validity checks done in + # get_library_dirs() call + try: + if self.output_is_64bit(env): + elf_class = 2 + else: + elf_class = 1 + except (mesonlib.MesonException, KeyError): # TODO evaluate if catching KeyError is wanted here + elf_class = 0 + # Search in the specified dirs, and then in the system libraries + for d in itertools.chain(extra_dirs, self.get_library_dirs(env, elf_class)): + for p in patterns: + trials = self._get_trials_from_pattern(p, d, libname) + if not trials: + continue + trial = self._get_file_from_list(env, trials) + if not trial: + continue + if libname.startswith('lib') and trial.name.startswith(libname) and lib_prefix_warning: + mlog.warning(f'find_library({libname!r}) starting in "lib" only works by accident and is not portable') + return [trial.as_posix()] + return None + + def _find_library_impl(self, libname: str, env: 'Environment', extra_dirs: T.List[str], + code: str, libtype: LibType, lib_prefix_warning: bool) -> T.Optional[T.List[str]]: + # These libraries are either built-in or invalid + if libname in self.ignore_libs: + return [] + if isinstance(extra_dirs, str): + extra_dirs = [extra_dirs] + key = (tuple(self.exelist), libname, tuple(extra_dirs), code, libtype) + if key not in self.find_library_cache: + value = self._find_library_real(libname, env, extra_dirs, code, libtype, lib_prefix_warning) + self.find_library_cache[key] = value + else: + value = self.find_library_cache[key] + if value is None: + return None + return value.copy() + + def find_library(self, libname: str, env: 'Environment', extra_dirs: T.List[str], + libtype: LibType = LibType.PREFER_SHARED, lib_prefix_warning: bool = True) -> T.Optional[T.List[str]]: + code = 'int main(void) { return 0; }\n' + return self._find_library_impl(libname, env, extra_dirs, code, libtype, lib_prefix_warning) + + def find_framework_paths(self, env: 'Environment') -> T.List[str]: + ''' + These are usually /Library/Frameworks and /System/Library/Frameworks, + unless you select a particular macOS SDK with the -isysroot flag. + You can also add to this by setting -F in CFLAGS. + ''' + # TODO: this really needs to be *AppleClang*, not just any clang. 
+ if self.id != 'clang': + raise mesonlib.MesonException('Cannot find framework path with non-clang compiler') + # Construct the compiler command-line + commands = self.get_exelist(ccache=False) + ['-v', '-E', '-'] + commands += self.get_always_args() + # Add CFLAGS/CXXFLAGS/OBJCFLAGS/OBJCXXFLAGS from the env + commands += env.coredata.get_external_args(self.for_machine, self.language) + mlog.debug('Finding framework path by running: ', ' '.join(commands), '\n') + os_env = os.environ.copy() + os_env['LC_ALL'] = 'C' + _, _, stde = mesonlib.Popen_safe(commands, env=os_env, stdin=subprocess.PIPE) + paths: T.List[str] = [] + for line in stde.split('\n'): + if '(framework directory)' not in line: + continue + # line is of the form: + # ` /path/to/framework (framework directory)` + paths.append(line[:-21].strip()) + return paths + + def _find_framework_real(self, name: str, env: 'Environment', extra_dirs: T.List[str], allow_system: bool) -> T.Optional[T.List[str]]: + code = 'int main(void) { return 0; }' + link_args: T.List[str] = [] + for d in extra_dirs: + link_args += ['-F' + d] + # We can pass -Z to disable searching in the system frameworks, but + # then we must also pass -L/usr/lib to pick up libSystem.dylib + extra_args = [] if allow_system else ['-Z', '-L/usr/lib'] + link_args += ['-framework', name] + if self.links(code, env, extra_args=(extra_args + link_args), disable_cache=True)[0]: + return link_args + return None + + def _find_framework_impl(self, name: str, env: 'Environment', extra_dirs: T.List[str], + allow_system: bool) -> T.Optional[T.List[str]]: + if isinstance(extra_dirs, str): + extra_dirs = [extra_dirs] + key = (tuple(self.exelist), name, tuple(extra_dirs), allow_system) + if key in self.find_framework_cache: + value = self.find_framework_cache[key] + else: + value = self._find_framework_real(name, env, extra_dirs, allow_system) + self.find_framework_cache[key] = value + if value is None: + return None + return value.copy() + + def find_framework(self, name: str, env: 'Environment', extra_dirs: T.List[str], + allow_system: bool = True) -> T.Optional[T.List[str]]: + ''' + Finds the framework with the specified name, and returns link args for + the same or returns None when the framework is not found. + ''' + # TODO: should probably check for macOS? + return self._find_framework_impl(name, env, extra_dirs, allow_system) + + def get_crt_compile_args(self, crt_val: str, buildtype: str) -> T.List[str]: + # TODO: does this belong here or in GnuLike or maybe PosixLike? + return [] + + def get_crt_link_args(self, crt_val: str, buildtype: str) -> T.List[str]: + # TODO: does this belong here or in GnuLike or maybe PosixLike? + return [] + + def thread_flags(self, env: 'Environment') -> T.List[str]: + # TODO: does this belong here or in GnuLike or maybe PosixLike? + host_m = env.machines[self.for_machine] + if host_m.is_haiku() or host_m.is_darwin(): + return [] + return ['-pthread'] + + def linker_to_compiler_args(self, args: T.List[str]) -> T.List[str]: + return args.copy() + + def has_arguments(self, args: T.List[str], env: 'Environment', code: str, + mode: CompileCheckMode) -> T.Tuple[bool, bool]: + return self.compiles(code, env, extra_args=args, mode=mode) + + def _has_multi_arguments(self, args: T.List[str], env: 'Environment', code: str) -> T.Tuple[bool, bool]: + new_args: T.List[str] = [] + for arg in args: + # some compilers, e.g. 
GCC, don't warn for unsupported warning-disable + # flags, so when we are testing a flag like "-Wno-forgotten-towel", also + # check the equivalent enable flag too "-Wforgotten-towel" + if arg.startswith('-Wno-'): + new_args.append('-W' + arg[5:]) + if arg.startswith('-Wl,'): + mlog.warning(f'{arg} looks like a linker argument, ' + 'but has_argument and other similar methods only ' + 'support checking compiler arguments. Using them ' + 'to check linker arguments are never supported, ' + 'and results are likely to be wrong regardless of ' + 'the compiler you are using. has_link_argument or ' + 'other similar method can be used instead.') + new_args.append(arg) + return self.has_arguments(new_args, env, code, mode=CompileCheckMode.COMPILE) + + def has_multi_arguments(self, args: T.List[str], env: 'Environment') -> T.Tuple[bool, bool]: + return self._has_multi_arguments(args, env, 'extern int i;\nint i;\n') + + def _has_multi_link_arguments(self, args: T.List[str], env: 'Environment', code: str) -> T.Tuple[bool, bool]: + # First time we check for link flags we need to first check if we have + # --fatal-warnings, otherwise some linker checks could give some + # false positive. + args = self.linker.fatal_warnings() + args + args = self.linker_to_compiler_args(args) + return self.has_arguments(args, env, code, mode=CompileCheckMode.LINK) + + def has_multi_link_arguments(self, args: T.List[str], env: 'Environment') -> T.Tuple[bool, bool]: + return self._has_multi_link_arguments(args, env, 'int main(void) { return 0; }\n') + + @staticmethod + def _concatenate_string_literals(s: str) -> str: + pattern = re.compile(r'(?P
.*([^\\]")|^")(?P([^\\"]|\\.)*)"\s+"(?P([^\\"]|\\.)*)(?P".*)')
+        ret = s
+        m = pattern.match(ret)
+        while m:
+            ret = ''.join(m.group('pre', 'str1', 'str2', 'post'))
+            m = pattern.match(ret)
+        return ret
+
+    def get_has_func_attribute_extra_args(self, name: str) -> T.List[str]:
+        # Most compilers (such as GCC and Clang) only warn about unknown or
+        # ignored attributes, so force an error. Overridden in GCC and Clang
+        # mixins.
+        return ['-Werror']
+
+    def has_func_attribute(self, name: str, env: 'Environment') -> T.Tuple[bool, bool]:
+        # Just assume that if we're not on Windows, dllimport and dllexport
+        # don't work
+        m = env.machines[self.for_machine]
+        if not (m.is_windows() or m.is_cygwin()):
+            if name in {'dllimport', 'dllexport'}:
+                return False, False
+
+        return self.compiles(self.attribute_check_func(name), env,
+                             extra_args=self.get_has_func_attribute_extra_args(name))
+
+    def get_assert_args(self, disable: bool) -> T.List[str]:
+        if disable:
+            return ['-DNDEBUG']
+        return []
+
+    @functools.lru_cache(maxsize=None)
+    def can_compile(self, src: 'mesonlib.FileOrString') -> bool:
+        # Files we preprocess can be anything, e.g. .in
+        if self.mode == CompileCheckMode.PREPROCESS:
+            return True
+        return super().can_compile(src)
+
+    def get_preprocessor(self) -> Compiler:
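+        # Lazily create and cache a copy of this compiler that only runs the
+        # preprocessor; repeated calls return the same instance.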
+        if not self.preprocessor:
+            self.preprocessor = copy.copy(self)
+            self.preprocessor.exelist = self.exelist + self.get_preprocess_to_file_args()
+            self.preprocessor.mode = CompileCheckMode.PREPROCESS
+            self.modes.append(self.preprocessor)
+        return self.preprocessor
diff --git a/vendored-meson/meson/mesonbuild/compilers/mixins/compcert.py b/vendored-meson/meson/mesonbuild/compilers/mixins/compcert.py
new file mode 100644
index 000000000000..ac4d5aaa07d5
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/compilers/mixins/compcert.py
@@ -0,0 +1,139 @@
+# Copyright 2012-2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+"""Representations specific to the CompCert C compiler family."""
+
+import os
+import re
+import typing as T
+
+if T.TYPE_CHECKING:
+    from ...envconfig import MachineInfo
+    from ...environment import Environment
+    from ...compilers.compilers import Compiler
+else:
+    # This is a bit clever, for mypy we pretend that these mixins descend from
+    # Compiler, so we get all of the methods and attributes defined for us, but
+    # for runtime we make them descend from object (which all classes normally
+    # do). This gives us DRYer type checking, with no runtime impact
+    Compiler = object
+
+ccomp_buildtype_args: T.Dict[str, T.List[str]] = {
+    'plain': [''],
+    'debug': ['-O0', '-g'],
+    'debugoptimized': ['-O0', '-g'],
+    'release': ['-O3'],
+    'minsize': ['-Os'],
+    'custom': ['-Obranchless'],
+}
+
+ccomp_optimization_args: T.Dict[str, T.List[str]] = {
+    'plain': [],
+    '0': ['-O0'],
+    'g': ['-O0'],
+    '1': ['-O1'],
+    '2': ['-O2'],
+    '3': ['-O3'],
+    's': ['-Os']
+}
+
+ccomp_debug_args: T.Dict[bool, T.List[str]] = {
+    False: [],
+    True: ['-g']
+}
+
+# As of CompCert 20.04, these arguments should be passed to the underlying gcc linker (via -WUl,)
+# There are probably (many) more, but these are those used by picolibc
+ccomp_args_to_wul: T.List[str] = [
+        r"^-ffreestanding$",
+        r"^-r$"
+]
+
+class CompCertCompiler(Compiler):
+
+    id = 'ccomp'
+
+    def __init__(self) -> None:
+        # Assembly
+        self.can_compile_suffixes.add('s')
+        self.can_compile_suffixes.add('sx')
+        default_warn_args: T.List[str] = []
+        self.warn_args: T.Dict[str, T.List[str]] = {
+            '0': [],
+            '1': default_warn_args,
+            '2': default_warn_args + [],
+            '3': default_warn_args + [],
+            'everything': default_warn_args + []}
+
+    def get_always_args(self) -> T.List[str]:
+        return []
+
+    def get_pic_args(self) -> T.List[str]:
+        # As of now, CompCert does not support PIC
+        return []
+
+    def get_buildtype_args(self, buildtype: str) -> T.List[str]:
+        return ccomp_buildtype_args[buildtype]
+
+    def get_pch_suffix(self) -> str:
+        return 'pch'
+
+    def get_pch_use_args(self, pch_dir: str, header: str) -> T.List[str]:
+        return []
+
+    @classmethod
+    def _unix_args_to_native(cls, args: T.List[str], info: MachineInfo) -> T.List[str]:
+        "Always returns a copy that can be independently mutated"
+        patched_args: T.List[str] = []
+        for arg in args:
+            added = 0
+            for ptrn in ccomp_args_to_wul:
+                if re.match(ptrn, arg):
+                    patched_args.append('-WUl,' + arg)
+                    added = 1
+            if not added:
+                patched_args.append(arg)
+        return patched_args
+
+    def thread_flags(self, env: 'Environment') -> T.List[str]:
+        return []
+
+    def get_preprocess_only_args(self) -> T.List[str]:
+        return ['-E']
+
+    def get_compile_only_args(self) -> T.List[str]:
+        return ['-c']
+
+    def get_coverage_args(self) -> T.List[str]:
+        return []
+
+    def get_no_stdinc_args(self) -> T.List[str]:
+        return ['-nostdinc']
+
+    def get_no_stdlib_link_args(self) -> T.List[str]:
+        return ['-nostdlib']
+
+    def get_optimization_args(self, optimization_level: str) -> T.List[str]:
+        return ccomp_optimization_args[optimization_level]
+
+    def get_debug_args(self, is_debug: bool) -> T.List[str]:
+        return ccomp_debug_args[is_debug]
+
+    def compute_parameters_with_absolute_paths(self, parameter_list: T.List[str], build_dir: str) -> T.List[str]:
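+        # e.g. with build_dir='/path/to/build', '-Iinclude' is rewritten
+        # to '-I/path/to/build/include'.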
+        for idx, i in enumerate(parameter_list):
+            if i[:2] == '-I':
+                parameter_list[idx] = i[:2] + os.path.normpath(os.path.join(build_dir, i[2:]))
+
+        return parameter_list
diff --git a/vendored-meson/meson/mesonbuild/compilers/mixins/elbrus.py b/vendored-meson/meson/mesonbuild/compilers/mixins/elbrus.py
new file mode 100644
index 000000000000..ad6b7ca05511
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/compilers/mixins/elbrus.py
@@ -0,0 +1,101 @@
+# Copyright 2019 The meson development team
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+"""Abstractions for the Elbrus family of compilers."""
+
+import os
+import typing as T
+import subprocess
+import re
+
+from .gnu import GnuLikeCompiler
+from .gnu import gnu_optimization_args
+from ...mesonlib import Popen_safe, OptionKey
+
+if T.TYPE_CHECKING:
+    from ...environment import Environment
+    from ...coredata import KeyedOptionDictType
+
+
+class ElbrusCompiler(GnuLikeCompiler):
+    # Elbrus compiler is nearly like GCC, but does not support
+    # PCH, LTO, sanitizers and color output as of version 1.21.x.
+
+    id = 'lcc'
+
+    def __init__(self) -> None:
+        super().__init__()
+        self.base_options = {OptionKey(o) for o in ['b_pgo', 'b_coverage', 'b_ndebug', 'b_staticpic', 'b_lundef', 'b_asneeded']}
+        default_warn_args = ['-Wall']
+        self.warn_args = {'0': [],
+                          '1': default_warn_args,
+                          '2': default_warn_args + ['-Wextra'],
+                          '3': default_warn_args + ['-Wextra', '-Wpedantic'],
+                          'everything': default_warn_args + ['-Wextra', '-Wpedantic']}
+
+    # FIXME: use _build_wrapper to call this so that linker flags from the env
+    # get applied
+    def get_library_dirs(self, env: 'Environment', elf_class: T.Optional[int] = None) -> T.List[str]:
+        os_env = os.environ.copy()
+        os_env['LC_ALL'] = 'C'
+        stdo = Popen_safe(self.get_exelist(ccache=False) + ['--print-search-dirs'], env=os_env)[1]
+        for line in stdo.split('\n'):
+            if line.startswith('libraries:'):
+                # lcc does not include '=' in --print-search-dirs output. Also it could show nonexistent dirs.
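+                # e.g. a (hypothetical) line 'libraries: /usr/lib64:/usr/lib'
+                # yields whichever of those dirs actually exist, as real paths.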
+                libstr = line.split(' ', 1)[1]
+                return [os.path.realpath(p) for p in libstr.split(':') if os.path.exists(p)]
+        return []
+
+    def get_program_dirs(self, env: 'Environment') -> T.List[str]:
+        os_env = os.environ.copy()
+        os_env['LC_ALL'] = 'C'
+        stdo = Popen_safe(self.get_exelist(ccache=False) + ['--print-search-dirs'], env=os_env)[1]
+        for line in stdo.split('\n'):
+            if line.startswith('programs:'):
+                # lcc does not include '=' in --print-search-dirs output.
+                libstr = line.split(' ', 1)[1]
+                return [os.path.realpath(p) for p in libstr.split(':')]
+        return []
+
+    def get_default_include_dirs(self) -> T.List[str]:
+        os_env = os.environ.copy()
+        os_env['LC_ALL'] = 'C'
+        p = subprocess.Popen(self.get_exelist(ccache=False) + ['-xc', '-E', '-v', '-'], env=os_env, stdin=subprocess.DEVNULL, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
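+        # lcc reports its system include dirs on stderr as '--sys_include <dir>'
+        # lines (possibly with backslash continuations), which we parse below.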
+        stderr = p.stderr.read().decode('utf-8', errors='replace')
+        includes: T.List[str] = []
+        for line in stderr.split('\n'):
+            if line.lstrip().startswith('--sys_include'):
+                includes.append(re.sub(r'\s*\\$', '', re.sub(r'^\s*--sys_include\s*', '', line)))
+        return includes
+
+    def get_optimization_args(self, optimization_level: str) -> T.List[str]:
+        return gnu_optimization_args[optimization_level]
+
+    def get_prelink_args(self, prelink_name: str, obj_list: T.List[str]) -> T.List[str]:
+        return ['-r', '-nodefaultlibs', '-nostartfiles', '-o', prelink_name] + obj_list
+
+    def get_pch_suffix(self) -> str:
+        # It's not actually supported for now, but will probably be supported in the future
+        return 'pch'
+
+    def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+        args: T.List[str] = []
+        std = options[OptionKey('std', lang=self.language, machine=self.for_machine)]
+        if std.value != 'none':
+            args.append('-std=' + std.value)
+        return args
+
+    def openmp_flags(self) -> T.List[str]:
+        return ['-fopenmp']
diff --git a/vendored-meson/meson/mesonbuild/compilers/mixins/emscripten.py b/vendored-meson/meson/mesonbuild/compilers/mixins/emscripten.py
new file mode 100644
index 000000000000..fef22b9073ba
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/compilers/mixins/emscripten.py
@@ -0,0 +1,100 @@
+# Copyright 2019 The meson development team
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+"""Provides a mixin for shared code between C and C++ Emscripten compilers."""
+
+import os.path
+import typing as T
+
+from ... import coredata
+from ... import mesonlib
+from ...mesonlib import OptionKey
+from ...mesonlib import LibType
+from mesonbuild.compilers.compilers import CompileCheckMode
+
+if T.TYPE_CHECKING:
+    from ...environment import Environment
+    from ...compilers.compilers import Compiler
+    from ...dependencies import Dependency
+else:
+    # This is a bit clever, for mypy we pretend that these mixins descend from
+    # Compiler, so we get all of the methods and attributes defined for us, but
+    # for runtime we make them descend from object (which all classes normally
+    # do). This gives us DRYer type checking, with no runtime impact
+    Compiler = object
+
+
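+# e.g. wrap_js_includes(['-O2', 'pre.js']) -> ['-O2', '--js-library', 'pre.js'];
+# non-JS arguments are passed through unchanged.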
+def wrap_js_includes(args: T.List[str]) -> T.List[str]:
+    final_args: T.List[str] = []
+    for i in args:
+        if i.endswith('.js') and not i.startswith('-'):
+            final_args += ['--js-library', i]
+        else:
+            final_args += [i]
+    return final_args
+
+class EmscriptenMixin(Compiler):
+
+    def _get_compile_output(self, dirname: str, mode: CompileCheckMode) -> str:
+        assert mode != CompileCheckMode.PREPROCESS, 'In pre-processor mode, the output is sent to stdout and discarded'
+        # Unlike sane toolchains, emcc infers the kind of output from its name.
+        # This is the only reason why this method is overridden; compiler tests
+        # do not work well with the default exe/obj suffixes.
+        if mode == CompileCheckMode.LINK:
+            suffix = 'js'
+        else:
+            suffix = 'o'
+        return os.path.join(dirname, 'output.' + suffix)
+
+    def thread_link_flags(self, env: 'Environment') -> T.List[str]:
+        args = ['-pthread']
+        count: int = env.coredata.options[OptionKey('thread_count', lang=self.language, machine=self.for_machine)].value
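+        # e.g. a thread_count of 8 yields ['-pthread', '-sPTHREAD_POOL_SIZE=8'];
+        # a count of 0 emits only '-pthread'.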
+        if count:
+            args.append(f'-sPTHREAD_POOL_SIZE={count}')
+        return args
+
+    def get_options(self) -> 'coredata.MutableKeyedOptionDictType':
+        opts = super().get_options()
+        key = OptionKey('thread_count', machine=self.for_machine, lang=self.language)
+        opts.update({
+            key: coredata.UserIntegerOption(
+                'Number of threads to use in web assembly, set to 0 to disable',
+                (0, None, 4),  # Default was picked at random
+            ),
+        })
+
+        return opts
+
+    @classmethod
+    def native_args_to_unix(cls, args: T.List[str]) -> T.List[str]:
+        return wrap_js_includes(super().native_args_to_unix(args))
+
+    def get_dependency_link_args(self, dep: 'Dependency') -> T.List[str]:
+        return wrap_js_includes(super().get_dependency_link_args(dep))
+
+    def find_library(self, libname: str, env: 'Environment', extra_dirs: T.List[str],
+                     libtype: LibType = LibType.PREFER_SHARED, lib_prefix_warning: bool = True) -> T.Optional[T.List[str]]:
+        if not libname.endswith('.js'):
+            return super().find_library(libname, env, extra_dirs, libtype, lib_prefix_warning)
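+        # From here on libname is a JS library (e.g. 'library.js'): it must be
+        # an absolute path or be found in one of extra_dirs.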
+        if os.path.isabs(libname):
+            if os.path.exists(libname):
+                return [libname]
+        if len(extra_dirs) == 0:
+            raise mesonlib.EnvironmentException('Looking up Emscripten JS libraries requires either an absolute path or specifying extra_dirs.')
+        for d in extra_dirs:
+            abs_path = os.path.join(d, libname)
+            if os.path.exists(abs_path):
+                return [abs_path]
+        return None
diff --git a/vendored-meson/meson/mesonbuild/compilers/mixins/gnu.py b/vendored-meson/meson/mesonbuild/compilers/mixins/gnu.py
new file mode 100644
index 000000000000..2b187327b8d7
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/compilers/mixins/gnu.py
@@ -0,0 +1,654 @@
+# Copyright 2019-2022 The meson development team
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+"""Provides mixins for GNU compilers and GNU-like compilers."""
+
+import abc
+import functools
+import os
+import multiprocessing
+import pathlib
+import re
+import subprocess
+import typing as T
+
+from ... import mesonlib
+from ... import mlog
+from ...mesonlib import OptionKey
+from mesonbuild.compilers.compilers import CompileCheckMode
+
+if T.TYPE_CHECKING:
+    from ..._typing import ImmutableListProtocol
+    from ...environment import Environment
+    from ..compilers import Compiler
+else:
+    # This is a bit clever, for mypy we pretend that these mixins descend from
+    # Compiler, so we get all of the methods and attributes defined for us, but
+    # for runtime we make them descend from object (which all classes normally
+    # do). This gives us DRYer type checking, with no runtime impact
+    Compiler = object
+
+# XXX: prevent circular references.
+# FIXME: this really is a posix interface not a c-like interface
+clike_debug_args: T.Dict[bool, T.List[str]] = {
+    False: [],
+    True: ['-g'],
+}
+
+gnulike_buildtype_args: T.Dict[str, T.List[str]] = {
+    'plain': [],
+    'debug': [],
+    'debugoptimized': [],
+    'release': [],
+    'minsize': [],
+    'custom': [],
+}
+
+gnu_optimization_args: T.Dict[str, T.List[str]] = {
+    'plain': [],
+    '0': ['-O0'],
+    'g': ['-Og'],
+    '1': ['-O1'],
+    '2': ['-O2'],
+    '3': ['-O3'],
+    's': ['-Os'],
+}
+
+gnulike_instruction_set_args: T.Dict[str, T.List[str]] = {
+    'mmx': ['-mmmx'],
+    'sse': ['-msse'],
+    'sse2': ['-msse2'],
+    'sse3': ['-msse3'],
+    'ssse3': ['-mssse3'],
+    'sse41': ['-msse4.1'],
+    'sse42': ['-msse4.2'],
+    'avx': ['-mavx'],
+    'avx2': ['-mavx2'],
+    'neon': ['-mfpu=neon'],
+}
+
+gnu_symbol_visibility_args: T.Dict[str, T.List[str]] = {
+    '': [],
+    'default': ['-fvisibility=default'],
+    'internal': ['-fvisibility=internal'],
+    'hidden': ['-fvisibility=hidden'],
+    'protected': ['-fvisibility=protected'],
+    'inlineshidden': ['-fvisibility=hidden', '-fvisibility-inlines-hidden'],
+}
+
+gnu_color_args: T.Dict[str, T.List[str]] = {
+    'auto': ['-fdiagnostics-color=auto'],
+    'always': ['-fdiagnostics-color=always'],
+    'never': ['-fdiagnostics-color=never'],
+}
+
+# Warnings collected from the GCC source and documentation.  This is an
+# objective set of all the warning flags that apply to general projects: the
+# only ones omitted are those that require a project-specific value, or are
+# related to non-standard or legacy language support.  This behaves roughly
+# like -Weverything in clang.  Warnings implied by -Wall, -Wextra, or
+# higher-level warnings already enabled here are not included in these lists to
+# keep them as short as possible.  History goes back to GCC 3.0.0; everything
+# earlier is considered historical and listed under version 0.0.0.
+
+# GCC warnings for all C-family languages
+# Omitted non-general warnings:
+#   -Wabi=
+#   -Waggregate-return
+#   -Walloc-size-larger-than=BYTES
+#   -Walloca-larger-than=BYTES
+#   -Wframe-larger-than=BYTES
+#   -Wlarger-than=BYTES
+#   -Wstack-usage=BYTES
+#   -Wsystem-headers
+#   -Wtrampolines
+#   -Wvla-larger-than=BYTES
+#
+# Omitted warnings enabled elsewhere in meson:
+#   -Winvalid-pch (GCC 3.4.0)
+gnu_common_warning_args: T.Dict[str, T.List[str]] = {
+    "0.0.0": [
+        "-Wcast-qual",
+        "-Wconversion",
+        "-Wfloat-equal",
+        "-Wformat=2",
+        "-Winline",
+        "-Wmissing-declarations",
+        "-Wredundant-decls",
+        "-Wshadow",
+        "-Wundef",
+        "-Wuninitialized",
+        "-Wwrite-strings",
+    ],
+    "3.0.0": [
+        "-Wdisabled-optimization",
+        "-Wpacked",
+        "-Wpadded",
+    ],
+    "3.3.0": [
+        "-Wmultichar",
+        "-Wswitch-default",
+        "-Wswitch-enum",
+        "-Wunused-macros",
+    ],
+    "4.0.0": [
+        "-Wmissing-include-dirs",
+    ],
+    "4.1.0": [
+        "-Wunsafe-loop-optimizations",
+        "-Wstack-protector",
+    ],
+    "4.2.0": [
+        "-Wstrict-overflow=5",
+    ],
+    "4.3.0": [
+        "-Warray-bounds=2",
+        "-Wlogical-op",
+        "-Wstrict-aliasing=3",
+        "-Wvla",
+    ],
+    "4.6.0": [
+        "-Wdouble-promotion",
+        "-Wsuggest-attribute=const",
+        "-Wsuggest-attribute=noreturn",
+        "-Wsuggest-attribute=pure",
+        "-Wtrampolines",
+    ],
+    "4.7.0": [
+        "-Wvector-operation-performance",
+    ],
+    "4.8.0": [
+        "-Wsuggest-attribute=format",
+    ],
+    "4.9.0": [
+        "-Wdate-time",
+    ],
+    "5.1.0": [
+        "-Wformat-signedness",
+        "-Wnormalized=nfc",
+    ],
+    "6.1.0": [
+        "-Wduplicated-cond",
+        "-Wnull-dereference",
+        "-Wshift-negative-value",
+        "-Wshift-overflow=2",
+        "-Wunused-const-variable=2",
+    ],
+    "7.1.0": [
+        "-Walloca",
+        "-Walloc-zero",
+        "-Wformat-overflow=2",
+        "-Wformat-truncation=2",
+        "-Wstringop-overflow=3",
+    ],
+    "7.2.0": [
+        "-Wduplicated-branches",
+    ],
+    "8.1.0": [
+        "-Wcast-align=strict",
+        "-Wsuggest-attribute=cold",
+        "-Wsuggest-attribute=malloc",
+    ],
+    "9.1.0": [
+        "-Wattribute-alias=2",
+    ],
+    "10.1.0": [
+        "-Wanalyzer-too-complex",
+        "-Warith-conversion",
+    ],
+    "12.1.0": [
+        "-Wbidi-chars=ucn",
+        "-Wopenacc-parallelism",
+        "-Wtrivial-auto-var-init",
+    ],
+}
+
+# GCC warnings for C
+# Omitted non-general or legacy warnings:
+#   -Wc11-c2x-compat
+#   -Wc90-c99-compat
+#   -Wc99-c11-compat
+#   -Wdeclaration-after-statement
+#   -Wtraditional
+#   -Wtraditional-conversion
+gnu_c_warning_args: T.Dict[str, T.List[str]] = {
+    "0.0.0": [
+        "-Wbad-function-cast",
+        "-Wmissing-prototypes",
+        "-Wnested-externs",
+        "-Wstrict-prototypes",
+    ],
+    "3.4.0": [
+        "-Wold-style-definition",
+        "-Winit-self",
+    ],
+    "4.1.0": [
+        "-Wc++-compat",
+    ],
+    "4.5.0": [
+        "-Wunsuffixed-float-constants",
+    ],
+}
+
+# GCC warnings for C++
+# Omitted non-general or legacy warnings:
+#   -Wc++0x-compat
+#   -Wc++1z-compat
+#   -Wc++2a-compat
+#   -Wctad-maybe-unsupported
+#   -Wnamespaces
+#   -Wtemplates
+gnu_cpp_warning_args: T.Dict[str, T.List[str]] = {
+    "0.0.0": [
+        "-Wctor-dtor-privacy",
+        "-Weffc++",
+        "-Wnon-virtual-dtor",
+        "-Wold-style-cast",
+        "-Woverloaded-virtual",
+        "-Wsign-promo",
+    ],
+    "4.0.1": [
+        "-Wstrict-null-sentinel",
+    ],
+    "4.6.0": [
+        "-Wnoexcept",
+    ],
+    "4.7.0": [
+        "-Wzero-as-null-pointer-constant",
+    ],
+    "4.8.0": [
+        "-Wabi-tag",
+        "-Wuseless-cast",
+    ],
+    "4.9.0": [
+        "-Wconditionally-supported",
+    ],
+    "5.1.0": [
+        "-Wsuggest-final-methods",
+        "-Wsuggest-final-types",
+        "-Wsuggest-override",
+    ],
+    "6.1.0": [
+        "-Wmultiple-inheritance",
+        "-Wplacement-new=2",
+        "-Wvirtual-inheritance",
+    ],
+    "7.1.0": [
+        "-Waligned-new=all",
+        "-Wnoexcept-type",
+        "-Wregister",
+    ],
+    "8.1.0": [
+        "-Wcatch-value=3",
+        "-Wextra-semi",
+    ],
+    "9.1.0": [
+        "-Wdeprecated-copy-dtor",
+        "-Wredundant-move",
+    ],
+    "10.1.0": [
+        "-Wcomma-subscript",
+        "-Wmismatched-tags",
+        "-Wredundant-tags",
+        "-Wvolatile",
+    ],
+    "11.1.0": [
+        "-Wdeprecated-enum-enum-conversion",
+        "-Wdeprecated-enum-float-conversion",
+        "-Winvalid-imported-macros",
+    ],
+}
+
+# GCC warnings for Objective C and Objective C++
+# Omitted non-general or legacy warnings:
+#   -Wtraditional
+#   -Wtraditional-conversion
+gnu_objc_warning_args: T.Dict[str, T.List[str]] = {
+    "0.0.0": [
+        "-Wselector",
+    ],
+    "3.3": [
+        "-Wundeclared-selector",
+    ],
+    "4.1.0": [
+        "-Wassign-intercept",
+        "-Wstrict-selector-match",
+    ],
+}
+
+_LANG_MAP = {
+    'c': 'c',
+    'cpp': 'c++',
+    'objc': 'objective-c',
+    'objcpp': 'objective-c++'
+}
+
+@functools.lru_cache(maxsize=None)
+def gnulike_default_include_dirs(compiler: T.Tuple[str, ...], lang: str) -> 'ImmutableListProtocol[str]':
+    if lang not in _LANG_MAP:
+        return []
+    lang = _LANG_MAP[lang]
+    env = os.environ.copy()
+    env["LC_ALL"] = 'C'
+    cmd = list(compiler) + [f'-x{lang}', '-E', '-v', '-']
+    _, stdout, _ = mesonlib.Popen_safe(cmd, stderr=subprocess.STDOUT, env=env)
+    parse_state = 0
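+    # parse_state: 0 = before any include list, 1 = inside the
+    # '#include "..."' list, 2 = inside the '#include <...>' list.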
+    paths: T.List[str] = []
+    for line in stdout.split('\n'):
+        line = line.strip(' \n\r\t')
+        if parse_state == 0:
+            if line == '#include "..." search starts here:':
+                parse_state = 1
+        elif parse_state == 1:
+            if line == '#include <...> search starts here:':
+                parse_state = 2
+            else:
+                paths.append(line)
+        elif parse_state == 2:
+            if line == 'End of search list.':
+                break
+            else:
+                paths.append(line)
+    if not paths:
+        mlog.warning('No include directory found parsing "{cmd}" output'.format(cmd=" ".join(cmd)))
+    # Append a normalized copy of paths to make path lookup easier
+    paths += [os.path.normpath(x) for x in paths]
+    return paths
+
+
+class GnuLikeCompiler(Compiler, metaclass=abc.ABCMeta):
+    """
+    GnuLikeCompiler is a common interface to all compilers implementing
+    the GNU-style commandline interface. This includes GCC, Clang
+    and ICC. Certain functionality between them is different and requires
+    that the actual concrete subclass define their own implementation.
+    """
+
+    LINKER_PREFIX = '-Wl,'
+
+    def __init__(self) -> None:
+        self.base_options = {
+            OptionKey(o) for o in ['b_pch', 'b_lto', 'b_pgo', 'b_coverage',
+                                   'b_ndebug', 'b_staticpic', 'b_pie']}
+        if not (self.info.is_windows() or self.info.is_cygwin() or self.info.is_openbsd()):
+            self.base_options.add(OptionKey('b_lundef'))
+        if not self.info.is_windows() or self.info.is_cygwin():
+            self.base_options.add(OptionKey('b_asneeded'))
+        if not self.info.is_hurd():
+            self.base_options.add(OptionKey('b_sanitize'))
+        # All GCC-like backends can do assembly
+        self.can_compile_suffixes.add('s')
+        self.can_compile_suffixes.add('sx')
+
+    def get_pic_args(self) -> T.List[str]:
+        if self.info.is_windows() or self.info.is_cygwin() or self.info.is_darwin():
+            return [] # On Windows and OS X, PIC is always on.
+        return ['-fPIC']
+
+    def get_pie_args(self) -> T.List[str]:
+        return ['-fPIE']
+
+    def get_buildtype_args(self, buildtype: str) -> T.List[str]:
+        return gnulike_buildtype_args[buildtype]
+
+    @abc.abstractmethod
+    def get_optimization_args(self, optimization_level: str) -> T.List[str]:
+        pass
+
+    def get_debug_args(self, is_debug: bool) -> T.List[str]:
+        return clike_debug_args[is_debug]
+
+    @abc.abstractmethod
+    def get_pch_suffix(self) -> str:
+        pass
+
+    def split_shlib_to_parts(self, fname: str) -> T.Tuple[str, str]:
+        return os.path.dirname(fname), fname
+
+    def get_instruction_set_args(self, instruction_set: str) -> T.Optional[T.List[str]]:
+        return gnulike_instruction_set_args.get(instruction_set, None)
+
+    def get_default_include_dirs(self) -> T.List[str]:
+        return gnulike_default_include_dirs(tuple(self.get_exelist(ccache=False)), self.language).copy()
+
+    @abc.abstractmethod
+    def openmp_flags(self) -> T.List[str]:
+        pass
+
+    def gnu_symbol_visibility_args(self, vistype: str) -> T.List[str]:
+        if vistype == 'inlineshidden' and self.language not in {'cpp', 'objcpp'}:
+            vistype = 'hidden'
+        return gnu_symbol_visibility_args[vistype]
+
+    def gen_vs_module_defs_args(self, defsfile: str) -> T.List[str]:
+        if not isinstance(defsfile, str):
+            raise RuntimeError('Module definitions file should be str')
+        # On Windows targets, .def files may be specified on the linker command
+        # line like an object file.
+        if self.info.is_windows() or self.info.is_cygwin():
+            return [defsfile]
+        # For other targets, discard the .def file.
+        return []
+
+    def get_argument_syntax(self) -> str:
+        return 'gcc'
+
+    def get_profile_generate_args(self) -> T.List[str]:
+        return ['-fprofile-generate']
+
+    def get_profile_use_args(self) -> T.List[str]:
+        return ['-fprofile-use']
+
+    def get_gui_app_args(self, value: bool) -> T.List[str]:
+        return ['-mwindows' if value else '-mconsole']
+
+    def compute_parameters_with_absolute_paths(self, parameter_list: T.List[str], build_dir: str) -> T.List[str]:
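+        # e.g. with build_dir='/tmp/build', '-I../include' becomes
+        # '-I/tmp/include' and '-Llibs' becomes '-L/tmp/build/libs'.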
+        for idx, i in enumerate(parameter_list):
+            if i[:2] == '-I' or i[:2] == '-L':
+                parameter_list[idx] = i[:2] + os.path.normpath(os.path.join(build_dir, i[2:]))
+
+        return parameter_list
+
+    @functools.lru_cache()
+    def _get_search_dirs(self, env: 'Environment') -> str:
+        extra_args = ['--print-search-dirs']
+        with self._build_wrapper('', env, extra_args=extra_args,
+                                 dependencies=None, mode=CompileCheckMode.COMPILE,
+                                 want_output=True) as p:
+            return p.stdout
+
+    def _split_fetch_real_dirs(self, pathstr: str) -> T.List[str]:
+        # We need to use the path separator used by the compiler for printing
+        # lists of paths ("gcc --print-search-dirs"). By default
+        # we assume it uses the platform native separator.
+        pathsep = os.pathsep
+
+        # clang uses ':' instead of ';' on Windows https://reviews.llvm.org/D61121
+        # so we need to repair things like 'C:\foo:C:\bar'
+        if pathsep == ';':
+            pathstr = re.sub(r':([^/\\])', r';\1', pathstr)
+
+        # pathlib treats empty paths as '.', so filter those out
+        paths = [p for p in pathstr.split(pathsep) if p]
+
+        result: T.List[str] = []
+        for p in paths:
+            # GCC returns paths like this:
+            # /usr/lib/gcc/x86_64-linux-gnu/8/../../../../x86_64-linux-gnu/lib
+            # It would make sense to normalize them to get rid of the .. parts
+            # Sadly when you are on a merged /usr fs it also kills these:
+            # /lib/x86_64-linux-gnu
+            # since /lib is a symlink to /usr/lib. This would mean
+            # paths under /lib would be considered not a "system path",
+            # which is wrong and breaks things. Store everything, just to be sure.
+            pobj = pathlib.Path(p)
+            unresolved = pobj.as_posix()
+            if pobj.exists():
+                if unresolved not in result:
+                    result.append(unresolved)
+                try:
+                    resolved = pathlib.Path(p).resolve().as_posix()
+                    if resolved not in result:
+                        result.append(resolved)
+                except FileNotFoundError:
+                    pass
+        return result
+
+    def get_compiler_dirs(self, env: 'Environment', name: str) -> T.List[str]:
+        '''
+        Get dirs from the compiler, either `libraries:` or `programs:`
+        '''
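+        # e.g. name='libraries' selects the 'libraries: =/dir1:/dir2' line of
+        # the --print-search-dirs output and returns its directories.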
+        stdo = self._get_search_dirs(env)
+        for line in stdo.split('\n'):
+            if line.startswith(name + ':'):
+                return self._split_fetch_real_dirs(line.split('=', 1)[1])
+        return []
+
+    def get_lto_compile_args(self, *, threads: int = 0, mode: str = 'default') -> T.List[str]:
+        # This provides a base for many compilers, GCC and Clang override this
+        # for their specific arguments
+        return ['-flto']
+
+    def sanitizer_compile_args(self, value: str) -> T.List[str]:
+        if value == 'none':
+            return []
+        args = ['-fsanitize=' + value]
+        if 'address' in value:  # for -fsanitize=address,undefined
+            args.append('-fno-omit-frame-pointer')
+        return args
+
+    def get_output_args(self, outputname: str) -> T.List[str]:
+        return ['-o', outputname]
+
+    def get_dependency_gen_args(self, outtarget: str, outfile: str) -> T.List[str]:
+        return ['-MD', '-MQ', outtarget, '-MF', outfile]
+
+    def get_compile_only_args(self) -> T.List[str]:
+        return ['-c']
+
+    def get_include_args(self, path: str, is_system: bool) -> T.List[str]:
+        if not path:
+            path = '.'
+        if is_system:
+            return ['-isystem' + path]
+        return ['-I' + path]
+
+    @classmethod
+    def use_linker_args(cls, linker: str, version: str) -> T.List[str]:
+        if linker not in {'gold', 'bfd', 'lld'}:
+            raise mesonlib.MesonException(
+                f'Unsupported linker, only bfd, gold, and lld are supported, not {linker}.')
+        return [f'-fuse-ld={linker}']
+
+    def get_coverage_args(self) -> T.List[str]:
+        return ['--coverage']
+
+    def get_preprocess_to_file_args(self) -> T.List[str]:
+        # We want to allow preprocessing files with any extension, such as
+        # foo.c.in. In that case we need to tell GCC/CLANG to treat them as
+        # an assembly file.
+        lang = _LANG_MAP.get(self.language, 'assembler-with-cpp')
+        return self.get_preprocess_only_args() + [f'-x{lang}']
+
+
+class GnuCompiler(GnuLikeCompiler):
+    """
+    GnuCompiler represents an actual GCC in its many incarnations.
+    Compilers imitating GCC (Clang/Intel) should use the GnuLikeCompiler ABC.
+    """
+    id = 'gcc'
+
+    def __init__(self, defines: T.Optional[T.Dict[str, str]]):
+        super().__init__()
+        self.defines = defines or {}
+        self.base_options.update({OptionKey('b_colorout'), OptionKey('b_lto_threads')})
+
+    def get_colorout_args(self, colortype: str) -> T.List[str]:
+        if mesonlib.version_compare(self.version, '>=4.9.0'):
+            return gnu_color_args[colortype][:]
+        return []
+
+    def get_warn_args(self, level: str) -> T.List[str]:
+        # Mypy doesn't understand cooperative inheritance
+        args = super().get_warn_args(level)
+        if mesonlib.version_compare(self.version, '<4.8.0') and '-Wpedantic' in args:
+            # -Wpedantic was added in 4.8.0
+            # https://gcc.gnu.org/gcc-4.8/changes.html
+            args[args.index('-Wpedantic')] = '-pedantic'
+        return args
+
+    def supported_warn_args(self, warn_args_by_version: T.Dict[str, T.List[str]]) -> T.List[str]:
+        result: T.List[str] = []
+        for version, warn_args in warn_args_by_version.items():
+            if mesonlib.version_compare(self.version, '>=' + version):
+                result += warn_args
+        return result
+
+    def has_builtin_define(self, define: str) -> bool:
+        return define in self.defines
+
+    def get_builtin_define(self, define: str) -> T.Optional[str]:
+        if define in self.defines:
+            return self.defines[define]
+        return None
+
+    def get_optimization_args(self, optimization_level: str) -> T.List[str]:
+        return gnu_optimization_args[optimization_level]
+
+    def get_pch_suffix(self) -> str:
+        return 'gch'
+
+    def openmp_flags(self) -> T.List[str]:
+        return ['-fopenmp']
+
+    def has_arguments(self, args: T.List[str], env: 'Environment', code: str,
+                      mode: CompileCheckMode) -> T.Tuple[bool, bool]:
+        # For some compiler command line arguments, the GNU compilers will
+        # emit a warning on stderr indicating that an option is valid for
+        # another language, but still complete with exit_success
+        with self._build_wrapper(code, env, args, None, mode) as p:
+            result = p.returncode == 0
+            if self.language in {'cpp', 'objcpp'} and 'is valid for C/ObjC' in p.stderr:
+                result = False
+            if self.language in {'c', 'objc'} and 'is valid for C++/ObjC++' in p.stderr:
+                result = False
+        return result, p.cached
+
+    def get_has_func_attribute_extra_args(self, name: str) -> T.List[str]:
+        # GCC only warns about unknown or ignored attributes, so force an
+        # error.
+        return ['-Werror=attributes']
+
+    def get_prelink_args(self, prelink_name: str, obj_list: T.List[str]) -> T.List[str]:
+        return ['-r', '-o', prelink_name] + obj_list
+
+    def get_lto_compile_args(self, *, threads: int = 0, mode: str = 'default') -> T.List[str]:
+        if threads == 0:
+            if mesonlib.version_compare(self.version, '>= 10.0'):
+                return ['-flto=auto']
+            # This matches clang's behavior of using the number of cpus
+            return [f'-flto={multiprocessing.cpu_count()}']
+        elif threads > 0:
+            return [f'-flto={threads}']
+        return super().get_lto_compile_args(threads=threads)
+
+    @classmethod
+    def use_linker_args(cls, linker: str, version: str) -> T.List[str]:
+        if linker == 'mold' and mesonlib.version_compare(version, '>=12.0.1'):
+            return ['-fuse-ld=mold']
+        return super().use_linker_args(linker, version)
+
+    def get_profile_use_args(self) -> T.List[str]:
+        return super().get_profile_use_args() + ['-fprofile-correction']
diff --git a/vendored-meson/meson/mesonbuild/compilers/mixins/intel.py b/vendored-meson/meson/mesonbuild/compilers/mixins/intel.py
new file mode 100644
index 000000000000..9af05e01cdf9
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/compilers/mixins/intel.py
@@ -0,0 +1,185 @@
+# Copyright 2019 The meson development team
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+"""Abstractions for the Intel Compiler families.
+
+Intel provides both a posix/gcc-like compiler (ICC) for macOS and Linux,
+handled by the IntelGnuLikeCompiler mixin, and an msvc-like compiler (ICL)
+for Windows, handled by the IntelVisualStudioLikeCompiler mixin.
+"""
+
+import os
+import typing as T
+
+from ... import mesonlib
+from ..compilers import CompileCheckMode
+from .gnu import GnuLikeCompiler
+from .visualstudio import VisualStudioLikeCompiler
+
+# XXX: avoid circular dependencies
+# TODO: this belongs in a posix compiler class
+# NOTE: the default Intel optimization is -O2, unlike GNU which defaults to -O0.
+# this can be surprising, particularly for debug builds, so we specify the
+# default as -O0.
+# https://software.intel.com/en-us/cpp-compiler-developer-guide-and-reference-o
+# https://software.intel.com/en-us/cpp-compiler-developer-guide-and-reference-g
+# https://software.intel.com/en-us/fortran-compiler-developer-guide-and-reference-o
+# https://software.intel.com/en-us/fortran-compiler-developer-guide-and-reference-g
+# https://software.intel.com/en-us/fortran-compiler-developer-guide-and-reference-traceback
+# https://gcc.gnu.org/onlinedocs/gcc/Optimize-Options.html
+
+
+class IntelGnuLikeCompiler(GnuLikeCompiler):
+    """
+    Tested on linux for ICC 14.0.3, 15.0.6, 16.0.4, 17.0.1, 19.0
+    debugoptimized: -g -O2
+    release: -O3
+    minsize: -O2
+    """
+
+    BUILD_ARGS: T.Dict[str, T.List[str]] = {
+        'plain': [],
+        'debug': ["-g", "-traceback"],
+        'debugoptimized': ["-g", "-traceback"],
+        'release': [],
+        'minsize': [],
+        'custom': [],
+    }
+
+    OPTIM_ARGS: T.Dict[str, T.List[str]] = {
+        'plain': [],
+        '0': ['-O0'],
+        'g': ['-O0'],
+        '1': ['-O1'],
+        '2': ['-O2'],
+        '3': ['-O3'],
+        's': ['-Os'],
+    }
+    id = 'intel'
+
+    def __init__(self) -> None:
+        super().__init__()
+        # As of 19.0.0 ICC doesn't have sanitizer, color, or LTO support.
+        #
+        # It does have IPO, which serves much the same purpose as LTO, but
+        # there is an unfortunate rule for using IPO (you can't control the
+        # name of the output file) which breaks assumptions meson makes
+        self.base_options = {mesonlib.OptionKey(o) for o in [
+            'b_pch', 'b_lundef', 'b_asneeded', 'b_pgo', 'b_coverage',
+            'b_ndebug', 'b_staticpic', 'b_pie']}
+        self.lang_header = 'none'
+
+    def get_pch_suffix(self) -> str:
+        return 'pchi'
+
+    def get_pch_use_args(self, pch_dir: str, header: str) -> T.List[str]:
+        return ['-pch', '-pch_dir', os.path.join(pch_dir), '-x',
+                self.lang_header, '-include', header, '-x', 'none']
+
+    def get_pch_name(self, name: str) -> str:
+        return os.path.basename(name) + '.' + self.get_pch_suffix()
+
+    def openmp_flags(self) -> T.List[str]:
+        if mesonlib.version_compare(self.version, '>=15.0.0'):
+            return ['-qopenmp']
+        else:
+            return ['-openmp']
+
+    def get_compiler_check_args(self, mode: CompileCheckMode) -> T.List[str]:
+        extra_args = [
+            '-diag-error', '10006',  # ignoring unknown option
+            '-diag-error', '10148',  # Option not supported
+            '-diag-error', '10155',  # ignoring argument required
+            '-diag-error', '10156',  # ignoring not argument allowed
+            '-diag-error', '10157',  # Ignoring argument of the wrong type
+            '-diag-error', '10158',  # Argument must be separate. Can be hit by trying an option like -foo-bar=foo when -foo=bar is a valid option but -foo-bar isn't
+        ]
+        return super().get_compiler_check_args(mode) + extra_args
+
+    def get_profile_generate_args(self) -> T.List[str]:
+        return ['-prof-gen=threadsafe']
+
+    def get_profile_use_args(self) -> T.List[str]:
+        return ['-prof-use']
+
+    def get_buildtype_args(self, buildtype: str) -> T.List[str]:
+        return self.BUILD_ARGS[buildtype]
+
+    def get_optimization_args(self, optimization_level: str) -> T.List[str]:
+        return self.OPTIM_ARGS[optimization_level]
+
+    def get_has_func_attribute_extra_args(self, name: str) -> T.List[str]:
+        return ['-diag-error', '1292']
+
+
+class IntelVisualStudioLikeCompiler(VisualStudioLikeCompiler):
+
+    """Abstractions for ICL, the Intel compiler on Windows."""
+
+    BUILD_ARGS: T.Dict[str, T.List[str]] = {
+        'plain': [],
+        'debug': ["/Zi", "/traceback"],
+        'debugoptimized': ["/Zi", "/traceback"],
+        'release': [],
+        'minsize': [],
+        'custom': [],
+    }
+
+    OPTIM_ARGS: T.Dict[str, T.List[str]] = {
+        'plain': [],
+        '0': ['/Od'],
+        'g': ['/Od'],
+        '1': ['/O1'],
+        '2': ['/O2'],
+        '3': ['/O3'],
+        's': ['/Os'],
+    }
+
+    id = 'intel-cl'
+
+    def get_compiler_check_args(self, mode: CompileCheckMode) -> T.List[str]:
+        args = super().get_compiler_check_args(mode)
+        if mode is not CompileCheckMode.LINK:
+            args.extend([
+                '/Qdiag-error:10006',  # ignoring unknown option
+                '/Qdiag-error:10148',  # Option not supported
+                '/Qdiag-error:10155',  # ignoring argument required
+                '/Qdiag-error:10156',  # ignoring not argument allowed
+                '/Qdiag-error:10157',  # Ignoring argument of the wrong type
+                '/Qdiag-error:10158',  # Argument must be separate. Can be hit by trying an option like -foo-bar=foo when -foo=bar is a valid option but -foo-bar isn't
+            ])
+        return args
+
+    def get_toolset_version(self) -> T.Optional[str]:
+        # ICL provides a cl.exe that returns the version of MSVC it tries to
+        # emulate, so we'll get the version from that and pass it to the same
+        # function the real MSVC uses to calculate the toolset version.
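+        # e.g. (illustrative) a cl.exe banner with version 19.29 gives
+        # v1='19', v2='29', so version becomes 1929.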
+        _, _, err = mesonlib.Popen_safe(['cl.exe'])
+        v1, v2, *_ = mesonlib.search_version(err).split('.')
+        version = int(v1 + v2)
+        return self._calculate_toolset_version(version)
+
+    def openmp_flags(self) -> T.List[str]:
+        return ['/Qopenmp']
+
+    def get_buildtype_args(self, buildtype: str) -> T.List[str]:
+        return self.BUILD_ARGS[buildtype]
+
+    def get_optimization_args(self, optimization_level: str) -> T.List[str]:
+        return self.OPTIM_ARGS[optimization_level]
+
+    def get_pch_base_name(self, header: str) -> str:
+        return os.path.basename(header)
diff --git a/vendored-meson/meson/mesonbuild/compilers/mixins/islinker.py b/vendored-meson/meson/mesonbuild/compilers/mixins/islinker.py
new file mode 100644
index 000000000000..cfdd746b037f
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/compilers/mixins/islinker.py
@@ -0,0 +1,130 @@
+# Copyright 2019 The Meson development team
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+"""Mixins for compilers that *are* linkers.
+
+While many compilers (such as gcc and clang) are used by meson to dispatch
+linker commands and others (like MSVC) are not, a few (such as DMD) actually
+are both the linker and compiler in one binary. This module provides mixin
+classes for those cases.
+"""
+
+import typing as T
+
+from ...mesonlib import EnvironmentException, MesonException, is_windows
+
+if T.TYPE_CHECKING:
+    from ...coredata import KeyedOptionDictType
+    from ...environment import Environment
+    from ...compilers.compilers import Compiler
+else:
+    # This is a bit clever, for mypy we pretend that these mixins descend from
+    # Compiler, so we get all of the methods and attributes defined for us, but
+    # for runtime we make them descend from object (which all classes normally
+    # do). This gives us DRYer type checking, with no runtime impact
+    Compiler = object
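+    # (Net effect, for illustration: mypy checks this module as though
+    # `class BasicLinkerIsCompilerMixin(Compiler)` were declared, while the
+    # runtime MRO only contains `object`.)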
+
+
+class BasicLinkerIsCompilerMixin(Compiler):
+
+    """Provides a baseline of methods that a linker would implement.
+
+    In every case this provides a "no" or "empty" answer. If a compiler
+    implements any of these it needs a different mixin or to override that
+    functionality itself.
+    """
+
+    def sanitizer_link_args(self, value: str) -> T.List[str]:
+        return []
+
+    def get_lto_link_args(self, *, threads: int = 0, mode: str = 'default',
+                          thinlto_cache_dir: T.Optional[str] = None) -> T.List[str]:
+        return []
+
+    def can_linker_accept_rsp(self) -> bool:
+        return is_windows()
+
+    def get_linker_exelist(self) -> T.List[str]:
+        return self.exelist.copy()
+
+    def get_linker_output_args(self, outputname: str) -> T.List[str]:
+        return []
+
+    def get_linker_always_args(self) -> T.List[str]:
+        return []
+
+    def get_linker_lib_prefix(self) -> str:
+        return ''
+
+    def get_option_link_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+        return []
+
+    def has_multi_link_args(self, args: T.List[str], env: 'Environment') -> T.Tuple[bool, bool]:
+        return False, False
+
+    def get_link_debugfile_args(self, targetfile: str) -> T.List[str]:
+        return []
+
+    def get_std_shared_lib_link_args(self) -> T.List[str]:
+        return []
+
+    def get_std_shared_module_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+        return self.get_std_shared_lib_link_args()
+
+    def get_link_whole_for(self, args: T.List[str]) -> T.List[str]:
+        raise EnvironmentException(f'Linker {self.id} does not support link_whole')
+
+    def get_allow_undefined_link_args(self) -> T.List[str]:
+        raise EnvironmentException(f'Linker {self.id} does not support allow undefined')
+
+    def get_pie_link_args(self) -> T.List[str]:
+        raise EnvironmentException(f'Linker {self.id} does not support position-independent executable')
+
+    def get_undefined_link_args(self) -> T.List[str]:
+        return []
+
+    def get_coverage_link_args(self) -> T.List[str]:
+        return []
+
+    def no_undefined_link_args(self) -> T.List[str]:
+        return []
+
+    def bitcode_args(self) -> T.List[str]:
+        raise MesonException("This linker doesn't support bitcode bundles")
+
+    def get_soname_args(self, env: 'Environment', prefix: str, shlib_name: str,
+                        suffix: str, soversion: str,
+                        darwin_versions: T.Tuple[str, str]) -> T.List[str]:
+        raise MesonException("This linker doesn't support soname args")
+
+    def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
+                         rpath_paths: T.Tuple[str, ...], build_rpath: str,
+                         install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
+        return ([], set())
+
+    def get_asneeded_args(self) -> T.List[str]:
+        return []
+
+    def get_buildtype_linker_args(self, buildtype: str) -> T.List[str]:
+        return []
+
+    def get_link_debugfile_name(self, targetfile: str) -> T.Optional[str]:
+        return None
+
+    def thread_flags(self, env: 'Environment') -> T.List[str]:
+        return []
+
+    def thread_link_flags(self, env: 'Environment') -> T.List[str]:
+        return []
diff --git a/vendored-meson/meson/mesonbuild/compilers/mixins/metrowerks.py b/vendored-meson/meson/mesonbuild/compilers/mixins/metrowerks.py
new file mode 100644
index 000000000000..83a1c1d7fcbd
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/compilers/mixins/metrowerks.py
@@ -0,0 +1,301 @@
+# Copyright 2012-2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+"""Representations specific to the Metrowerks/Freescale Embedded C/C++ compiler family."""
+
+import os
+import typing as T
+
+from ...mesonlib import EnvironmentException, OptionKey
+
+if T.TYPE_CHECKING:
+    from ...envconfig import MachineInfo
+    from ...compilers.compilers import Compiler, CompileCheckMode
+else:
+    # This is a bit clever, for mypy we pretend that these mixins descend from
+    # Compiler, so we get all of the methods and attributes defined for us, but
+    # for runtime we make them descend from object (which all classes normally
+    # do). This gives us DRYer type checking, with no runtime impact
+    Compiler = object
+
+mwcc_buildtype_args: T.Dict[str, T.List[str]] = {
+    'plain': [],
+    'debug': ['-g'],
+    'debugoptimized': ['-g', '-O4'],
+    'release': ['-O4,p'],
+    'minsize': ['-Os'],
+    'custom': [],
+}
+
+mwccarm_instruction_set_args: T.Dict[str, T.List[str]] = {
+    'generic': ['-proc', 'generic'],
+    'v4': ['-proc', 'v4'],
+    'v4t': ['-proc', 'v4t'],
+    'v5t': ['-proc', 'v5t'],
+    'v5te': ['-proc', 'v5te'],
+    'v6': ['-proc', 'v6'],
+    'arm7tdmi': ['-proc', 'arm7tdmi'],
+    'arm710t': ['-proc', 'arm710t'],
+    'arm720t': ['-proc', 'arm720t'],
+    'arm740t': ['-proc', 'arm740t'],
+    'arm7ej': ['-proc', 'arm7ej'],
+    'arm9tdmi': ['-proc', 'arm9tdmi'],
+    'arm920t': ['-proc', 'arm920t'],
+    'arm922t': ['-proc', 'arm922t'],
+    'arm940t': ['-proc', 'arm940t'],
+    'arm9ej': ['-proc', 'arm9ej'],
+    'arm926ej': ['-proc', 'arm926ej'],
+    'arm946e': ['-proc', 'arm946e'],
+    'arm966e': ['-proc', 'arm966e'],
+    'arm1020e': ['-proc', 'arm1020e'],
+    'arm1022e': ['-proc', 'arm1022e'],
+    'arm1026ej': ['-proc', 'arm1026ej'],
+    'dbmx1': ['-proc', 'dbmx1'],
+    'dbmxl': ['-proc', 'dbmxl'],
+    'XScale': ['-proc', 'XScale'],
+    'pxa255': ['-proc', 'pxa255'],
+    'pxa261': ['-proc', 'pxa261'],
+    'pxa262': ['-proc', 'pxa262'],
+    'pxa263': ['-proc', 'pxa263']
+}
+
+mwcceppc_instruction_set_args: T.Dict[str, T.List[str]] = {
+    'generic': ['-proc', 'generic'],
+    '401': ['-proc', '401'],
+    '403': ['-proc', '403'],
+    '505': ['-proc', '505'],
+    '509': ['-proc', '509'],
+    '555': ['-proc', '555'],
+    '601': ['-proc', '601'],
+    '602': ['-proc', '602'],
+    '603': ['-proc', '603'],
+    '603e': ['-proc', '603e'],
+    '604': ['-proc', '604'],
+    '604e': ['-proc', '604e'],
+    '740': ['-proc', '740'],
+    '750': ['-proc', '750'],
+    '801': ['-proc', '801'],
+    '821': ['-proc', '821'],
+    '823': ['-proc', '823'],
+    '850': ['-proc', '850'],
+    '860': ['-proc', '860'],
+    '7400': ['-proc', '7400'],
+    '7450': ['-proc', '7450'],
+    '8240': ['-proc', '8240'],
+    '8260': ['-proc', '8260'],
+    'e500': ['-proc', 'e500'],
+    'gekko': ['-proc', 'gekko'],
+}
+
+mwasmarm_instruction_set_args: T.Dict[str, T.List[str]] = {
+    'arm4': ['-proc', 'arm4'],
+    'arm4t': ['-proc', 'arm4t'],
+    'arm4xm': ['-proc', 'arm4xm'],
+    'arm4txm': ['-proc', 'arm4txm'],
+    'arm5': ['-proc', 'arm5'],
+    'arm5T': ['-proc', 'arm5T'],
+    'arm5xM': ['-proc', 'arm5xM'],
+    'arm5TxM': ['-proc', 'arm5TxM'],
+    'arm5TE': ['-proc', 'arm5TE'],
+    'arm5TExP': ['-proc', 'arm5TExP'],
+    'arm6': ['-proc', 'arm6'],
+    'xscale': ['-proc', 'xscale']
+}
+
+mwasmeppc_instruction_set_args: T.Dict[str, T.List[str]] = {
+    '401': ['-proc', '401'],
+    '403': ['-proc', '403'],
+    '505': ['-proc', '505'],
+    '509': ['-proc', '509'],
+    '555': ['-proc', '555'],
+    '56X': ['-proc', '56X'],
+    '601': ['-proc', '601'],
+    '602': ['-proc', '602'],
+    '603': ['-proc', '603'],
+    '603e': ['-proc', '603e'],
+    '604': ['-proc', '604'],
+    '604e': ['-proc', '604e'],
+    '740': ['-proc', '740'],
+    '74X': ['-proc', '74X'],
+    '750': ['-proc', '750'],
+    '75X': ['-proc', '75X'],
+    '801': ['-proc', '801'],
+    '821': ['-proc', '821'],
+    '823': ['-proc', '823'],
+    '850': ['-proc', '850'],
+    '85X': ['-proc', '85X'],
+    '860': ['-proc', '860'],
+    '86X': ['-proc', '86X'],
+    '87X': ['-proc', '87X'],
+    '88X': ['-proc', '88X'],
+    '5100': ['-proc', '5100'],
+    '5200': ['-proc', '5200'],
+    '7400': ['-proc', '7400'],
+    '744X': ['-proc', '744X'],
+    '7450': ['-proc', '7450'],
+    '745X': ['-proc', '745X'],
+    '82XX': ['-proc', '82XX'],
+    '8240': ['-proc', '8240'],
+    '824X': ['-proc', '824X'],
+    '8260': ['-proc', '8260'],
+    '827X': ['-proc', '827X'],
+    '8280': ['-proc', '8280'],
+    'e300': ['-proc', 'e300'],
+    'e300c2': ['-proc', 'e300c2'],
+    'e300c3': ['-proc', 'e300c3'],
+    'e300c4': ['-proc', 'e300c4'],
+    'e600': ['-proc', 'e600'],
+    '85xx': ['-proc', '85xx'],
+    'e500': ['-proc', 'e500'],
+    'e500v2': ['-proc', 'e500v2'],
+    'Zen': ['-proc', 'Zen'],
+    '5565': ['-proc', '5565'],
+    '5674': ['-proc', '5674'],
+    'gekko': ['-proc', 'gekko'],
+    'generic': ['-proc', 'generic'],
+}
+
+mwcc_optimization_args: T.Dict[str, T.List[str]] = {
+    'plain': [],
+    '0': ['-O0'],
+    'g': ['-Op'],
+    '1': ['-O1'],
+    '2': ['-O2'],
+    '3': ['-O3'],
+    's': ['-Os']
+}
+
+mwcc_debug_args: T.Dict[bool, T.List[str]] = {
+    False: [],
+    True: ['-g']
+}
+
+
+class MetrowerksCompiler(Compiler):
+    id = 'mwcc'
+
+    # These compilers can actually invoke the linker, but they choke on
+    # linker-specific flags. So it's best to invoke the linker directly
+    INVOKES_LINKER = False
+
+    def __init__(self) -> None:
+        if not self.is_cross:
+            raise EnvironmentException(f'{self.id} supports only cross-compilation.')
+
+        self.base_options = {
+            OptionKey(o) for o in ['b_pch', 'b_ndebug']}
+
+        default_warn_args: T.List[str] = []
+        self.warn_args: T.Dict[str, T.List[str]] = {
+            '0': ['-w', 'off'],
+            '1': default_warn_args,
+            '2': default_warn_args + ['-w', 'most'],
+            '3': default_warn_args + ['-w', 'all'],
+            'everything': default_warn_args + ['-w', 'full']}
+
+    def depfile_for_object(self, objfile: str) -> T.Optional[str]:
+        # Earlier versions of these compilers do not support specifying
+        # a custom name for a depfile, and can only generate '.d'
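+        # (e.g. depfile_for_object('foo/bar.o') -> 'foo/bar.d')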
+        return os.path.splitext(objfile)[0] + '.' + self.get_depfile_suffix()
+
+    def get_always_args(self) -> T.List[str]:
+        return ['-gccinc']
+
+    def get_buildtype_args(self, buildtype: str) -> T.List[str]:
+        return mwcc_buildtype_args[buildtype]
+
+    def get_compiler_check_args(self, mode: CompileCheckMode) -> T.List[str]:
+        return []
+
+    def get_compile_only_args(self) -> T.List[str]:
+        return ['-c']
+
+    def get_debug_args(self, is_debug: bool) -> T.List[str]:
+        return mwcc_debug_args[is_debug]
+
+    def get_dependency_gen_args(self, outtarget: str, outfile: str) -> T.List[str]:
+        # Check comment in depfile_for_object()
+        return ['-gccdep', '-MD']
+
+    def get_depfile_suffix(self) -> str:
+        return 'd'
+
+    def get_include_args(self, path: str, is_system: bool) -> T.List[str]:
+        if not path:
+            path = '.'
+        return ['-I' + path]
+
+    def get_no_optimization_args(self) -> T.List[str]:
+        return ['-opt', 'off']
+
+    def get_no_stdinc_args(self) -> T.List[str]:
+        return ['-nostdinc']
+
+    def get_no_stdlib_link_args(self) -> T.List[str]:
+        return ['-nostdlib']
+
+    def get_optimization_args(self, optimization_level: str) -> T.List[str]:
+        return mwcc_optimization_args[optimization_level]
+
+    def get_output_args(self, outputname: str) -> T.List[str]:
+        return ['-o', outputname]
+
+    def get_pic_args(self) -> T.List[str]:
+        return ['-pic']
+
+    def get_preprocess_only_args(self) -> T.List[str]:
+        return ['-E']
+
+    def get_preprocess_to_file_args(self) -> T.List[str]:
+        return ['-P']
+
+    def get_pch_use_args(self, pch_dir: str, header: str) -> T.List[str]:
+        return ['-prefix', self.get_pch_name(header)]
+
+    def get_pch_name(self, name: str) -> str:
+        return os.path.basename(name) + '.' + self.get_pch_suffix()
+
+    def get_pch_suffix(self) -> str:
+        return 'mch'
+
+    def get_warn_args(self, level: str) -> T.List[str]:
+        return self.warn_args[level]
+
+    def get_werror_args(self) -> T.List[str]:
+        return ['-w', 'error']
+
+    @classmethod
+    def _unix_args_to_native(cls, args: T.List[str], info: MachineInfo) -> T.List[str]:
+        result: T.List[str] = []
+        for i in args:
+            if i.startswith('-D'):
+                i = '-D' + i[2:]
+            if i.startswith('-I'):
+                i = '-I' + i[2:]
+            if i.startswith('-Wl,-rpath='):
+                continue
+            elif i == '--print-search-dirs':
+                continue
+            elif i.startswith('-L'):
+                continue
+            result.append(i)
+        return result
+
+    def compute_parameters_with_absolute_paths(self, parameter_list: T.List[str], build_dir: str) -> T.List[str]:
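+        # Illustrative behaviour, assuming build_dir='/work/build':
+        # '-Iinclude' -> '-I/work/build/include'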
+        for idx, i in enumerate(parameter_list):
+            if i[:2] == '-I':
+                parameter_list[idx] = i[:2] + os.path.normpath(os.path.join(build_dir, i[2:]))
+
+        return parameter_list
diff --git a/vendored-meson/meson/mesonbuild/compilers/mixins/pgi.py b/vendored-meson/meson/mesonbuild/compilers/mixins/pgi.py
new file mode 100644
index 000000000000..2fa736c5866e
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/compilers/mixins/pgi.py
@@ -0,0 +1,113 @@
+# Copyright 2019 The meson development team
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+"""Abstractions for the PGI family of compilers."""
+
+import typing as T
+import os
+from pathlib import Path
+
+from ..compilers import clike_debug_args, clike_optimization_args
+from ...mesonlib import OptionKey
+
+if T.TYPE_CHECKING:
+    from ...environment import Environment
+    from ...compilers.compilers import Compiler
+else:
+    # This is a bit clever, for mypy we pretend that these mixins descend from
+    # Compiler, so we get all of the methods and attributes defined for us, but
+    # for runtime we make them descend from object (which all classes normally
+    # do). This gives us DRYer type checking, with no runtime impact
+    Compiler = object
+
+pgi_buildtype_args = {
+    'plain': [],
+    'debug': [],
+    'debugoptimized': [],
+    'release': [],
+    'minsize': [],
+    'custom': [],
+}  # type: T.Dict[str, T.List[str]]
+
+
+class PGICompiler(Compiler):
+
+    id = 'pgi'
+
+    def __init__(self) -> None:
+        self.base_options = {OptionKey('b_pch')}
+
+        default_warn_args = ['-Minform=inform']
+        self.warn_args: T.Dict[str, T.List[str]] = {
+            '0': [],
+            '1': default_warn_args,
+            '2': default_warn_args,
+            '3': default_warn_args,
+            'everything': default_warn_args
+        }
+
+    def get_module_incdir_args(self) -> T.Tuple[str]:
+        return ('-module', )
+
+    def get_no_warn_args(self) -> T.List[str]:
+        return ['-silent']
+
+    def gen_import_library_args(self, implibname: str) -> T.List[str]:
+        return []
+
+    def get_pic_args(self) -> T.List[str]:
+        # PGI -fPIC is Linux only.
+        if self.info.is_linux():
+            return ['-fPIC']
+        return []
+
+    def openmp_flags(self) -> T.List[str]:
+        return ['-mp']
+
+    def get_buildtype_args(self, buildtype: str) -> T.List[str]:
+        return pgi_buildtype_args[buildtype]
+
+    def get_optimization_args(self, optimization_level: str) -> T.List[str]:
+        return clike_optimization_args[optimization_level]
+
+    def get_debug_args(self, is_debug: bool) -> T.List[str]:
+        return clike_debug_args[is_debug]
+
+    def compute_parameters_with_absolute_paths(self, parameter_list: T.List[str], build_dir: str) -> T.List[str]:
+        for idx, i in enumerate(parameter_list):
+            if i[:2] == '-I' or i[:2] == '-L':
+                parameter_list[idx] = i[:2] + os.path.normpath(os.path.join(build_dir, i[2:]))
+        return parameter_list
+
+    def get_always_args(self) -> T.List[str]:
+        return []
+
+    def get_pch_suffix(self) -> str:
+        # PGI defaults to .pch suffix for PCH on Linux and Windows with --pch option
+        return 'pch'
+
+    def get_pch_use_args(self, pch_dir: str, header: str) -> T.List[str]:
+        # PGI supports PCH for C++ only.
+        hdr = Path(pch_dir).resolve().parent / header
+        if self.language == 'cpp':
+            return ['--pch',
+                    '--pch_dir', str(hdr.parent),
+                    f'-I{hdr.parent}']
+        else:
+            return []
+
+    def thread_flags(self, env: 'Environment') -> T.List[str]:
+        # PGI cannot accept -pthread, it's already threaded
+        return []
diff --git a/vendored-meson/meson/mesonbuild/compilers/mixins/ti.py b/vendored-meson/meson/mesonbuild/compilers/mixins/ti.py
new file mode 100644
index 000000000000..ae23c8416bd1
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/compilers/mixins/ti.py
@@ -0,0 +1,152 @@
+# Copyright 2012-2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+"""Representations specific to the Texas Instruments compiler family."""
+
+import os
+import typing as T
+
+from ...mesonlib import EnvironmentException
+
+if T.TYPE_CHECKING:
+    from ...envconfig import MachineInfo
+    from ...environment import Environment
+    from ...compilers.compilers import Compiler
+else:
+    # This is a bit clever, for mypy we pretend that these mixins descend from
+    # Compiler, so we get all of the methods and attributes defined for us, but
+    # for runtime we make them descend from object (which all classes normally
+    # do). This gives us DRYer type checking, with no runtime impact
+    Compiler = object
+
+ti_buildtype_args: T.Dict[str, T.List[str]] = {
+    'plain': [],
+    'debug': [],
+    'debugoptimized': [],
+    'release': [],
+    'minsize': [],
+    'custom': [],
+}
+
+ti_optimization_args: T.Dict[str, T.List[str]] = {
+    'plain': [],
+    '0': ['-O0'],
+    'g': ['-Ooff'],
+    '1': ['-O1'],
+    '2': ['-O2'],
+    '3': ['-O3'],
+    's': ['-O4']
+}
+
+ti_debug_args: T.Dict[bool, T.List[str]] = {
+    False: [],
+    True: ['-g']
+}
+
+
+class TICompiler(Compiler):
+
+    id = 'ti'
+
+    def __init__(self) -> None:
+        if not self.is_cross:
+            raise EnvironmentException('TI compilers only support cross-compilation.')
+
+        self.can_compile_suffixes.add('asm')    # Assembly
+        self.can_compile_suffixes.add('cla')    # Control Law Accelerator (CLA) used in C2000
+
+        default_warn_args: T.List[str] = []
+        self.warn_args: T.Dict[str, T.List[str]] = {
+            '0': [],
+            '1': default_warn_args,
+            '2': default_warn_args + [],
+            '3': default_warn_args + [],
+            'everything': default_warn_args + []}
+
+    def get_pic_args(self) -> T.List[str]:
+        # PIC support is not enabled by default for TI compilers,
+        # if users want to use it, they need to add the required arguments explicitly
+        return []
+
+    def get_buildtype_args(self, buildtype: str) -> T.List[str]:
+        return ti_buildtype_args[buildtype]
+
+    def get_pch_suffix(self) -> str:
+        return 'pch'
+
+    def get_pch_use_args(self, pch_dir: str, header: str) -> T.List[str]:
+        return []
+
+    def thread_flags(self, env: 'Environment') -> T.List[str]:
+        return []
+
+    def get_coverage_args(self) -> T.List[str]:
+        return []
+
+    def get_no_stdinc_args(self) -> T.List[str]:
+        return []
+
+    def get_no_stdlib_link_args(self) -> T.List[str]:
+        return []
+
+    def get_optimization_args(self, optimization_level: str) -> T.List[str]:
+        return ti_optimization_args[optimization_level]
+
+    def get_debug_args(self, is_debug: bool) -> T.List[str]:
+        return ti_debug_args[is_debug]
+
+    def get_compile_only_args(self) -> T.List[str]:
+        return []
+
+    def get_no_optimization_args(self) -> T.List[str]:
+        return ['-Ooff']
+
+    def get_output_args(self, outputname: str) -> T.List[str]:
+        return [f'--output_file={outputname}']
+
+    def get_werror_args(self) -> T.List[str]:
+        return ['--emit_warnings_as_errors']
+
+    def get_include_args(self, path: str, is_system: bool) -> T.List[str]:
+        if path == '':
+            path = '.'
+        return ['-I=' + path]
+
+    @classmethod
+    def _unix_args_to_native(cls, args: T.List[str], info: MachineInfo) -> T.List[str]:
+        result: T.List[str] = []
+        for i in args:
+            if i.startswith('-D'):
+                i = '--define=' + i[2:]
+            if i.startswith('-Wl,-rpath='):
+                continue
+            elif i == '--print-search-dirs':
+                continue
+            elif i.startswith('-L'):
+                continue
+            result.append(i)
+        return result
+
+    def compute_parameters_with_absolute_paths(self, parameter_list: T.List[str], build_dir: str) -> T.List[str]:
+        for idx, i in enumerate(parameter_list):
+            if i[:15] == '--include_path=':
+                parameter_list[idx] = i[:15] + os.path.normpath(os.path.join(build_dir, i[15:]))
+            if i[:2] == '-I':
+                parameter_list[idx] = i[:2] + os.path.normpath(os.path.join(build_dir, i[2:]))
+
+        return parameter_list
+
+    def get_dependency_gen_args(self, outtarget: str, outfile: str) -> T.List[str]:
+        return ['--preproc_with_compile', f'--preproc_dependency={outfile}']
diff --git a/vendored-meson/meson/mesonbuild/compilers/mixins/visualstudio.py b/vendored-meson/meson/mesonbuild/compilers/mixins/visualstudio.py
new file mode 100644
index 000000000000..24f11329d42e
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/compilers/mixins/visualstudio.py
@@ -0,0 +1,502 @@
+# Copyright 2019 The meson development team
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+"""Abstractions to simplify compilers that implement an MSVC compatible
+interface.
+"""
+
+import abc
+import os
+import typing as T
+
+from ... import arglist
+from ... import mesonlib
+from ... import mlog
+from mesonbuild.compilers.compilers import CompileCheckMode
+
+if T.TYPE_CHECKING:
+    from ...environment import Environment
+    from ...dependencies import Dependency
+    from .clike import CLikeCompiler as Compiler
+else:
+    # This is a bit clever, for mypy we pretend that these mixins descend from
+    # Compiler, so we get all of the methods and attributes defined for us, but
+    # for runtime we make them descend from object (which all classes normally
+    # do). This gives us DRYer type checking, with no runtime impact
+    Compiler = object
+
+vs32_instruction_set_args: T.Dict[str, T.Optional[T.List[str]]] = {
+    'mmx': ['/arch:SSE'], # There does not seem to be a flag just for MMX
+    'sse': ['/arch:SSE'],
+    'sse2': ['/arch:SSE2'],
+    'sse3': ['/arch:AVX'], # VS leaped from SSE2 directly to AVX.
+    'sse41': ['/arch:AVX'],
+    'sse42': ['/arch:AVX'],
+    'avx': ['/arch:AVX'],
+    'avx2': ['/arch:AVX2'],
+    'neon': None,
+}
+
+# The 64 bit compiler defaults to /arch:avx.
+vs64_instruction_set_args: T.Dict[str, T.Optional[T.List[str]]] = {
+    'mmx': ['/arch:AVX'],
+    'sse': ['/arch:AVX'],
+    'sse2': ['/arch:AVX'],
+    'sse3': ['/arch:AVX'],
+    'ssse3': ['/arch:AVX'],
+    'sse41': ['/arch:AVX'],
+    'sse42': ['/arch:AVX'],
+    'avx': ['/arch:AVX'],
+    'avx2': ['/arch:AVX2'],
+    'neon': None,
+}
+
+msvc_optimization_args: T.Dict[str, T.List[str]] = {
+    'plain': [],
+    '0': ['/Od'],
+    'g': [], # No specific flag to optimize debugging, /Zi or /ZI will create debug information
+    '1': ['/O1'],
+    '2': ['/O2'],
+    '3': ['/O2', '/Gw'],
+    's': ['/O1', '/Gw'],
+}
+
+msvc_debug_args: T.Dict[bool, T.List[str]] = {
+    False: [],
+    True: ['/Zi']
+}
+
+
+class VisualStudioLikeCompiler(Compiler, metaclass=abc.ABCMeta):
+
+    """A common interface for all compilers implementing an MSVC-style
+    interface.
+
+    A number of compilers attempt to mimic MSVC, with varying levels of
+    success, such as Clang-CL and ICL (the Intel C/C++ Compiler for Windows).
+    This class implements as much common logic as possible.
+    """
+
+    std_warn_args = ['/W3']
+    std_opt_args = ['/O2']
+    ignore_libs = arglist.UNIXY_COMPILER_INTERNAL_LIBS + ['execinfo']
+    internal_libs: T.List[str] = []
+
+    crt_args: T.Dict[str, T.List[str]] = {
+        'none': [],
+        'md': ['/MD'],
+        'mdd': ['/MDd'],
+        'mt': ['/MT'],
+        'mtd': ['/MTd'],
+    }
+
+    # /showIncludes is needed for build dependency tracking in Ninja
+    # See: https://ninja-build.org/manual.html#_deps
+    # Assume UTF-8 sources by default, but self.unix_args_to_native() removes it
+    # if `/source-charset` is set too.
+    # It is also dropped if Visual Studio 2013 or earlier is used, since it would
+    # not be supported in that case.
+    always_args = ['/nologo', '/showIncludes', '/utf-8']
+    warn_args: T.Dict[str, T.List[str]] = {
+        '0': [],
+        '1': ['/W2'],
+        '2': ['/W3'],
+        '3': ['/W4'],
+        'everything': ['/Wall'],
+    }
+
+    INVOKES_LINKER = False
+
+    def __init__(self, target: str):
+        self.base_options = {mesonlib.OptionKey(o) for o in ['b_pch', 'b_ndebug', 'b_vscrt']} # FIXME add lto, pgo and the like
+        self.target = target
+        self.is_64 = ('x64' in target) or ('x86_64' in target)
+        # do some canonicalization of target machine
+        if 'x86_64' in target:
+            self.machine = 'x64'
+        elif '86' in target:
+            self.machine = 'x86'
+        elif 'aarch64' in target:
+            self.machine = 'arm64'
+        elif 'arm' in target:
+            self.machine = 'arm'
+        else:
+            self.machine = target
+        if mesonlib.version_compare(self.version, '>=19.28.29910'): # VS 16.9.0 includes cl 19.28.29910
+            self.base_options.add(mesonlib.OptionKey('b_sanitize'))
+        assert self.linker is not None
+        self.linker.machine = self.machine
+
+    # Override CCompiler.get_always_args
+    def get_always_args(self) -> T.List[str]:
+        # TODO: use ImmutableListProtocol[str] here instead
+        return self.always_args.copy()
+
+    def get_pch_suffix(self) -> str:
+        return 'pch'
+
+    def get_pch_name(self, name: str) -> str:
+        chopped = os.path.basename(name).split('.')[:-1]
+        chopped.append(self.get_pch_suffix())
+        pchname = '.'.join(chopped)
+        return pchname
+
+    def get_pch_base_name(self, header: str) -> str:
+        # This needs to be implemented by inheriting classes
+        raise NotImplementedError
+
+    def get_pch_use_args(self, pch_dir: str, header: str) -> T.List[str]:
+        base = self.get_pch_base_name(header)
+        pchname = self.get_pch_name(header)
+        return ['/FI' + base, '/Yu' + base, '/Fp' + os.path.join(pch_dir, pchname)]
+
+    def get_preprocess_only_args(self) -> T.List[str]:
+        return ['/EP']
+
+    def get_preprocess_to_file_args(self) -> T.List[str]:
+        return ['/EP', '/P']
+
+    def get_compile_only_args(self) -> T.List[str]:
+        return ['/c']
+
+    def get_no_optimization_args(self) -> T.List[str]:
+        return ['/Od', '/Oi-']
+
+    def sanitizer_compile_args(self, value: str) -> T.List[str]:
+        if value == 'none':
+            return []
+        if value != 'address':
+            raise mesonlib.MesonException('VS only supports address sanitizer at the moment.')
+        return ['/fsanitize=address']
+
+    def get_output_args(self, outputname: str) -> T.List[str]:
+        if self.mode == CompileCheckMode.PREPROCESS:
+            return ['/Fi' + outputname]
+        if outputname.endswith('.exe'):
+            return ['/Fe' + outputname]
+        return ['/Fo' + outputname]
+
+    def get_buildtype_args(self, buildtype: str) -> T.List[str]:
+        return []
+
+    def get_debug_args(self, is_debug: bool) -> T.List[str]:
+        return msvc_debug_args[is_debug]
+
+    def get_optimization_args(self, optimization_level: str) -> T.List[str]:
+        args = msvc_optimization_args[optimization_level]
+        if mesonlib.version_compare(self.version, '<18.0'):
+            args = [arg for arg in args if arg != '/Gw']
+        return args
+
+    def linker_to_compiler_args(self, args: T.List[str]) -> T.List[str]:
+        return ['/link'] + args
+
+    def get_pic_args(self) -> T.List[str]:
+        return [] # PIC is handled by the loader on Windows
+
+    def gen_vs_module_defs_args(self, defsfile: str) -> T.List[str]:
+        if not isinstance(defsfile, str):
+            raise RuntimeError('Module definitions file should be str')
+        # With MSVC, DLLs only export symbols that are explicitly exported,
+        # so if a module defs file is specified, we use that to export symbols
+        return ['/DEF:' + defsfile]
+
+    def gen_pch_args(self, header: str, source: str, pchname: str) -> T.Tuple[str, T.List[str]]:
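+        # e.g. gen_pch_args('pch.h', 'src/main.cpp', 'main.pch') returns
+        # ('src/main.obj', ['/Ycpch.h', '/Fpmain.pch', '/Fosrc/main.obj'])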
+        objname = os.path.splitext(source)[0] + '.obj'
+        return objname, ['/Yc' + header, '/Fp' + pchname, '/Fo' + objname]
+
+    def openmp_flags(self) -> T.List[str]:
+        return ['/openmp']
+
+    def openmp_link_flags(self) -> T.List[str]:
+        return []
+
+    # FIXME, no idea what these should be.
+    def thread_flags(self, env: 'Environment') -> T.List[str]:
+        return []
+
+    @classmethod
+    def unix_args_to_native(cls, args: T.List[str]) -> T.List[str]:
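+        # Typical translations (illustrative):
+        #   '-L/opt/lib'       -> '/LIBPATH:/opt/lib'
+        #   '-lfoo'            -> 'foo.lib'
+        #   '-isystem/usr/inc' -> '/I/usr/inc'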
+        result: T.List[str] = []
+        for i in args:
+            # -mms-bitfields is specific to MinGW-GCC
+            # -pthread is only valid for GCC
+            if i in {'-mms-bitfields', '-pthread'}:
+                continue
+            if i.startswith('-LIBPATH:'):
+                i = '/LIBPATH:' + i[9:]
+            elif i.startswith('-L'):
+                i = '/LIBPATH:' + i[2:]
+            # Translate GNU-style -lfoo library name to the import library
+            elif i.startswith('-l'):
+                name = i[2:]
+                if name in cls.ignore_libs:
+                    # With MSVC, these are provided by the C runtime which is
+                    # linked in by default
+                    continue
+                else:
+                    i = name + '.lib'
+            elif i.startswith('-isystem'):
+                # just use /I for -isystem system include paths
+                if i.startswith('-isystem='):
+                    i = '/I' + i[9:]
+                else:
+                    i = '/I' + i[8:]
+            elif i.startswith('-idirafter'):
+                # same as -isystem, but appends the path instead
+                if i.startswith('-idirafter='):
+                    i = '/I' + i[11:]
+                else:
+                    i = '/I' + i[10:]
+            # -pthread in link flags is only used on Linux
+            elif i == '-pthread':
+                continue
+            # cl.exe does not allow specifying both, so remove /utf-8 that we
+            # added automatically in the case the user overrides it manually.
+            elif (i.startswith('/source-charset:')
+                    or i.startswith('/execution-charset:')
+                    or i == '/validate-charset-'):
+                try:
+                    result.remove('/utf-8')
+                except ValueError:
+                    pass
+            result.append(i)
+        return result
+
+    @classmethod
+    def native_args_to_unix(cls, args: T.List[str]) -> T.List[str]:
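+        # e.g. '/LIBPATH:C:/libs' -> '-LC:/libs'; a bare 'z.lib' -> '-lz.lib'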
+        result: T.List[str] = []
+        for arg in args:
+            if arg.startswith(('/LIBPATH:', '-LIBPATH:')):
+                result.append('-L' + arg[9:])
+            elif arg.endswith(('.a', '.lib')) and not os.path.isabs(arg):
+                result.append('-l' + arg)
+            else:
+                result.append(arg)
+        return result
+
+    def get_werror_args(self) -> T.List[str]:
+        return ['/WX']
+
+    def get_include_args(self, path: str, is_system: bool) -> T.List[str]:
+        if path == '':
+            path = '.'
+        # msvc does not have a concept of system header dirs.
+        return ['-I' + path]
+
+    def compute_parameters_with_absolute_paths(self, parameter_list: T.List[str], build_dir: str) -> T.List[str]:
+        for idx, i in enumerate(parameter_list):
+            if i[:2] == '-I' or i[:2] == '/I':
+                parameter_list[idx] = i[:2] + os.path.normpath(os.path.join(build_dir, i[2:]))
+            elif i[:9] == '/LIBPATH:':
+                parameter_list[idx] = i[:9] + os.path.normpath(os.path.join(build_dir, i[9:]))
+
+        return parameter_list
+
+    # Visual Studio is special. It ignores some arguments it does not
+    # understand and you can't tell it to error out on those.
+    # http://stackoverflow.com/questions/15259720/how-can-i-make-the-microsoft-c-compiler-treat-unknown-flags-as-errors-rather-t
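+    # Instead, compile with the flag and treat the "ignoring unknown option"
+    # diagnostics in the output (D9002 when compiling, LNK4044 when linking)
+    # as failure.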
+    def has_arguments(self, args: T.List[str], env: 'Environment', code: str, mode: CompileCheckMode) -> T.Tuple[bool, bool]:
+        warning_text = '4044' if mode == CompileCheckMode.LINK else '9002'
+        with self._build_wrapper(code, env, extra_args=args, mode=mode) as p:
+            if p.returncode != 0:
+                return False, p.cached
+            return not (warning_text in p.stderr or warning_text in p.stdout), p.cached
+
+    def get_compile_debugfile_args(self, rel_obj: str, pch: bool = False) -> T.List[str]:
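+        # e.g. rel_obj='obj/foo.obj' -> ['/Fdobj/foo.pdb']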
+        pdbarr = rel_obj.split('.')[:-1]
+        pdbarr += ['pdb']
+        args = ['/Fd' + '.'.join(pdbarr)]
+        return args
+
+    def get_instruction_set_args(self, instruction_set: str) -> T.Optional[T.List[str]]:
+        if self.is_64:
+            return vs64_instruction_set_args.get(instruction_set, None)
+        return vs32_instruction_set_args.get(instruction_set, None)
+
+    def _calculate_toolset_version(self, version: int) -> T.Optional[str]:
+        if version < 1310:
+            return '7.0'
+        elif version < 1400:
+            return '7.1' # (Visual Studio 2003)
+        elif version < 1500:
+            return '8.0' # (Visual Studio 2005)
+        elif version < 1600:
+            return '9.0' # (Visual Studio 2008)
+        elif version < 1700:
+            return '10.0' # (Visual Studio 2010)
+        elif version < 1800:
+            return '11.0' # (Visual Studio 2012)
+        elif version < 1900:
+            return '12.0' # (Visual Studio 2013)
+        elif version < 1910:
+            return '14.0' # (Visual Studio 2015)
+        elif version < 1920:
+            return '14.1' # (Visual Studio 2017)
+        elif version < 1930:
+            return '14.2' # (Visual Studio 2019)
+        elif version < 1940:
+            return '14.3' # (Visual Studio 2022)
+        mlog.warning(f'Could not find toolset for version {self.version!r}')
+        return None
+
+    def get_toolset_version(self) -> T.Optional[str]:
+        # See boost/config/compiler/visualc.cpp for up to date mapping
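+        # e.g. self.version == '19.16.27023' -> 1916 -> toolset '14.1'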
+        try:
+            version = int(''.join(self.version.split('.')[0:2]))
+        except ValueError:
+            return None
+        return self._calculate_toolset_version(version)
+
+    def get_default_include_dirs(self) -> T.List[str]:
+        if 'INCLUDE' not in os.environ:
+            return []
+        return os.environ['INCLUDE'].split(os.pathsep)
+
+    def get_crt_compile_args(self, crt_val: str, buildtype: str) -> T.List[str]:
+        if crt_val in self.crt_args:
+            return self.crt_args[crt_val]
+        assert crt_val in {'from_buildtype', 'static_from_buildtype'}
+        dbg = 'mdd'
+        rel = 'md'
+        if crt_val == 'static_from_buildtype':
+            dbg = 'mtd'
+            rel = 'mt'
+        # Match what build type flags used to do.
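+        # e.g. crt_val='from_buildtype' with buildtype='debug' -> ['/MDd'],
+        # and crt_val='static_from_buildtype' with buildtype='release' -> ['/MT']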
+        if buildtype == 'plain':
+            return []
+        elif buildtype == 'debug':
+            return self.crt_args[dbg]
+        elif buildtype == 'debugoptimized':
+            return self.crt_args[rel]
+        elif buildtype == 'release':
+            return self.crt_args[rel]
+        elif buildtype == 'minsize':
+            return self.crt_args[rel]
+        else:
+            assert buildtype == 'custom'
+            raise mesonlib.EnvironmentException('Requested C runtime based on buildtype, but buildtype is "custom".')
+
+    def has_func_attribute(self, name: str, env: 'Environment') -> T.Tuple[bool, bool]:
+        # MSVC doesn't have __attribute__ like Clang and GCC do; only
+        # dllimport/dllexport are reported as supported, without compiling
+        # anything
+        return name in {'dllimport', 'dllexport'}, False
+
+    def get_argument_syntax(self) -> str:
+        return 'msvc'
+
+    def symbols_have_underscore_prefix(self, env: 'Environment') -> bool:
+        '''
+        Check if the compiler prefixes an underscore to global C symbols.
+
+        This overrides the Clike method, as for MSVC checking the
+        underscore prefix based on the compiler define never works,
+        so do not even try.
+        '''
+        # Try to consult a hardcoded list of cases we know
+        # absolutely have an underscore prefix
+        result = self._symbols_have_underscore_prefix_list(env)
+        if result is not None:
+            return result
+
+        # As a last resort, try search in a compiled binary
+        return self._symbols_have_underscore_prefix_searchbin(env)
+
+
+class MSVCCompiler(VisualStudioLikeCompiler):
+
+    """Specific to the Microsoft Compilers."""
+
+    id = 'msvc'
+
+    def __init__(self, target: str):
+        super().__init__(target)
+
+        # Visual Studio 2013 and earlier don't support the /utf-8 argument.
+        # We want to remove it. We also want to make an explicit copy so we
+        # don't mutate class constant state
+        if mesonlib.version_compare(self.version, '<19.00') and '/utf-8' in self.always_args:
+            self.always_args = [r for r in self.always_args if r != '/utf-8']
+
+    def get_compile_debugfile_args(self, rel_obj: str, pch: bool = False) -> T.List[str]:
+        args = super().get_compile_debugfile_args(rel_obj, pch)
+        # When generating a PDB file with PCH, all compile commands write
+        # to the same PDB file. Hence, we need to serialize the PDB
+        # writes using /FS since we do parallel builds. This slows down the
+        # build obviously, which is why we only do this when PCH is on.
+        # This was added in Visual Studio 2013 (MSVC 18.0). Before that it was
+        # always on: https://msdn.microsoft.com/en-us/library/dn502518.aspx
+        if pch and mesonlib.version_compare(self.version, '>=18.0'):
+            args = ['/FS'] + args
+        return args
+
+    # Override CCompiler.get_always_args
+    # We want to drop '/utf-8' for Visual Studio 2013 and earlier
+    def get_always_args(self) -> T.List[str]:
+        return self.always_args
+
+    def get_instruction_set_args(self, instruction_set: str) -> T.Optional[T.List[str]]:
+        if self.version.split('.')[0] == '16' and instruction_set == 'avx':
+            # VS documentation says that this exists and should work, but
+            # it does not. The headers do not contain AVX intrinsics
+            # and they cannot be called.
+            return None
+        return super().get_instruction_set_args(instruction_set)
+
+    def get_pch_base_name(self, header: str) -> str:
+        return os.path.basename(header)
+
+
+class ClangClCompiler(VisualStudioLikeCompiler):
+
+    """Specific to Clang-CL."""
+
+    id = 'clang-cl'
+
+    def __init__(self, target: str):
+        super().__init__(target)
+
+        # Assembly
+        self.can_compile_suffixes.add('s')
+        self.can_compile_suffixes.add('sx')
+
+    def has_arguments(self, args: T.List[str], env: 'Environment', code: str, mode: CompileCheckMode) -> T.Tuple[bool, bool]:
+        if mode != CompileCheckMode.LINK:
+            args = args + ['-Werror=unknown-argument', '-Werror=unknown-warning-option']
+        return super().has_arguments(args, env, code, mode)
+
+    def get_toolset_version(self) -> T.Optional[str]:
+        # XXX: what is the right thing to do here?
+        return '14.1'
+
+    def get_pch_base_name(self, header: str) -> str:
+        return header
+
+    def get_include_args(self, path: str, is_system: bool) -> T.List[str]:
+        if path == '':
+            path = '.'
+        return ['/clang:-isystem' + path] if is_system else ['-I' + path]
+
+    def get_dependency_compile_args(self, dep: 'Dependency') -> T.List[str]:
+        if dep.get_include_type() == 'system':
+            converted: T.List[str] = []
+            for i in dep.get_compile_args():
+                if i.startswith('-isystem'):
+                    converted += ['/clang:' + i]
+                else:
+                    converted += [i]
+            return converted
+        else:
+            return dep.get_compile_args()
diff --git a/vendored-meson/meson/mesonbuild/compilers/mixins/xc16.py b/vendored-meson/meson/mesonbuild/compilers/mixins/xc16.py
new file mode 100644
index 000000000000..36c2c10ab9bd
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/compilers/mixins/xc16.py
@@ -0,0 +1,133 @@
+# Copyright 2012-2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+"""Representations specific to the Microchip XC16 C compiler family."""
+
+import os
+import typing as T
+
+from ...mesonlib import EnvironmentException
+
+if T.TYPE_CHECKING:
+    from ...envconfig import MachineInfo
+    from ...environment import Environment
+    from ...compilers.compilers import Compiler
+else:
+    # This is a bit clever, for mypy we pretend that these mixins descend from
+    # Compiler, so we get all of the methods and attributes defined for us, but
+    # for runtime we make them descend from object (which all classes normally
+    # do). This gives us DRYer type checking, with no runtime impact
+    Compiler = object
+
+xc16_buildtype_args = {
+    'plain': [],
+    'debug': [],
+    'debugoptimized': [],
+    'release': [],
+    'minsize': [],
+    'custom': [],
+}  # type: T.Dict[str, T.List[str]]
+
+xc16_optimization_args = {
+    'plain': [],
+    '0': ['-O0'],
+    'g': ['-O0'],
+    '1': ['-O1'],
+    '2': ['-O2'],
+    '3': ['-O3'],
+    's': ['-Os']
+}  # type: T.Dict[str, T.List[str]]
+
+xc16_debug_args = {
+    False: [],
+    True: []
+}  # type: T.Dict[bool, T.List[str]]
+
+
+class Xc16Compiler(Compiler):
+
+    id = 'xc16'
+
+    def __init__(self) -> None:
+        if not self.is_cross:
+            raise EnvironmentException('xc16 supports only cross-compilation.')
+        # Assembly
+        self.can_compile_suffixes.add('s')
+        self.can_compile_suffixes.add('sx')
+        default_warn_args = []  # type: T.List[str]
+        self.warn_args = {'0': [],
+                          '1': default_warn_args,
+                          '2': default_warn_args + [],
+                          '3': default_warn_args + [],
+                          'everything': default_warn_args + []}  # type: T.Dict[str, T.List[str]]
+
+    def get_always_args(self) -> T.List[str]:
+        return []
+
+    def get_pic_args(self) -> T.List[str]:
+        # PIC support is not enabled by default for xc16,
+        # if users want to use it, they need to add the required arguments explicitly
+        return []
+
+    def get_buildtype_args(self, buildtype: str) -> T.List[str]:
+        return xc16_buildtype_args[buildtype]
+
+    def get_pch_suffix(self) -> str:
+        return 'pch'
+
+    def get_pch_use_args(self, pch_dir: str, header: str) -> T.List[str]:
+        return []
+
+    def thread_flags(self, env: 'Environment') -> T.List[str]:
+        return []
+
+    def get_coverage_args(self) -> T.List[str]:
+        return []
+
+    def get_no_stdinc_args(self) -> T.List[str]:
+        return ['-nostdinc']
+
+    def get_no_stdlib_link_args(self) -> T.List[str]:
+        return ['--nostdlib']
+
+    def get_optimization_args(self, optimization_level: str) -> T.List[str]:
+        return xc16_optimization_args[optimization_level]
+
+    def get_debug_args(self, is_debug: bool) -> T.List[str]:
+        return xc16_debug_args[is_debug]
+
+    @classmethod
+    def _unix_args_to_native(cls, args: T.List[str], info: MachineInfo) -> T.List[str]:
+        result = []
+        for i in args:
+            if i.startswith('-D'):
+                i = '-D' + i[2:]
+            if i.startswith('-I'):
+                i = '-I' + i[2:]
+            if i.startswith('-Wl,-rpath='):
+                continue
+            elif i == '--print-search-dirs':
+                continue
+            elif i.startswith('-L'):
+                continue
+            result.append(i)
+        return result
+
+    def compute_parameters_with_absolute_paths(self, parameter_list: T.List[str], build_dir: str) -> T.List[str]:
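+        # With build_dir='/work/build' (illustrative): '-Isrc' -> '-I/work/build/src'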
+        for idx, i in enumerate(parameter_list):
+            if i[:2] == '-I':
+                parameter_list[idx] = i[:2] + os.path.normpath(os.path.join(build_dir, i[2:]))
+
+        return parameter_list
diff --git a/vendored-meson/meson/mesonbuild/compilers/objc.py b/vendored-meson/meson/mesonbuild/compilers/objc.py
new file mode 100644
index 000000000000..cb8eb9e1fb88
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/compilers/objc.py
@@ -0,0 +1,114 @@
+# Copyright 2012-2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+import typing as T
+
+from .. import coredata
+from ..mesonlib import OptionKey
+
+from .compilers import Compiler
+from .mixins.clike import CLikeCompiler
+from .mixins.gnu import GnuCompiler, gnu_common_warning_args, gnu_objc_warning_args
+from .mixins.clang import ClangCompiler
+
+if T.TYPE_CHECKING:
+    from ..programs import ExternalProgram
+    from ..envconfig import MachineInfo
+    from ..environment import Environment
+    from ..linkers.linkers import DynamicLinker
+    from ..mesonlib import MachineChoice
+
+
+class ObjCCompiler(CLikeCompiler, Compiler):
+
+    language = 'objc'
+
+    def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_machine: MachineChoice,
+                 is_cross: bool, info: 'MachineInfo',
+                 exe_wrap: T.Optional['ExternalProgram'],
+                 linker: T.Optional['DynamicLinker'] = None,
+                 full_version: T.Optional[str] = None):
+        Compiler.__init__(self, ccache, exelist, version, for_machine, info,
+                          is_cross=is_cross, full_version=full_version,
+                          linker=linker)
+        CLikeCompiler.__init__(self, exe_wrap)
+
+    @staticmethod
+    def get_display_language() -> str:
+        return 'Objective-C'
+
+    def sanity_check(self, work_dir: str, environment: 'Environment') -> None:
+        code = '#import <stddef.h>\nint main(void) { return 0; }\n'
+        return self._sanity_check_impl(work_dir, environment, 'sanitycheckobjc.m', code)
+
+
+class GnuObjCCompiler(GnuCompiler, ObjCCompiler):
+    def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_machine: MachineChoice,
+                 is_cross: bool, info: 'MachineInfo',
+                 exe_wrapper: T.Optional['ExternalProgram'] = None,
+                 defines: T.Optional[T.Dict[str, str]] = None,
+                 linker: T.Optional['DynamicLinker'] = None,
+                 full_version: T.Optional[str] = None):
+        ObjCCompiler.__init__(self, ccache, exelist, version, for_machine, is_cross,
+                              info, exe_wrapper, linker=linker, full_version=full_version)
+        GnuCompiler.__init__(self, defines)
+        default_warn_args = ['-Wall', '-Winvalid-pch']
+        self.warn_args = {'0': [],
+                          '1': default_warn_args,
+                          '2': default_warn_args + ['-Wextra'],
+                          '3': default_warn_args + ['-Wextra', '-Wpedantic'],
+                          'everything': (default_warn_args + ['-Wextra', '-Wpedantic'] +
+                                         self.supported_warn_args(gnu_common_warning_args) +
+                                         self.supported_warn_args(gnu_objc_warning_args))}
+
+
+class ClangObjCCompiler(ClangCompiler, ObjCCompiler):
+    def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_machine: MachineChoice,
+                 is_cross: bool, info: 'MachineInfo',
+                 exe_wrapper: T.Optional['ExternalProgram'] = None,
+                 defines: T.Optional[T.Dict[str, str]] = None,
+                 linker: T.Optional['DynamicLinker'] = None,
+                 full_version: T.Optional[str] = None):
+        ObjCCompiler.__init__(self, ccache, exelist, version, for_machine, is_cross,
+                              info, exe_wrapper, linker=linker, full_version=full_version)
+        ClangCompiler.__init__(self, defines)
+        default_warn_args = ['-Wall', '-Winvalid-pch']
+        self.warn_args = {'0': [],
+                          '1': default_warn_args,
+                          '2': default_warn_args + ['-Wextra'],
+                          '3': default_warn_args + ['-Wextra', '-Wpedantic'],
+                          'everything': ['-Weverything']}
+
+    def get_options(self) -> 'coredata.MutableKeyedOptionDictType':
+        opts = super().get_options()
+        opts.update({
+            OptionKey('std', machine=self.for_machine, lang='c'): coredata.UserComboOption(
+                'C language standard to use',
+                ['none', 'c89', 'c99', 'c11', 'c17', 'gnu89', 'gnu99', 'gnu11', 'gnu17'],
+                'none',
+            )
+        })
+        return opts
+
+    def get_option_compile_args(self, options: 'coredata.KeyedOptionDictType') -> T.List[str]:
+        args = []
+        std = options[OptionKey('std', machine=self.for_machine, lang='c')]
+        if std.value != 'none':
+            args.append('-std=' + std.value)
+        return args
+
+
+class AppleClangObjCCompiler(ClangObjCCompiler):
+
+    """Handle the differences between Apple's clang and vanilla clang."""
diff --git a/vendored-meson/meson/mesonbuild/compilers/objcpp.py b/vendored-meson/meson/mesonbuild/compilers/objcpp.py
new file mode 100644
index 000000000000..530bc7cb63cb
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/compilers/objcpp.py
@@ -0,0 +1,115 @@
+# Copyright 2012-2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+import typing as T
+
+from .. import coredata
+from ..mesonlib import OptionKey
+
+from .mixins.clike import CLikeCompiler
+from .compilers import Compiler
+from .mixins.gnu import GnuCompiler, gnu_common_warning_args, gnu_objc_warning_args
+from .mixins.clang import ClangCompiler
+
+if T.TYPE_CHECKING:
+    from ..programs import ExternalProgram
+    from ..envconfig import MachineInfo
+    from ..environment import Environment
+    from ..linkers.linkers import DynamicLinker
+    from ..mesonlib import MachineChoice
+
+class ObjCPPCompiler(CLikeCompiler, Compiler):
+
+    language = 'objcpp'
+
+    def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_machine: MachineChoice,
+                 is_cross: bool, info: 'MachineInfo',
+                 exe_wrap: T.Optional['ExternalProgram'],
+                 linker: T.Optional['DynamicLinker'] = None,
+                 full_version: T.Optional[str] = None):
+        Compiler.__init__(self, ccache, exelist, version, for_machine, info,
+                          is_cross=is_cross, full_version=full_version,
+                          linker=linker)
+        CLikeCompiler.__init__(self, exe_wrap)
+
+    @staticmethod
+    def get_display_language() -> str:
+        return 'Objective-C++'
+
+    def sanity_check(self, work_dir: str, environment: 'Environment') -> None:
+        code = '#import <stdio.h>\nclass MyClass;int main(void) { return 0; }\n'
+        return self._sanity_check_impl(work_dir, environment, 'sanitycheckobjcpp.mm', code)
+
+
+class GnuObjCPPCompiler(GnuCompiler, ObjCPPCompiler):
+    def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_machine: MachineChoice,
+                 is_cross: bool, info: 'MachineInfo',
+                 exe_wrapper: T.Optional['ExternalProgram'] = None,
+                 defines: T.Optional[T.Dict[str, str]] = None,
+                 linker: T.Optional['DynamicLinker'] = None,
+                 full_version: T.Optional[str] = None):
+        ObjCPPCompiler.__init__(self, ccache, exelist, version, for_machine, is_cross,
+                                info, exe_wrapper, linker=linker, full_version=full_version)
+        GnuCompiler.__init__(self, defines)
+        default_warn_args = ['-Wall', '-Winvalid-pch']
+        self.warn_args = {'0': [],
+                          '1': default_warn_args,
+                          '2': default_warn_args + ['-Wextra'],
+                          '3': default_warn_args + ['-Wextra', '-Wpedantic'],
+                          'everything': (default_warn_args + ['-Wextra', '-Wpedantic'] +
+                                         self.supported_warn_args(gnu_common_warning_args) +
+                                         self.supported_warn_args(gnu_objc_warning_args))}
+
+
+class ClangObjCPPCompiler(ClangCompiler, ObjCPPCompiler):
+
+    def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_machine: MachineChoice,
+                 is_cross: bool, info: 'MachineInfo',
+                 exe_wrapper: T.Optional['ExternalProgram'] = None,
+                 defines: T.Optional[T.Dict[str, str]] = None,
+                 linker: T.Optional['DynamicLinker'] = None,
+                 full_version: T.Optional[str] = None):
+        ObjCPPCompiler.__init__(self, ccache, exelist, version, for_machine, is_cross,
+                                info, exe_wrapper, linker=linker, full_version=full_version)
+        ClangCompiler.__init__(self, defines)
+        default_warn_args = ['-Wall', '-Winvalid-pch']
+        self.warn_args = {'0': [],
+                          '1': default_warn_args,
+                          '2': default_warn_args + ['-Wextra'],
+                          '3': default_warn_args + ['-Wextra', '-Wpedantic'],
+                          'everything': ['-Weverything']}
+
+    def get_options(self) -> 'coredata.MutableKeyedOptionDictType':
+        opts = super().get_options()
+        opts.update({
+            OptionKey('std', machine=self.for_machine, lang='cpp'): coredata.UserComboOption(
+                'C++ language standard to use',
+                ['none', 'c++98', 'c++11', 'c++14', 'c++17', 'gnu++98', 'gnu++11', 'gnu++14', 'gnu++17'],
+                'none',
+            )
+        })
+        return opts
+
+    def get_option_compile_args(self, options: 'coredata.KeyedOptionDictType') -> T.List[str]:
+        args = []
+        std = options[OptionKey('std', machine=self.for_machine, lang='cpp')]
+        if std.value != 'none':
+            args.append('-std=' + std.value)
+        return args
+
+
+class AppleClangObjCPPCompiler(ClangObjCPPCompiler):
+
+    """Handle the differences between Apple's clang and vanilla clang."""
diff --git a/vendored-meson/meson/mesonbuild/compilers/rust.py b/vendored-meson/meson/mesonbuild/compilers/rust.py
new file mode 100644
index 000000000000..ef0390e97794
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/compilers/rust.py
@@ -0,0 +1,230 @@
+# Copyright 2012-2022 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+import subprocess, os.path
+import textwrap
+import re
+import typing as T
+
+from .. import coredata, mlog
+from ..mesonlib import EnvironmentException, MesonException, Popen_safe, OptionKey, join_args
+from .compilers import Compiler, rust_buildtype_args, clike_debug_args
+
+if T.TYPE_CHECKING:
+    from ..coredata import MutableKeyedOptionDictType, KeyedOptionDictType
+    from ..envconfig import MachineInfo
+    from ..environment import Environment  # noqa: F401
+    from ..linkers.linkers import DynamicLinker
+    from ..mesonlib import MachineChoice
+    from ..programs import ExternalProgram
+    from ..dependencies import Dependency
+
+
+rust_optimization_args = {
+    'plain': [],
+    '0': [],
+    'g': ['-C', 'opt-level=0'],
+    '1': ['-C', 'opt-level=1'],
+    '2': ['-C', 'opt-level=2'],
+    '3': ['-C', 'opt-level=3'],
+    's': ['-C', 'opt-level=s'],
+}  # type: T.Dict[str, T.List[str]]
+
+class RustCompiler(Compiler):
+
+    # rustc doesn't invoke the compiler itself, so it doesn't need a LINKER_PREFIX
+    language = 'rust'
+    id = 'rustc'
+
+    _WARNING_LEVELS: T.Dict[str, T.List[str]] = {
+        '0': ['-A', 'warnings'],
+        '1': [],
+        '2': [],
+        '3': ['-W', 'warnings'],
+    }
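+    # For illustration: rustc's lint flags are -A (allow, i.e. silence),
+    # -W (warn) and -D (deny), so level '0' silences all warnings and
+    # level '3' explicitly turns the bulk 'warnings' lint group back on.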
+
+    def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice,
+                 is_cross: bool, info: 'MachineInfo',
+                 exe_wrapper: T.Optional['ExternalProgram'] = None,
+                 full_version: T.Optional[str] = None,
+                 linker: T.Optional['DynamicLinker'] = None):
+        super().__init__([], exelist, version, for_machine, info,
+                         is_cross=is_cross, full_version=full_version,
+                         linker=linker)
+        self.exe_wrapper = exe_wrapper
+        self.base_options.update({OptionKey(o) for o in ['b_colorout', 'b_ndebug']})
+        if 'link' in self.linker.id:
+            self.base_options.add(OptionKey('b_vscrt'))
+        self.native_static_libs: T.List[str] = []
+
+    def needs_static_linker(self) -> bool:
+        return False
+
+    def sanity_check(self, work_dir: str, environment: 'Environment') -> None:
+        source_name = os.path.join(work_dir, 'sanity.rs')
+        output_name = os.path.join(work_dir, 'rusttest')
+        with open(source_name, 'w', encoding='utf-8') as ofile:
+            ofile.write(textwrap.dedent(
+                '''fn main() {
+                }
+                '''))
+
+        cmdlist = self.exelist + ['-o', output_name, source_name]
+        pc, stdo, stde = Popen_safe(cmdlist, cwd=work_dir)
+        mlog.debug('Sanity check compiler command line:', join_args(cmdlist))
+        mlog.debug('Sanity check compile stdout:')
+        mlog.debug(stdo)
+        mlog.debug('-----\nSanity check compile stderr:')
+        mlog.debug(stde)
+        mlog.debug('-----')
+        if pc.returncode != 0:
+            raise EnvironmentException(f'Rust compiler {self.name_string()} cannot compile programs.')
+        if self.is_cross:
+            if self.exe_wrapper is None:
+                # Can't check if the binaries run so we have to assume they do
+                return
+            cmdlist = self.exe_wrapper.get_command() + [output_name]
+        else:
+            cmdlist = [output_name]
+        pe = subprocess.Popen(cmdlist, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
+        pe.wait()
+        if pe.returncode != 0:
+            raise EnvironmentException(f'Executables created by Rust compiler {self.name_string()} are not runnable.')
+        # Get libraries needed to link with a Rust staticlib
+        cmdlist = self.exelist + ['--crate-type', 'staticlib', '--print', 'native-static-libs', source_name]
+        p, stdo, stde = Popen_safe(cmdlist, cwd=work_dir)
+        if p.returncode == 0:
+            match = re.search('native-static-libs: (.*)$', stde, re.MULTILINE)
+            if match:
+                # Exclude some well-known libraries that we don't need because
+                # they are always part of C/C++ linkers. Rustc probably should
+                # not print them; pkg-config, for example, never specifies them.
+                # FIXME: https://github.com/rust-lang/rust/issues/55120
+                exclude = {'-lc', '-lgcc_s', '-lkernel32', '-ladvapi32'}
+                self.native_static_libs = [i for i in match.group(1).split() if i not in exclude]
+
+    def get_dependency_gen_args(self, outtarget: str, outfile: str) -> T.List[str]:
+        return ['--dep-info', outfile]
+
+    def get_buildtype_args(self, buildtype: str) -> T.List[str]:
+        return rust_buildtype_args[buildtype]
+
+    def get_sysroot(self) -> str:
+        cmd = self.get_exelist(ccache=False) + ['--print', 'sysroot']
+        p, stdo, stde = Popen_safe(cmd)
+        return stdo.split('\n', maxsplit=1)[0]
+
+    def get_debug_args(self, is_debug: bool) -> T.List[str]:
+        return clike_debug_args[is_debug]
+
+    def get_optimization_args(self, optimization_level: str) -> T.List[str]:
+        return rust_optimization_args[optimization_level]
+
+    def compute_parameters_with_absolute_paths(self, parameter_list: T.List[str],
+                                               build_dir: str) -> T.List[str]:
+        for idx, i in enumerate(parameter_list):
+            if i[:2] == '-L':
+                for j in ['dependency', 'crate', 'native', 'framework', 'all']:
+                    combined_len = len(j) + 3
+                    if i[:combined_len] == f'-L{j}=':
+                        parameter_list[idx] = i[:combined_len] + os.path.normpath(os.path.join(build_dir, i[combined_len:]))
+                        break
+
+        return parameter_list
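+        # For illustration: with build_dir '/abs/build', '-Lnative=../libs'
+        # is rewritten to '-Lnative=/abs/libs' (normpath of '/abs/build/../libs').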
+
+    def get_output_args(self, outputname: str) -> T.List[str]:
+        return ['-o', outputname]
+
+    @classmethod
+    def use_linker_args(cls, linker: str, version: str) -> T.List[str]:
+        return ['-C', f'linker={linker}']
+
+    # rustc dispatches to a gcc-like C compiler driver for dynamic linking;
+    # use_linker_args above selects that driver via `-C linker=...`.
+
+    def get_options(self) -> 'MutableKeyedOptionDictType':
+        key = OptionKey('std', machine=self.for_machine, lang=self.language)
+        return {
+            key: coredata.UserComboOption(
+                'Rust edition to use',
+                ['none', '2015', '2018', '2021'],
+                'none',
+            ),
+        }
+
+    def get_dependency_compile_args(self, dep: 'Dependency') -> T.List[str]:
+        # Rust doesn't have dependency compile arguments so simply return
+        # nothing here. Dependencies are linked and all required metadata is
+        # provided by the linker flags.
+        return []
+
+    def get_option_compile_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+        args = []
+        key = OptionKey('std', machine=self.for_machine, lang=self.language)
+        std = options[key]
+        if std.value != 'none':
+            args.append('--edition=' + std.value)
+        return args
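+        # For illustration (hypothetical invocation): configuring with
+        # `-Drust_std=2018` makes this return ['--edition=2018'].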
+
+    def get_crt_compile_args(self, crt_val: str, buildtype: str) -> T.List[str]:
+        # Rust handles this for us, we don't need to do anything
+        return []
+
+    def get_colorout_args(self, colortype: str) -> T.List[str]:
+        if colortype in {'always', 'never', 'auto'}:
+            return [f'--color={colortype}']
+        raise MesonException(f'Invalid color type for rust {colortype}')
+
+    def get_linker_always_args(self) -> T.List[str]:
+        args: T.List[str] = []
+        for a in super().get_linker_always_args():
+            args.extend(['-C', f'link-arg={a}'])
+        return args
+
+    def get_werror_args(self) -> T.List[str]:
+        # Use -D warnings, which makes every warning not explicitly allowed an
+        # error
+        return ['-D', 'warnings']
+
+    def get_warn_args(self, level: str) -> T.List[str]:
+        # TODO: I'm not really sure what to put here, Rustc doesn't have
+        # warning levels like other compilers do.
+        return self._WARNING_LEVELS[level]
+
+    def get_no_warn_args(self) -> T.List[str]:
+        return self._WARNING_LEVELS["0"]
+
+    def get_pic_args(self) -> T.List[str]:
+        # relocation-model=pic is rustc's default already.
+        return []
+
+    def get_pie_args(self) -> T.List[str]:
+        # Rustc currently has no way to toggle this; it is controlled by
+        # whether PIC is enabled in rustc.
+        return []
+
+    def get_assert_args(self, disable: bool) -> T.List[str]:
+        action = "no" if disable else "yes"
+        return ['-C', f'debug-assertions={action}', '-C', 'overflow-checks=no']
+
+
+class ClippyRustCompiler(RustCompiler):
+
+    """Clippy is a linter that wraps Rustc.
+
+    This just provides us with a different id.
+    """
+
+    id = 'clippy-driver rustc'
diff --git a/vendored-meson/meson/mesonbuild/compilers/swift.py b/vendored-meson/meson/mesonbuild/compilers/swift.py
new file mode 100644
index 000000000000..19866e2c0793
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/compilers/swift.py
@@ -0,0 +1,130 @@
+# Copyright 2012-2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+import subprocess, os.path
+import typing as T
+
+from ..mesonlib import EnvironmentException
+
+from .compilers import Compiler, swift_buildtype_args, clike_debug_args
+
+if T.TYPE_CHECKING:
+    from ..envconfig import MachineInfo
+    from ..environment import Environment
+    from ..linkers.linkers import DynamicLinker
+    from ..mesonlib import MachineChoice
+
+swift_optimization_args = {
+    'plain': [],
+    '0': [],
+    'g': [],
+    '1': ['-O'],
+    '2': ['-O'],
+    '3': ['-O'],
+    's': ['-O'],
+}  # type: T.Dict[str, T.List[str]]
+
+class SwiftCompiler(Compiler):
+
+    LINKER_PREFIX = ['-Xlinker']
+    language = 'swift'
+    id = 'llvm'
+
+    def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice,
+                 is_cross: bool, info: 'MachineInfo', full_version: T.Optional[str] = None,
+                 linker: T.Optional['DynamicLinker'] = None):
+        super().__init__([], exelist, version, for_machine, info,
+                         is_cross=is_cross, full_version=full_version,
+                         linker=linker)
+        self.version = version
+
+    def needs_static_linker(self) -> bool:
+        return True
+
+    def get_werror_args(self) -> T.List[str]:
+        return ['--fatal-warnings']
+
+    def get_dependency_gen_args(self, outtarget: str, outfile: str) -> T.List[str]:
+        return ['-emit-dependencies']
+
+    def depfile_for_object(self, objfile: str) -> T.Optional[str]:
+        return os.path.splitext(objfile)[0] + '.' + self.get_depfile_suffix()
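+        # e.g. 'foo.o' -> 'foo.d'.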
+
+    def get_depfile_suffix(self) -> str:
+        return 'd'
+
+    def get_output_args(self, target: str) -> T.List[str]:
+        return ['-o', target]
+
+    def get_header_import_args(self, headername: str) -> T.List[str]:
+        return ['-import-objc-header', headername]
+
+    def get_warn_args(self, level: str) -> T.List[str]:
+        return []
+
+    def get_buildtype_args(self, buildtype: str) -> T.List[str]:
+        return swift_buildtype_args[buildtype]
+
+    def get_std_exe_link_args(self) -> T.List[str]:
+        return ['-emit-executable']
+
+    def get_module_args(self, modname: str) -> T.List[str]:
+        return ['-module-name', modname]
+
+    def get_mod_gen_args(self) -> T.List[str]:
+        return ['-emit-module']
+
+    def get_include_args(self, path: str, is_system: bool) -> T.List[str]:
+        return ['-I' + path]
+
+    def get_compile_only_args(self) -> T.List[str]:
+        return ['-c']
+
+    def compute_parameters_with_absolute_paths(self, parameter_list: T.List[str],
+                                               build_dir: str) -> T.List[str]:
+        for idx, i in enumerate(parameter_list):
+            if i[:2] == '-I' or i[:2] == '-L':
+                parameter_list[idx] = i[:2] + os.path.normpath(os.path.join(build_dir, i[2:]))
+
+        return parameter_list
+
+    def sanity_check(self, work_dir: str, environment: 'Environment') -> None:
+        src = 'swifttest.swift'
+        source_name = os.path.join(work_dir, src)
+        output_name = os.path.join(work_dir, 'swifttest')
+        extra_flags: T.List[str] = []
+        extra_flags += environment.coredata.get_external_args(self.for_machine, self.language)
+        if self.is_cross:
+            extra_flags += self.get_compile_only_args()
+        else:
+            extra_flags += environment.coredata.get_external_link_args(self.for_machine, self.language)
+        with open(source_name, 'w', encoding='utf-8') as ofile:
+            ofile.write('''print("Swift compilation is working.")
+''')
+        pc = subprocess.Popen(self.exelist + extra_flags + ['-emit-executable', '-o', output_name, src], cwd=work_dir)
+        pc.wait()
+        if pc.returncode != 0:
+            raise EnvironmentException('Swift compiler %s cannot compile programs.' % self.name_string())
+        if self.is_cross:
+            # Can't check if the binaries run so we have to assume they do
+            return
+        if subprocess.call(output_name) != 0:
+            raise EnvironmentException('Executables created by Swift compiler %s are not runnable.' % self.name_string())
+
+    def get_debug_args(self, is_debug: bool) -> T.List[str]:
+        return clike_debug_args[is_debug]
+
+    def get_optimization_args(self, optimization_level: str) -> T.List[str]:
+        return swift_optimization_args[optimization_level]
diff --git a/vendored-meson/meson/mesonbuild/compilers/vala.py b/vendored-meson/meson/mesonbuild/compilers/vala.py
new file mode 100644
index 000000000000..c6af04a274bd
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/compilers/vala.py
@@ -0,0 +1,140 @@
+# Copyright 2012-2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+import os.path
+import typing as T
+
+from .. import mlog
+from ..mesonlib import EnvironmentException, version_compare, OptionKey
+
+from .compilers import CompileCheckMode, Compiler, LibType
+
+if T.TYPE_CHECKING:
+    from ..envconfig import MachineInfo
+    from ..environment import Environment
+    from ..mesonlib import MachineChoice
+
+class ValaCompiler(Compiler):
+
+    language = 'vala'
+    id = 'valac'
+
+    def __init__(self, exelist: T.List[str], version: str, for_machine: MachineChoice,
+                 is_cross: bool, info: 'MachineInfo'):
+        super().__init__([], exelist, version, for_machine, info, is_cross=is_cross)
+        self.version = version
+        self.base_options = {OptionKey('b_colorout')}
+
+    def needs_static_linker(self) -> bool:
+        return False # Because Vala compiles into C.
+
+    def get_optimization_args(self, optimization_level: str) -> T.List[str]:
+        return []
+
+    def get_debug_args(self, is_debug: bool) -> T.List[str]:
+        return ['--debug'] if is_debug else []
+
+    def get_output_args(self, outputname: str) -> T.List[str]:
+        return [] # Because Vala compiles into C.
+
+    def get_compile_only_args(self) -> T.List[str]:
+        return [] # Because Vala compiles into C.
+
+    def get_pic_args(self) -> T.List[str]:
+        return []
+
+    def get_pie_args(self) -> T.List[str]:
+        return []
+
+    def get_pie_link_args(self) -> T.List[str]:
+        return []
+
+    def get_always_args(self) -> T.List[str]:
+        return ['-C']
+
+    def get_warn_args(self, level: str) -> T.List[str]:
+        return []
+
+    def get_no_warn_args(self) -> T.List[str]:
+        return ['--disable-warnings']
+
+    def get_werror_args(self) -> T.List[str]:
+        return ['--fatal-warnings']
+
+    def get_colorout_args(self, colortype: str) -> T.List[str]:
+        if version_compare(self.version, '>=0.37.1'):
+            return ['--color=' + colortype]
+        return []
+
+    def compute_parameters_with_absolute_paths(self, parameter_list: T.List[str],
+                                               build_dir: str) -> T.List[str]:
+        for idx, i in enumerate(parameter_list):
+            if i[:9] == '--girdir=':
+                parameter_list[idx] = i[:9] + os.path.normpath(os.path.join(build_dir, i[9:]))
+            if i[:10] == '--vapidir=':
+                parameter_list[idx] = i[:10] + os.path.normpath(os.path.join(build_dir, i[10:]))
+            if i[:13] == '--includedir=':
+                parameter_list[idx] = i[:13] + os.path.normpath(os.path.join(build_dir, i[13:]))
+            if i[:14] == '--metadatadir=':
+                parameter_list[idx] = i[:14] + os.path.normpath(os.path.join(build_dir, i[14:]))
+
+        return parameter_list
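+        # For illustration: with build_dir '/abs/build', '--vapidir=../vapi'
+        # is rewritten to '--vapidir=/abs/vapi'.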
+
+    def sanity_check(self, work_dir: str, environment: 'Environment') -> None:
+        code = 'class MesonSanityCheck : Object { }'
+        extra_flags: T.List[str] = []
+        extra_flags += environment.coredata.get_external_args(self.for_machine, self.language)
+        if self.is_cross:
+            extra_flags += self.get_compile_only_args()
+        else:
+            extra_flags += environment.coredata.get_external_link_args(self.for_machine, self.language)
+        with self.cached_compile(code, environment.coredata, extra_args=extra_flags, mode=CompileCheckMode.COMPILE) as p:
+            if p.returncode != 0:
+                msg = f'Vala compiler {self.name_string()!r} cannot compile programs'
+                raise EnvironmentException(msg)
+
+    def get_buildtype_args(self, buildtype: str) -> T.List[str]:
+        if buildtype in {'debug', 'debugoptimized', 'minsize'}:
+            return ['--debug']
+        return []
+
+    def find_library(self, libname: str, env: 'Environment', extra_dirs: T.List[str],
+                     libtype: LibType = LibType.PREFER_SHARED, lib_prefix_warning: bool = True) -> T.Optional[T.List[str]]:
+        if extra_dirs and isinstance(extra_dirs, str):
+            extra_dirs = [extra_dirs]
+        # Valac always looks in the default vapi dir, so only search there if
+        # no extra dirs are specified.
+        if not extra_dirs:
+            code = 'class MesonFindLibrary : Object { }'
+            args: T.List[str] = []
+            args += env.coredata.get_external_args(self.for_machine, self.language)
+            vapi_args = ['--pkg', libname]
+            args += vapi_args
+            with self.cached_compile(code, env.coredata, extra_args=args, mode=CompileCheckMode.COMPILE) as p:
+                if p.returncode == 0:
+                    return vapi_args
+        # Not found? Try to find the vapi file itself.
+        for d in extra_dirs:
+            vapi = os.path.join(d, libname + '.vapi')
+            if os.path.isfile(vapi):
+                return [vapi]
+        mlog.debug(f'Searched {extra_dirs!r} but {libname!r} wasn\'t found')
+        return None
+
+    def thread_flags(self, env: 'Environment') -> T.List[str]:
+        return []
+
+    def thread_link_flags(self, env: 'Environment') -> T.List[str]:
+        return []
diff --git a/vendored-meson/meson/mesonbuild/coredata.py b/vendored-meson/meson/mesonbuild/coredata.py
new file mode 100644
index 000000000000..a6178f04dc26
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/coredata.py
@@ -0,0 +1,1341 @@
+# Copyright 2013-2023 The Meson development team
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+import copy
+
+from . import mlog, mparser
+import pickle, os, uuid
+import sys
+from itertools import chain
+from pathlib import PurePath
+from collections import OrderedDict
+from .mesonlib import (
+    HoldableObject,
+    MesonException, EnvironmentException, MachineChoice, PerMachine,
+    PerMachineDefaultable, default_libdir, default_libexecdir,
+    default_prefix, default_datadir, default_includedir, default_infodir,
+    default_localedir, default_mandir, default_sbindir, default_sysconfdir,
+    split_args, OptionKey, OptionType, stringlistify,
+    pickle_load
+)
+from .wrap import WrapMode
+import ast
+import argparse
+import configparser
+import enum
+import shlex
+import typing as T
+
+if T.TYPE_CHECKING:
+    from . import dependencies
+    from .compilers.compilers import Compiler, CompileResult, RunResult, CompileCheckMode
+    from .dependencies.detect import TV_DepID
+    from .environment import Environment
+    from .mesonlib import OptionOverrideProxy, FileOrString
+    from .cmake.traceparser import CMakeCacheEntry
+
+    OptionDictType = T.Union[T.Dict[str, 'UserOption[T.Any]'], OptionOverrideProxy]
+    MutableKeyedOptionDictType = T.Dict['OptionKey', 'UserOption[T.Any]']
+    KeyedOptionDictType = T.Union[MutableKeyedOptionDictType, OptionOverrideProxy]
+    CompilerCheckCacheKey = T.Tuple[T.Tuple[str, ...], str, FileOrString, T.Tuple[str, ...], CompileCheckMode]
+    # code, args
+    RunCheckCacheKey = T.Tuple[str, T.Tuple[str, ...]]
+
+    # typeshed
+    StrOrBytesPath = T.Union[str, bytes, os.PathLike[str], os.PathLike[bytes]]
+
+# Check major_versions_differ() if changing versioning scheme.
+#
+# Pip requires that RCs are named like this: '0.1.0.rc1'
+# But the corresponding Git tag needs to be '0.1.0rc1'
+version = '1.2.99'
+
+# The next stable version when we are in dev. This is used to allow projects to
+# require meson version >=1.2.0 when using 1.1.99. FeatureNew won't warn about
+# a feature introduced in 1.2.0 when it is used with Meson 1.1.99.
+stable_version = version
+if stable_version.endswith('.99'):
+    stable_version_array = stable_version.split('.')
+    stable_version_array[-1] = '0'
+    stable_version_array[-2] = str(int(stable_version_array[-2]) + 1)
+    stable_version = '.'.join(stable_version_array)
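+# For illustration: with version = '1.2.99' this yields stable_version '1.3.0'.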
+
+backendlist = ['ninja', 'vs', 'vs2010', 'vs2012', 'vs2013', 'vs2015', 'vs2017', 'vs2019', 'vs2022', 'xcode', 'none']
+genvslitelist = ['vs2022']
+buildtypelist = ['plain', 'debug', 'debugoptimized', 'release', 'minsize', 'custom']
+
+DEFAULT_YIELDING = False
+
+# Can't bind this near the class method it seems, sadly.
+_T = T.TypeVar('_T')
+
+
+def get_genvs_default_buildtype_list() -> list[str]:
+    # just debug, debugoptimized, and release for now
+    # but this should probably be configurable through some extra option, alongside --genvslite.
+    return buildtypelist[1:-2]
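+    # i.e. ['debug', 'debugoptimized', 'release'] given buildtypelist above.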
+
+
+class MesonVersionMismatchException(MesonException):
+    '''Build directory was generated with a Meson version that is incompatible with the current version'''
+    def __init__(self, old_version: str, current_version: str) -> None:
+        super().__init__(f'Build directory has been generated with Meson version {old_version}, '
+                         f'which is incompatible with the current version {current_version}.')
+        self.old_version = old_version
+        self.current_version = current_version
+
+
+class UserOption(T.Generic[_T], HoldableObject):
+    def __init__(self, description: str, choices: T.Optional[T.Union[str, T.List[_T]]],
+                 yielding: bool,
+                 deprecated: T.Union[bool, str, T.Dict[str, str], T.List[str]] = False):
+        super().__init__()
+        self.choices = choices
+        self.description = description
+        if not isinstance(yielding, bool):
+            raise MesonException('Value of "yielding" must be a boolean.')
+        self.yielding = yielding
+        self.deprecated = deprecated
+        self.readonly = False
+
+    def listify(self, value: T.Any) -> T.List[T.Any]:
+        return [value]
+
+    def printable_value(self) -> T.Union[str, int, bool, T.List[T.Union[str, int, bool]]]:
+        assert isinstance(self.value, (str, int, bool, list))
+        return self.value
+
+    # Check that the input is a valid value and return the
+    # "cleaned" or "native" version. For example the Boolean
+    # option could take the string "true" and return True.
+    def validate_value(self, value: T.Any) -> _T:
+        raise RuntimeError('Derived option class did not override validate_value.')
+
+    def set_value(self, newvalue: T.Any) -> bool:
+        oldvalue = getattr(self, 'value', None)
+        self.value = self.validate_value(newvalue)
+        return self.value != oldvalue
+
+class UserStringOption(UserOption[str]):
+    def __init__(self, description: str, value: T.Any, yielding: bool = DEFAULT_YIELDING,
+                 deprecated: T.Union[bool, str, T.Dict[str, str], T.List[str]] = False):
+        super().__init__(description, None, yielding, deprecated)
+        self.set_value(value)
+
+    def validate_value(self, value: T.Any) -> str:
+        if not isinstance(value, str):
+            raise MesonException('Value "%s" for string option is not a string.' % str(value))
+        return value
+
+class UserBooleanOption(UserOption[bool]):
+    def __init__(self, description: str, value: bool, yielding: bool = DEFAULT_YIELDING,
+                 deprecated: T.Union[bool, str, T.Dict[str, str], T.List[str]] = False):
+        super().__init__(description, [True, False], yielding, deprecated)
+        self.set_value(value)
+
+    def __bool__(self) -> bool:
+        return self.value
+
+    def validate_value(self, value: T.Any) -> bool:
+        if isinstance(value, bool):
+            return value
+        if not isinstance(value, str):
+            raise MesonException(f'Value {value} cannot be converted to a boolean')
+        if value.lower() == 'true':
+            return True
+        if value.lower() == 'false':
+            return False
+        raise MesonException('Value %s is not boolean (true or false).' % value)
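+        # For illustration: validate_value('True') -> True,
+        # validate_value(False) -> False; validate_value('yes') raises.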
+
+class UserIntegerOption(UserOption[int]):
+    def __init__(self, description: str, value: T.Any, yielding: bool = DEFAULT_YIELDING,
+                 deprecated: T.Union[bool, str, T.Dict[str, str], T.List[str]] = False):
+        min_value, max_value, default_value = value
+        self.min_value = min_value
+        self.max_value = max_value
+        c: T.List[str] = []
+        if min_value is not None:
+            c.append('>=' + str(min_value))
+        if max_value is not None:
+            c.append('<=' + str(max_value))
+        choices = ', '.join(c)
+        super().__init__(description, choices, yielding, deprecated)
+        self.set_value(default_value)
+
+    def validate_value(self, value: T.Any) -> int:
+        if isinstance(value, str):
+            value = self.toint(value)
+        if not isinstance(value, int):
+            raise MesonException('New value for integer option is not an integer.')
+        if self.min_value is not None and value < self.min_value:
+            raise MesonException('New value %d is less than minimum value %d.' % (value, self.min_value))
+        if self.max_value is not None and value > self.max_value:
+            raise MesonException('New value %d is more than maximum value %d.' % (value, self.max_value))
+        return value
+
+    def toint(self, valuestring: str) -> int:
+        try:
+            return int(valuestring)
+        except ValueError:
+            raise MesonException('Value string "%s" is not convertible to an integer.' % valuestring)
+
+class OctalInt(int):
+    # NinjaBackend.get_user_option_args uses str() to convert it to a command line option
+    # UserUmaskOption.toint() uses int(str, 8) to convert it to an integer
+    # So we need to use oct instead of dec here if we do not want values to be misinterpreted.
+    def __str__(self):
+        return oct(int(self))
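+        # e.g. str(OctalInt(0o755)) == '0o755', not '493'.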
+
+class UserUmaskOption(UserIntegerOption, UserOption[T.Union[str, OctalInt]]):
+    def __init__(self, description: str, value: T.Any, yielding: bool = DEFAULT_YIELDING,
+                 deprecated: T.Union[bool, str, T.Dict[str, str], T.List[str]] = False):
+        super().__init__(description, (0, 0o777, value), yielding, deprecated)
+        self.choices = ['preserve', '0000-0777']
+
+    def printable_value(self) -> str:
+        if self.value == 'preserve':
+            return self.value
+        return format(self.value, '04o')
+
+    def validate_value(self, value: T.Any) -> T.Union[str, OctalInt]:
+        if value == 'preserve':
+            return 'preserve'
+        return OctalInt(super().validate_value(value))
+
+    def toint(self, valuestring: T.Union[str, OctalInt]) -> int:
+        try:
+            return int(valuestring, 8)
+        except ValueError as e:
+            raise MesonException(f'Invalid mode: {e}')
+
+class UserComboOption(UserOption[str]):
+    def __init__(self, description: str, choices: T.List[str], value: T.Any,
+                 yielding: bool = DEFAULT_YIELDING,
+                 deprecated: T.Union[bool, str, T.Dict[str, str], T.List[str]] = False):
+        super().__init__(description, choices, yielding, deprecated)
+        if not isinstance(self.choices, list):
+            raise MesonException('Combo choices must be an array.')
+        for i in self.choices:
+            if not isinstance(i, str):
+                raise MesonException('Combo choice elements must be strings.')
+        self.set_value(value)
+
+    def validate_value(self, value: T.Any) -> str:
+        if value not in self.choices:
+            if isinstance(value, bool):
+                _type = 'boolean'
+            elif isinstance(value, (int, float)):
+                _type = 'number'
+            else:
+                _type = 'string'
+            optionsstring = ', '.join([f'"{item}"' for item in self.choices])
+            raise MesonException('Value "{}" (of type "{}") for combo option "{}" is not one of the choices.'
+                                 ' Possible choices are (as string): {}.'.format(
+                                     value, _type, self.description, optionsstring))
+        return value
+
+class UserArrayOption(UserOption[T.List[str]]):
+    def __init__(self, description: str, value: T.Union[str, T.List[str]],
+                 split_args: bool = False, user_input: bool = False,
+                 allow_dups: bool = False, yielding: bool = DEFAULT_YIELDING,
+                 choices: T.Optional[T.List[str]] = None,
+                 deprecated: T.Union[bool, str, T.Dict[str, str], T.List[str]] = False):
+        super().__init__(description, choices if choices is not None else [], yielding, deprecated)
+        self.split_args = split_args
+        self.allow_dups = allow_dups
+        self.value = self.validate_value(value, user_input=user_input)
+
+    def listify(self, value: T.Union[str, T.List[str]], user_input: bool = True) -> T.List[str]:
+        # User input is for options defined on the command line (via -D
+        # options). Users can put their input in as a comma-separated
+        # string, but for defining options in meson_options.txt the format
+        # should match that of a combo.
+        if not user_input and isinstance(value, str) and not value.startswith('['):
+            raise MesonException('Value does not define an array: ' + value)
+
+        if isinstance(value, str):
+            if value.startswith('['):
+                try:
+                    newvalue = ast.literal_eval(value)
+                except ValueError:
+                    raise MesonException(f'malformed option {value}')
+            elif value == '':
+                newvalue = []
+            else:
+                if self.split_args:
+                    newvalue = split_args(value)
+                else:
+                    newvalue = [v.strip() for v in value.split(',')]
+        elif isinstance(value, list):
+            newvalue = value
+        else:
+            raise MesonException(f'"{value}" should be a string array, but it is not')
+        return newvalue
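+        # For illustration: listify('a,b') -> ['a', 'b'] for user input;
+        # listify("['a', 'b']", user_input=False) -> ['a', 'b']; '' -> [].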
+
+    def validate_value(self, value: T.Union[str, T.List[str]], user_input: bool = True) -> T.List[str]:
+        newvalue = self.listify(value, user_input)
+
+        if not self.allow_dups and len(set(newvalue)) != len(newvalue):
+            msg = 'Duplicated values in an array option are deprecated. ' \
+                  'This will become a hard error in the future.'
+            mlog.deprecation(msg)
+        for i in newvalue:
+            if not isinstance(i, str):
+                raise MesonException(f'String array element "{i!s}" is not a string.')
+        if self.choices:
+            bad = [x for x in newvalue if x not in self.choices]
+            if bad:
+                raise MesonException('Options "{}" are not in allowed choices: "{}"'.format(
+                    ', '.join(bad), ', '.join(self.choices)))
+        return newvalue
+
+    def extend_value(self, value: T.Union[str, T.List[str]]) -> None:
+        """Extend the value with an additional value."""
+        new = self.validate_value(value)
+        self.set_value(self.value + new)
+
+
+class UserFeatureOption(UserComboOption):
+    static_choices = ['enabled', 'disabled', 'auto']
+
+    def __init__(self, description: str, value: T.Any, yielding: bool = DEFAULT_YIELDING,
+                 deprecated: T.Union[bool, str, T.Dict[str, str], T.List[str]] = False):
+        super().__init__(description, self.static_choices, value, yielding, deprecated)
+        self.name: T.Optional[str] = None  # TODO: Refactor options to all store their name
+
+    def is_enabled(self) -> bool:
+        return self.value == 'enabled'
+
+    def is_disabled(self) -> bool:
+        return self.value == 'disabled'
+
+    def is_auto(self) -> bool:
+        return self.value == 'auto'
+
+
+class DependencyCacheType(enum.Enum):
+
+    OTHER = 0
+    PKG_CONFIG = 1
+    CMAKE = 2
+
+    @classmethod
+    def from_type(cls, dep: 'dependencies.Dependency') -> 'DependencyCacheType':
+        # As more types gain search overrides they'll need to be added here
+        if dep.type_name == 'pkgconfig':
+            return cls.PKG_CONFIG
+        if dep.type_name == 'cmake':
+            return cls.CMAKE
+        return cls.OTHER
+
+
+class DependencySubCache:
+
+    def __init__(self, type_: DependencyCacheType):
+        self.types = [type_]
+        self.__cache: T.Dict[T.Tuple[str, ...], 'dependencies.Dependency'] = {}
+
+    def __getitem__(self, key: T.Tuple[str, ...]) -> 'dependencies.Dependency':
+        return self.__cache[key]
+
+    def __setitem__(self, key: T.Tuple[str, ...], value: 'dependencies.Dependency') -> None:
+        self.__cache[key] = value
+
+    def __contains__(self, key: T.Tuple[str, ...]) -> bool:
+        return key in self.__cache
+
+    def values(self) -> T.Iterable['dependencies.Dependency']:
+        return self.__cache.values()
+
+
+class DependencyCache:
+
+    """Class that stores a cache of dependencies.
+
+    This class is meant to encapsulate the fact that we need multiple keys to
+    successfully look up a dependency, by providing a simple get/put interface.
+    """
+
+    def __init__(self, builtins: 'KeyedOptionDictType', for_machine: MachineChoice):
+        self.__cache: T.MutableMapping[TV_DepID, DependencySubCache] = OrderedDict()
+        self.__builtins = builtins
+        self.__pkg_conf_key = OptionKey('pkg_config_path', machine=for_machine)
+        self.__cmake_key = OptionKey('cmake_prefix_path', machine=for_machine)
+
+    def __calculate_subkey(self, type_: DependencyCacheType) -> T.Tuple[str, ...]:
+        data: T.Dict[DependencyCacheType, T.List[str]] = {
+            DependencyCacheType.PKG_CONFIG: stringlistify(self.__builtins[self.__pkg_conf_key].value),
+            DependencyCacheType.CMAKE: stringlistify(self.__builtins[self.__cmake_key].value),
+            DependencyCacheType.OTHER: [],
+        }
+        assert type_ in data, 'Someone forgot to update subkey calculations for a new type'
+        return tuple(data[type_])
+
+    def __iter__(self) -> T.Iterator['TV_DepID']:
+        return self.keys()
+
+    def put(self, key: 'TV_DepID', dep: 'dependencies.Dependency') -> None:
+        t = DependencyCacheType.from_type(dep)
+        if key not in self.__cache:
+            self.__cache[key] = DependencySubCache(t)
+        subkey = self.__calculate_subkey(t)
+        self.__cache[key][subkey] = dep
+
+    def get(self, key: 'TV_DepID') -> T.Optional['dependencies.Dependency']:
+        """Get a value from the cache.
+
+        If there is no cache entry then None will be returned.
+        """
+        try:
+            val = self.__cache[key]
+        except KeyError:
+            return None
+
+        for t in val.types:
+            subkey = self.__calculate_subkey(t)
+            try:
+                return val[subkey]
+            except KeyError:
+                pass
+        return None
+
+    def values(self) -> T.Iterator['dependencies.Dependency']:
+        for c in self.__cache.values():
+            yield from c.values()
+
+    def keys(self) -> T.Iterator['TV_DepID']:
+        return iter(self.__cache.keys())
+
+    def items(self) -> T.Iterator[T.Tuple['TV_DepID', T.List['dependencies.Dependency']]]:
+        for k, v in self.__cache.items():
+            vs: T.List[dependencies.Dependency] = []
+            for t in v.types:
+                subkey = self.__calculate_subkey(t)
+                if subkey in v:
+                    vs.append(v[subkey])
+            yield k, vs
+
+    def clear(self) -> None:
+        self.__cache.clear()
+
+
+class CMakeStateCache:
+    """Class that stores internal CMake compiler states.
+
+    This cache is used to reduce the startup overhead of CMake by caching
+    all internal CMake compiler variables.
+    """
+
+    def __init__(self) -> None:
+        self.__cache: T.Dict[str, T.Dict[str, T.List[str]]] = {}
+        self.cmake_cache: T.Dict[str, 'CMakeCacheEntry'] = {}
+
+    def __iter__(self) -> T.Iterator[T.Tuple[str, T.Dict[str, T.List[str]]]]:
+        return iter(self.__cache.items())
+
+    def items(self) -> T.Iterator[T.Tuple[str, T.Dict[str, T.List[str]]]]:
+        return iter(self.__cache.items())
+
+    def update(self, language: str, variables: T.Dict[str, T.List[str]]):
+        if language not in self.__cache:
+            self.__cache[language] = {}
+        self.__cache[language].update(variables)
+
+    @property
+    def languages(self) -> T.Set[str]:
+        return set(self.__cache.keys())
+
+
+# Can't bind this near the class method it seems, sadly.
+_V = T.TypeVar('_V')
+
+# This class contains all data that must persist over multiple
+# invocations of Meson. It is roughly the same thing as
+# cmakecache.
+
+class CoreData:
+
+    def __init__(self, options: argparse.Namespace, scratch_dir: str, meson_command: T.List[str]):
+        self.lang_guids = {
+            'default': '8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942',
+            'c': '8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942',
+            'cpp': '8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942',
+            'test': '3AC096D0-A1C2-E12C-1390-A8335801FDAB',
+            'directory': '2150E333-8FDC-42A3-9474-1A3956D46DE8',
+        }
+        self.test_guid = str(uuid.uuid4()).upper()
+        self.regen_guid = str(uuid.uuid4()).upper()
+        self.install_guid = str(uuid.uuid4()).upper()
+        self.meson_command = meson_command
+        self.target_guids = {}
+        self.version = version
+        self.options: 'MutableKeyedOptionDictType' = {}
+        self.cross_files = self.__load_config_files(options, scratch_dir, 'cross')
+        self.compilers: PerMachine[T.Dict[str, Compiler]] = PerMachine(OrderedDict(), OrderedDict())
+
+        # Set of subprojects that have already been initialized once, this is
+        # required to be stored and reloaded with the coredata, as we don't
+        # want to overwrite options for such subprojects.
+        self.initialized_subprojects: T.Set[str] = set()
+
+        # For host == build configurations these caches should be the same.
+        self.deps: PerMachine[DependencyCache] = PerMachineDefaultable.default(
+            self.is_cross_build(),
+            DependencyCache(self.options, MachineChoice.BUILD),
+            DependencyCache(self.options, MachineChoice.HOST))
+
+        self.compiler_check_cache: T.Dict['CompilerCheckCacheKey', 'CompileResult'] = OrderedDict()
+        self.run_check_cache: T.Dict['RunCheckCacheKey', 'RunResult'] = OrderedDict()
+
+        # CMake cache
+        self.cmake_cache: PerMachine[CMakeStateCache] = PerMachine(CMakeStateCache(), CMakeStateCache())
+
+        # Only to print a warning if it changes between Meson invocations.
+        self.config_files = self.__load_config_files(options, scratch_dir, 'native')
+        self.builtin_options_libdir_cross_fixup()
+        self.init_builtins('')
+
+    @staticmethod
+    def __load_config_files(options: argparse.Namespace, scratch_dir: str, ftype: str) -> T.List[str]:
+        # Need to try and make the passed filenames absolute because when the
+        # files are parsed later we'll have chdir()d.
+        if ftype == 'cross':
+            filenames = options.cross_file
+        else:
+            filenames = options.native_file
+
+        if not filenames:
+            return []
+
+        found_invalid: T.List[str] = []
+        missing: T.List[str] = []
+        real: T.List[str] = []
+        for i, f in enumerate(filenames):
+            f = os.path.expanduser(os.path.expandvars(f))
+            if os.path.exists(f):
+                if os.path.isfile(f):
+                    real.append(os.path.abspath(f))
+                    continue
+                elif os.path.isdir(f):
+                    found_invalid.append(os.path.abspath(f))
+                else:
+                    # in this case we've been passed some kind of pipe, copy
+                    # the contents of that file into the meson private (scratch)
+                    # directory so that it can be re-read when wiping/reconfiguring
+                    copy = os.path.join(scratch_dir, f'{uuid.uuid4()}.{ftype}.ini')
+                    with open(f, encoding='utf-8') as rf:
+                        with open(copy, 'w', encoding='utf-8') as wf:
+                            wf.write(rf.read())
+                    real.append(copy)
+
+                    # Also replace the command line argument, as the pipe
+                    # probably won't exist on reconfigure
+                    filenames[i] = copy
+                    continue
+            if sys.platform != 'win32':
+                paths = [
+                    os.environ.get('XDG_DATA_HOME', os.path.expanduser('~/.local/share')),
+                ] + os.environ.get('XDG_DATA_DIRS', '/usr/local/share:/usr/share').split(':')
+                for path in paths:
+                    path_to_try = os.path.join(path, 'meson', ftype, f)
+                    if os.path.isfile(path_to_try):
+                        real.append(path_to_try)
+                        break
+                else:
+                    missing.append(f)
+            else:
+                missing.append(f)
+
+        if missing:
+            if found_invalid:
+                mlog.log('Found invalid candidates for', ftype, 'file:', *found_invalid)
+            mlog.log('Could not find any valid candidate for', ftype, 'files:', *missing)
+            raise MesonException(f'Cannot find specified {ftype} file: {f}')
+        return real
+
+    def builtin_options_libdir_cross_fixup(self):
+        # By default set libdir to "lib" when cross compiling since
+        # getting the "system default" is always wrong on multiarch
+        # platforms as it gets a value like lib/x86_64-linux-gnu.
+        if self.cross_files:
+            BUILTIN_OPTIONS[OptionKey('libdir')].default = 'lib'
+
+    def sanitize_prefix(self, prefix: str) -> str:
+        prefix = os.path.expanduser(prefix)
+        if not os.path.isabs(prefix):
+            raise MesonException(f'prefix value {prefix!r} must be an absolute path')
+        if prefix.endswith('/') or prefix.endswith('\\'):
+            # On Windows we need to preserve the trailing slash if the
+            # string is of type 'C:\' because 'C:' is not an absolute path.
+            if len(prefix) == 3 and prefix[1] == ':':
+                pass
+            # If prefix is a single character, preserve it since it is
+            # the root directory.
+            elif len(prefix) == 1:
+                pass
+            else:
+                prefix = prefix[:-1]
+        return prefix
+
+    def sanitize_dir_option_value(self, prefix: str, option: OptionKey, value: T.Any) -> T.Any:
+        '''
+        If the option is an installation directory option, the value is an
+        absolute path, and the path resides within prefix, return the value
+        as a path relative to the prefix. Otherwise, return it as is.
+
+        This way everyone can do e.g. get_option('libdir') and usually get
+        the library directory relative to prefix, even though it really
+        should not be relied upon.
+        '''
+        try:
+            value = PurePath(value)
+        except TypeError:
+            return value
+        if option.name.endswith('dir') and value.is_absolute() and \
+           option not in BUILTIN_DIR_NOPREFIX_OPTIONS:
+            try:
+                # Try to relativize the path.
+                value = value.relative_to(prefix)
+            except ValueError:
+                # Path is not relative, let’s keep it as is.
+                pass
+            if '..' in value.parts:
+                raise MesonException(
+                    f'The value of the \'{option}\' option is \'{value}\' but '
+                    'directory options are not allowed to contain \'..\'.\n'
+                    f'If you need a path outside of the {prefix!r} prefix, '
+                    'please use an absolute path.'
+                )
+        # .as_posix() keeps the posix-like file separators Meson uses.
+        return value.as_posix()
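+        # For illustration: with prefix '/usr', a libdir of '/usr/lib/x86_64'
+        # is stored as 'lib/x86_64'; '/opt/lib' stays absolute and unchanged.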
+
+    def init_builtins(self, subproject: str) -> None:
+        # Create builtin options with default values
+        for key, opt in BUILTIN_OPTIONS.items():
+            self.add_builtin_option(self.options, key.evolve(subproject=subproject), opt)
+        for for_machine in iter(MachineChoice):
+            for key, opt in BUILTIN_OPTIONS_PER_MACHINE.items():
+                self.add_builtin_option(self.options, key.evolve(subproject=subproject, machine=for_machine), opt)
+
+    @staticmethod
+    def add_builtin_option(opts_map: 'MutableKeyedOptionDictType', key: OptionKey,
+                           opt: 'BuiltinOption') -> None:
+        if key.subproject:
+            if opt.yielding:
+                # This option is global and not per-subproject
+                return
+            value = opts_map[key.as_root()].value
+        else:
+            value = None
+        opts_map[key] = opt.init_option(key, value, default_prefix())
+
+    def init_backend_options(self, backend_name: str) -> None:
+        if backend_name == 'ninja':
+            self.options[OptionKey('backend_max_links')] = UserIntegerOption(
+                'Maximum number of linker processes to run or 0 for no '
+                'limit',
+                (0, None, 0))
+        elif backend_name.startswith('vs'):
+            self.options[OptionKey('backend_startup_project')] = UserStringOption(
+                'Default project to execute in Visual Studio',
+                '')
+
+    def get_option(self, key: OptionKey) -> T.Union[T.List[str], str, int, bool, WrapMode]:
+        try:
+            v = self.options[key].value
+            if key.name == 'wrap_mode':
+                return WrapMode[v]
+            return v
+        except KeyError:
+            pass
+
+        try:
+            v = self.options[key.as_root()]
+            if v.yielding:
+                if key.name == 'wrap_mode':
+                    return WrapMode[v.value]
+                return v.value
+        except KeyError:
+            pass
+
+        raise MesonException(f'Tried to get unknown builtin option {str(key)}')
+
+    def set_option(self, key: OptionKey, value, first_invocation: bool = False) -> bool:
+        dirty = False
+        if key.is_builtin():
+            if key.name == 'prefix':
+                value = self.sanitize_prefix(value)
+            else:
+                prefix = self.options[OptionKey('prefix')].value
+                value = self.sanitize_dir_option_value(prefix, key, value)
+
+        try:
+            opt = self.options[key]
+        except KeyError:
+            raise MesonException(f'Tried to set unknown builtin option {str(key)}')
+
+        if opt.deprecated is True:
+            mlog.deprecation(f'Option {key.name!r} is deprecated')
+        elif isinstance(opt.deprecated, list):
+            for v in opt.listify(value):
+                if v in opt.deprecated:
+                    mlog.deprecation(f'Option {key.name!r} value {v!r} is deprecated')
+        elif isinstance(opt.deprecated, dict):
+            def replace(v):
+                newvalue = opt.deprecated.get(v)
+                if newvalue is not None:
+                    mlog.deprecation(f'Option {key.name!r} value {v!r} is replaced by {newvalue!r}')
+                    return newvalue
+                return v
+            newvalue = [replace(v) for v in opt.listify(value)]
+            value = ','.join(newvalue)
+        elif isinstance(opt.deprecated, str):
+            # Option is deprecated and replaced by another. Note that a project
+            # option could be replaced by a built-in or module option, which is
+            # why we use OptionKey.from_string(newname) instead of
+            # key.evolve(newname). We set the value on both the old and new names,
+            # assuming they accept the same value. That can be achieved, for
+            # example, by adding the values of the old option as deprecated
+            # values on the new option, e.g. when a boolean option is replaced
+            # by a feature option with a different name.
+            newname = opt.deprecated
+            newkey = OptionKey.from_string(newname).evolve(subproject=key.subproject)
+            mlog.deprecation(f'Option {key.name!r} is replaced by {newname!r}')
+            dirty |= self.set_option(newkey, value, first_invocation)
+
+        changed = opt.set_value(value)
+        if changed and opt.readonly and not first_invocation:
+            raise MesonException(f'Tried to modify read-only option {str(key)!r}')
+        dirty |= changed
+
+        if key.name == 'buildtype':
+            dirty |= self._set_others_from_buildtype(value)
+
+        return dirty
+
+    def clear_cache(self) -> None:
+        self.deps.host.clear()
+        self.deps.build.clear()
+        self.compiler_check_cache.clear()
+        self.run_check_cache.clear()
+
+    def get_nondefault_buildtype_args(self) -> T.List[T.Union[T.Tuple[str, str, str], T.Tuple[str, bool, bool]]]:
+        result: T.List[T.Union[T.Tuple[str, str, str], T.Tuple[str, bool, bool]]] = []
+        value = self.options[OptionKey('buildtype')].value
+        if value == 'plain':
+            opt = 'plain'
+            debug = False
+        elif value == 'debug':
+            opt = '0'
+            debug = True
+        elif value == 'debugoptimized':
+            opt = '2'
+            debug = True
+        elif value == 'release':
+            opt = '3'
+            debug = False
+        elif value == 'minsize':
+            opt = 's'
+            debug = True
+        else:
+            assert value == 'custom'
+            return []
+        actual_opt = self.options[OptionKey('optimization')].value
+        actual_debug = self.options[OptionKey('debug')].value
+        if actual_opt != opt:
+            result.append(('optimization', actual_opt, opt))
+        if actual_debug != debug:
+            result.append(('debug', actual_debug, debug))
+        return result
+
+    def _set_others_from_buildtype(self, value: str) -> bool:
+        dirty = False
+
+        if value == 'plain':
+            opt = 'plain'
+            debug = False
+        elif value == 'debug':
+            opt = '0'
+            debug = True
+        elif value == 'debugoptimized':
+            opt = '2'
+            debug = True
+        elif value == 'release':
+            opt = '3'
+            debug = False
+        elif value == 'minsize':
+            opt = 's'
+            debug = True
+        else:
+            assert value == 'custom'
+            return False
+
+        dirty |= self.options[OptionKey('optimization')].set_value(opt)
+        dirty |= self.options[OptionKey('debug')].set_value(debug)
+
+        return dirty
+
+    @staticmethod
+    def is_per_machine_option(optname: OptionKey) -> bool:
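+        # e.g. 'pkg_config_path' and per-language options such as
+        # OptionKey('args', lang='c') are tracked separately for the build
+        # and host machines.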
+        if optname.name in BUILTIN_OPTIONS_PER_MACHINE:
+            return True
+        return optname.lang is not None
+
+    def get_external_args(self, for_machine: MachineChoice, lang: str) -> T.List[str]:
+        return self.options[OptionKey('args', machine=for_machine, lang=lang)].value
+
+    def get_external_link_args(self, for_machine: MachineChoice, lang: str) -> T.List[str]:
+        return self.options[OptionKey('link_args', machine=for_machine, lang=lang)].value
+
+    def update_project_options(self, options: 'MutableKeyedOptionDictType') -> None:
+        for key, value in options.items():
+            if not key.is_project():
+                continue
+            if key not in self.options:
+                self.options[key] = value
+                continue
+
+            oldval = self.options[key]
+            if type(oldval) != type(value):
+                self.options[key] = value
+            elif oldval.choices != value.choices:
+                # If the choices have changed, use the new option object, but
+                # attempt to keep the old value. If the old value is no longer
+                # valid, keep the new default and warn.
+                self.options[key] = value
+                try:
+                    value.set_value(oldval.value)
+                except MesonException:
+                    mlog.warning(f'Old value(s) of {key} are no longer valid, resetting to default ({value.value}).',
+                                 fatal=False)
+
+    def is_cross_build(self, when_building_for: MachineChoice = MachineChoice.HOST) -> bool:
+        if when_building_for == MachineChoice.BUILD:
+            return False
+        return len(self.cross_files) > 0
+
+    def copy_build_options_from_regular_ones(self) -> bool:
+        dirty = False
+        assert not self.is_cross_build()
+        for k in BUILTIN_OPTIONS_PER_MACHINE:
+            o = self.options[k]
+            dirty |= self.options[k.as_build()].set_value(o.value)
+        for bk, bv in self.options.items():
+            if bk.machine is MachineChoice.BUILD:
+                hk = bk.as_host()
+                try:
+                    hv = self.options[hk]
+                    dirty |= bv.set_value(hv.value)
+                except KeyError:
+                    continue
+
+        return dirty
+
+    def set_options(self, options: T.Dict[OptionKey, T.Any], subproject: str = '', first_invocation: bool = False) -> bool:
+        dirty = False
+        if not self.is_cross_build():
+            options = {k: v for k, v in options.items() if k.machine is not MachineChoice.BUILD}
+        # Set prefix first because it's needed to sanitize other options
+        pfk = OptionKey('prefix')
+        if pfk in options:
+            prefix = self.sanitize_prefix(options[pfk])
+            dirty |= self.options[OptionKey('prefix')].set_value(prefix)
+            for key in BUILTIN_DIR_NOPREFIX_OPTIONS:
+                if key not in options:
+                    dirty |= self.options[key].set_value(BUILTIN_OPTIONS[key].prefixed_default(key, prefix))
+
+        unknown_options: T.List[OptionKey] = []
+        for k, v in options.items():
+            if k == pfk:
+                continue
+            elif k in self.options:
+                dirty |= self.set_option(k, v, first_invocation)
+            elif k.machine != MachineChoice.BUILD and k.type != OptionType.COMPILER:
+                unknown_options.append(k)
+        if unknown_options:
+            unknown_options_str = ', '.join(sorted(str(s) for s in unknown_options))
+            sub = f'In subproject {subproject}: ' if subproject else ''
+            raise MesonException(f'{sub}Unknown options: "{unknown_options_str}"')
+
+        if not self.is_cross_build():
+            dirty |= self.copy_build_options_from_regular_ones()
+
+        return dirty
+
+    def set_default_options(self, default_options: T.MutableMapping[OptionKey, str], subproject: str, env: 'Environment') -> None:
+        # Main project can set default options on subprojects, but subprojects
+        # can only set default options on themselves.
+        # Preserve order: if env.options has 'buildtype' it must come after
+        # 'optimization' if it is in default_options.
+        options: T.MutableMapping[OptionKey, T.Any] = OrderedDict()
+        for k, v in default_options.items():
+            if not subproject or k.subproject == subproject:
+                options[k] = v
+        options.update(env.options)
+        env.options = options
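+        # e.g. with default_options = {'optimization': '2'} and the user
+        # passing -Dbuildtype=debug, the buildtype entry (coming from
+        # env.options) lands later in the mapping, so the values it derives
+        # are applied after the project's optimization default.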
+
+        # Create a subset of options, keeping only project and builtin
+        # options for this subproject.
+        # Language and backend specific options will be set later when adding
+        # languages and setting the backend (builtin options must be set first
+        # to know which backend we'll use).
+        options = OrderedDict()
+
+        for k, v in env.options.items():
+            # If this is a subproject, don't use other subproject options
+            if k.subproject and k.subproject != subproject:
+                continue
+            # If the option is a builtin and is yielding, it may not be set per subproject.
+            #
+            # Always test this using the HOST machine, as many builtin options
+            # are not valid for the BUILD machine, but the yielding value does
+            # not differ between them even when they are valid for both.
+            if subproject and k.is_builtin() and self.options[k.evolve(subproject='', machine=MachineChoice.HOST)].yielding:
+                continue
+            # Skip base, compiler, and backend options, they are handled when
+            # adding languages and setting backend.
+            if k.type in {OptionType.COMPILER, OptionType.BACKEND, OptionType.BASE}:
+                continue
+            options[k] = v
+
+        self.set_options(options, subproject=subproject, first_invocation=env.first_invocation)
+
+    def add_compiler_options(self, options: 'MutableKeyedOptionDictType', lang: str, for_machine: MachineChoice,
+                             env: 'Environment') -> None:
+        for k, o in options.items():
+            value = env.options.get(k)
+            if value is not None:
+                o.set_value(value)
+            self.options.setdefault(k, o)
+
+    def add_lang_args(self, lang: str, comp: T.Type['Compiler'],
+                      for_machine: MachineChoice, env: 'Environment') -> None:
+        """Add global language arguments that are needed before compiler/linker detection."""
+        from .compilers import compilers
+        # These options are all new at this point, because the compiler is
+        # responsible for adding its own options, thus calling
+        # `self.options.update()` is perfectly safe.
+        self.options.update(compilers.get_global_options(lang, comp, for_machine, env))
+
+    def process_new_compiler(self, lang: str, comp: 'Compiler', env: 'Environment') -> None:
+        from . import compilers
+
+        self.add_compiler_options(comp.get_options(), lang, comp.for_machine, env)
+
+        enabled_opts: T.List[OptionKey] = []
+        for key in comp.base_options:
+            if key in self.options:
+                continue
+            oobj = copy.deepcopy(compilers.base_options[key])
+            if key in env.options:
+                oobj.set_value(env.options[key])
+                enabled_opts.append(key)
+            self.options[key] = oobj
+        self.emit_base_options_warnings(enabled_opts)
+
+    def emit_base_options_warnings(self, enabled_opts: T.List[OptionKey]) -> None:
+        if OptionKey('b_bitcode') in enabled_opts:
+            mlog.warning('Base option \'b_bitcode\' is enabled, which is incompatible with many linker options. Incompatible options such as \'b_asneeded\' have been disabled.', fatal=False)
+            mlog.warning('Please see https://mesonbuild.com/Builtin-options.html#Notes_about_Apple_Bitcode_support for more details.', fatal=False)
+
+class CmdLineFileParser(configparser.ConfigParser):
+    def __init__(self) -> None:
+        # We don't want ':' as key delimiter, otherwise it would break when
+        # storing subproject options like "subproject:option=value"
+        super().__init__(delimiters=['='], interpolation=None)
+
+    def read(self, filenames: T.Union['StrOrBytesPath', T.Iterable['StrOrBytesPath']], encoding: T.Optional[str] = 'utf-8') -> T.List[str]:
+        return super().read(filenames, encoding)
+
+    def optionxform(self, optionstr: str) -> str:
+        # Don't call str.lower() on keys
+        return optionstr
+
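+# A minimal sketch of the machine-file syntax this parser accepts; the
+# section and variable names below are illustrative only:
+#
+#   [constants]
+#   toolchain = '/opt/cross'
+#
+#   [binaries]
+#   c = toolchain / 'bin' / 'gcc'
+#
+# Values are parsed with Meson's own parser, so strings, booleans, numbers,
+# arrays, and '+' / '/' expressions over constants are all understood.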
+class MachineFileParser():
+    def __init__(self, filenames: T.List[str]) -> None:
+        self.parser = CmdLineFileParser()
+        self.constants: T.Dict[str, T.Union[str, bool, int, T.List[str]]] = {'True': True, 'False': False}
+        self.sections: T.Dict[str, T.Dict[str, T.Union[str, bool, int, T.List[str]]]] = {}
+
+        try:
+            self.parser.read(filenames)
+        except configparser.Error as e:
+            raise EnvironmentException(f'Malformed cross or native file: {e}')
+
+        # Parse [constants] first so they can be used in other sections
+        if self.parser.has_section('constants'):
+            self.constants.update(self._parse_section('constants'))
+
+        for s in self.parser.sections():
+            if s == 'constants':
+                continue
+            self.sections[s] = self._parse_section(s)
+
+    def _parse_section(self, s: str) -> T.Dict[str, T.Union[str, bool, int, T.List[str]]]:
+        self.scope = self.constants.copy()
+        section: T.Dict[str, T.Union[str, bool, int, T.List[str]]] = {}
+        for entry, value in self.parser.items(s):
+            if ' ' in entry or '\t' in entry or "'" in entry or '"' in entry:
+                raise EnvironmentException(f'Malformed variable name {entry!r} in machine file.')
+            # Windows paths...
+            value = value.replace('\\', '\\\\')
+            try:
+                ast = mparser.Parser(value, 'machinefile').parse()
+                res = self._evaluate_statement(ast.lines[0])
+            except MesonException:
+                raise EnvironmentException(f'Malformed value in machine file variable {entry!r}.')
+            except KeyError as e:
+                raise EnvironmentException(f'Undefined constant {e.args[0]!r} in machine file variable {entry!r}.')
+            section[entry] = res
+            self.scope[entry] = res
+        return section
+
+    def _evaluate_statement(self, node: mparser.BaseNode) -> T.Union[str, bool, int, T.List[str]]:
+        if isinstance(node, (mparser.StringNode)):
+            return node.value
+        elif isinstance(node, mparser.BooleanNode):
+            return node.value
+        elif isinstance(node, mparser.NumberNode):
+            return node.value
+        elif isinstance(node, mparser.ArrayNode):
+            # TODO: This is where recursive types would come in handy
+            return [self._evaluate_statement(arg) for arg in node.args.arguments]
+        elif isinstance(node, mparser.IdNode):
+            return self.scope[node.value]
+        elif isinstance(node, mparser.ArithmeticNode):
+            l = self._evaluate_statement(node.left)
+            r = self._evaluate_statement(node.right)
+            if node.operation == 'add':
+                if (isinstance(l, str) and isinstance(r, str)) or \
+                   (isinstance(l, list) and isinstance(r, list)):
+                    return l + r
+            elif node.operation == 'div':
+                if isinstance(l, str) and isinstance(r, str):
+                    return os.path.join(l, r)
+        raise EnvironmentException('Unsupported node type')
+
+def parse_machine_files(filenames: T.List[str]):
+    parser = MachineFileParser(filenames)
+    return parser.sections
+
+def get_cmd_line_file(build_dir: str) -> str:
+    return os.path.join(build_dir, 'meson-private', 'cmd_line.txt')
+
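+# A sketch of what cmd_line.txt can look like on disk (option values are
+# illustrative):
+#
+#   [options]
+#   buildtype=release
+#   prefix=/usr/local
+#
+#   [properties]
+#   cross_file=['cross.ini']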
+def read_cmd_line_file(build_dir: str, options: argparse.Namespace) -> None:
+    filename = get_cmd_line_file(build_dir)
+    if not os.path.isfile(filename):
+        return
+
+    config = CmdLineFileParser()
+    config.read(filename)
+
+    # Do a copy because config is not really a dict. options.cmd_line_options
+    # overrides values from the file.
+    d = {OptionKey.from_string(k): v for k, v in config['options'].items()}
+    d.update(options.cmd_line_options)
+    options.cmd_line_options = d
+
+    properties = config['properties']
+    if not options.cross_file:
+        options.cross_file = ast.literal_eval(properties.get('cross_file', '[]'))
+    if not options.native_file:
+        # This will be a string in the form: "['first', 'second', ...]", use
+        # literal_eval to get it into the list of strings.
+        options.native_file = ast.literal_eval(properties.get('native_file', '[]'))
+
+def write_cmd_line_file(build_dir: str, options: argparse.Namespace) -> None:
+    filename = get_cmd_line_file(build_dir)
+    config = CmdLineFileParser()
+
+    properties: OrderedDict[str, str] = OrderedDict()
+    if options.cross_file:
+        properties['cross_file'] = options.cross_file
+    if options.native_file:
+        properties['native_file'] = options.native_file
+
+    config['options'] = {str(k): str(v) for k, v in options.cmd_line_options.items()}
+    config['properties'] = properties
+    with open(filename, 'w', encoding='utf-8') as f:
+        config.write(f)
+
+def update_cmd_line_file(build_dir: str, options: argparse.Namespace):
+    filename = get_cmd_line_file(build_dir)
+    config = CmdLineFileParser()
+    config.read(filename)
+    config['options'].update({str(k): str(v) for k, v in options.cmd_line_options.items()})
+    with open(filename, 'w', encoding='utf-8') as f:
+        config.write(f)
+
+def format_cmd_line_options(options: argparse.Namespace) -> str:
+    cmdline = ['-D{}={}'.format(str(k), v) for k, v in options.cmd_line_options.items()]
+    if options.cross_file:
+        cmdline += [f'--cross-file={f}' for f in options.cross_file]
+    if options.native_file:
+        cmdline += [f'--native-file={f}' for f in options.native_file]
+    return ' '.join([shlex.quote(x) for x in cmdline])
+
+def major_versions_differ(v1: str, v2: str) -> bool:
+    v1_major, v1_minor = v1.rsplit('.', 1)
+    v2_major, v2_minor = v2.rsplit('.', 1)
+    # Major versions differ, or one is a development version but not the other.
+    return v1_major != v2_major or ('99' in {v1_minor, v2_minor} and v1_minor != v2_minor)
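+# Illustrative cases (a minor component of '99' marks a development build):
+#   major_versions_differ('1.3.0', '1.3.2')  -> False
+#   major_versions_differ('1.3.0', '1.4.0')  -> True
+#   major_versions_differ('1.3.99', '1.3.0') -> True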
+
+def load(build_dir: str) -> CoreData:
+    filename = os.path.join(build_dir, 'meson-private', 'coredata.dat')
+    return pickle_load(filename, 'Coredata', CoreData)
+
+
+def save(obj: CoreData, build_dir: str) -> str:
+    filename = os.path.join(build_dir, 'meson-private', 'coredata.dat')
+    prev_filename = filename + '.prev'
+    tempfilename = filename + '~'
+    if major_versions_differ(obj.version, version):
+        raise MesonException('Fatal version mismatch corruption.')
+    if os.path.exists(filename):
+        import shutil
+        shutil.copyfile(filename, prev_filename)
+    with open(tempfilename, 'wb') as f:
+        pickle.dump(obj, f)
+        f.flush()
+        os.fsync(f.fileno())
+    os.replace(tempfilename, filename)
+    return filename
+
+
+def register_builtin_arguments(parser: argparse.ArgumentParser) -> None:
+    for n, b in BUILTIN_OPTIONS.items():
+        b.add_to_argparse(str(n), parser, '')
+    for n, b in BUILTIN_OPTIONS_PER_MACHINE.items():
+        b.add_to_argparse(str(n), parser, ' (just for host machine)')
+        b.add_to_argparse(str(n.as_build()), parser, ' (just for build machine)')
+    parser.add_argument('-D', action='append', dest='projectoptions', default=[], metavar="option",
+                        help='Set the value of an option, can be used several times to set multiple options.')
+
+def create_options_dict(options: T.List[str], subproject: str = '') -> T.Dict[OptionKey, str]:
+    result: T.OrderedDict[OptionKey, str] = OrderedDict()
+    for o in options:
+        try:
+            (key, value) = o.split('=', 1)
+        except ValueError:
+            raise MesonException(f'Option {o!r} must have a value separated by equals sign.')
+        k = OptionKey.from_string(key)
+        if subproject:
+            k = k.evolve(subproject=subproject)
+        result[k] = value
+    return result
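+# For example, create_options_dict(['buildtype=release', 'sub:werror=true'])
+# yields, informally, {OptionKey('buildtype'): 'release',
+# OptionKey('werror', subproject='sub'): 'true'}.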
+
+def parse_cmd_line_options(args: argparse.Namespace) -> None:
+    args.cmd_line_options = create_options_dict(args.projectoptions)
+
+    # Merge builtin options set with --option into the dict.
+    for key in chain(
+            BUILTIN_OPTIONS.keys(),
+            (k.as_build() for k in BUILTIN_OPTIONS_PER_MACHINE.keys()),
+            BUILTIN_OPTIONS_PER_MACHINE.keys(),
+    ):
+        name = str(key)
+        value = getattr(args, name, None)
+        if value is not None:
+            if key in args.cmd_line_options:
+                cmdline_name = BuiltinOption.argparse_name_to_arg(name)
+                raise MesonException(
+                    f'Got argument {name} as both -D{name} and {cmdline_name}. Pick one.')
+            args.cmd_line_options[key] = value
+            delattr(args, name)
+
+
+_U = T.TypeVar('_U', bound=UserOption[_T])
+
+class BuiltinOption(T.Generic[_T, _U]):
+
+    """Class for a builtin option type.
+
+    There are some cases that are not fully supported yet.
+    """
+
+    def __init__(self, opt_type: T.Type[_U], description: str, default: T.Any, yielding: bool = True, *,
+                 choices: T.Any = None, readonly: bool = False):
+        self.opt_type = opt_type
+        self.description = description
+        self.default = default
+        self.choices = choices
+        self.yielding = yielding
+        self.readonly = readonly
+
+    def init_option(self, name: 'OptionKey', value: T.Optional[T.Any], prefix: str) -> _U:
+        """Create an instance of opt_type and return it."""
+        if value is None:
+            value = self.prefixed_default(name, prefix)
+        keywords = {'yielding': self.yielding, 'value': value}
+        if self.choices:
+            keywords['choices'] = self.choices
+        o = self.opt_type(self.description, **keywords)
+        o.readonly = self.readonly
+        return o
+
+    def _argparse_action(self) -> T.Optional[str]:
+        # If the type is a boolean, the presence of the argument in --foo form
+        # is to enable it. Disabling happens by using -Dfoo=false, which is
+        # parsed under `args.projectoptions` and does not hit this codepath.
+        if isinstance(self.default, bool):
+            return 'store_true'
+        return None
+
+    def _argparse_choices(self) -> T.Any:
+        if self.opt_type is UserBooleanOption:
+            return [True, False]
+        elif self.opt_type is UserFeatureOption:
+            return UserFeatureOption.static_choices
+        return self.choices
+
+    @staticmethod
+    def argparse_name_to_arg(name: str) -> str:
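+        # e.g. 'warning_level' -> '--warnlevel';
+        # 'default_library' -> '--default-library'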
+        if name == 'warning_level':
+            return '--warnlevel'
+        else:
+            return '--' + name.replace('_', '-')
+
+    def prefixed_default(self, name: 'OptionKey', prefix: str = '') -> T.Any:
+        if self.opt_type in [UserComboOption, UserIntegerOption]:
+            return self.default
+        try:
+            return BUILTIN_DIR_NOPREFIX_OPTIONS[name][prefix]
+        except KeyError:
+            pass
+        return self.default
+
+    def add_to_argparse(self, name: str, parser: argparse.ArgumentParser, help_suffix: str) -> None:
+        kwargs = OrderedDict()
+
+        c = self._argparse_choices()
+        b = self._argparse_action()
+        h = self.description
+        if not b:
+            h = '{} (default: {}).'.format(h.rstrip('.'), self.prefixed_default(name))
+        else:
+            kwargs['action'] = b
+        if c and not b:
+            kwargs['choices'] = c
+        kwargs['default'] = argparse.SUPPRESS
+        kwargs['dest'] = name
+
+        cmdline_name = self.argparse_name_to_arg(name)
+        parser.add_argument(cmdline_name, help=h + help_suffix, **kwargs)
+
+
+# Update `docs/markdown/Builtin-options.md` after changing the options below
+# Also update mesonlib._BUILTIN_NAMES. See the comment there for why this is required.
+BUILTIN_DIR_OPTIONS: 'MutableKeyedOptionDictType' = OrderedDict([
+    (OptionKey('prefix'),          BuiltinOption(UserStringOption, 'Installation prefix', default_prefix())),
+    (OptionKey('bindir'),          BuiltinOption(UserStringOption, 'Executable directory', 'bin')),
+    (OptionKey('datadir'),         BuiltinOption(UserStringOption, 'Data file directory', default_datadir())),
+    (OptionKey('includedir'),      BuiltinOption(UserStringOption, 'Header file directory', default_includedir())),
+    (OptionKey('infodir'),         BuiltinOption(UserStringOption, 'Info page directory', default_infodir())),
+    (OptionKey('libdir'),          BuiltinOption(UserStringOption, 'Library directory', default_libdir())),
+    (OptionKey('licensedir'),      BuiltinOption(UserStringOption, 'Licenses directory', '')),
+    (OptionKey('libexecdir'),      BuiltinOption(UserStringOption, 'Library executable directory', default_libexecdir())),
+    (OptionKey('localedir'),       BuiltinOption(UserStringOption, 'Locale data directory', default_localedir())),
+    (OptionKey('localstatedir'),   BuiltinOption(UserStringOption, 'Localstate data directory', 'var')),
+    (OptionKey('mandir'),          BuiltinOption(UserStringOption, 'Manual page directory', default_mandir())),
+    (OptionKey('sbindir'),         BuiltinOption(UserStringOption, 'System executable directory', default_sbindir())),
+    (OptionKey('sharedstatedir'),  BuiltinOption(UserStringOption, 'Architecture-independent data directory', 'com')),
+    (OptionKey('sysconfdir'),      BuiltinOption(UserStringOption, 'Sysconf data directory', default_sysconfdir())),
+])
+
+BUILTIN_CORE_OPTIONS: 'MutableKeyedOptionDictType' = OrderedDict([
+    (OptionKey('auto_features'),   BuiltinOption(UserFeatureOption, "Override value of all 'auto' features", 'auto')),
+    (OptionKey('backend'),         BuiltinOption(UserComboOption, 'Backend to use', 'ninja', choices=backendlist,
+                                                 readonly=True)),
+    (OptionKey('genvslite'),
+     BuiltinOption(
+         UserComboOption,
+         'Setup multiple buildtype-suffixed ninja-backend build directories, '
+         'and a [builddir]_vs containing a Visual Studio meta-backend with multiple configurations that calls into them',
+         'vs2022',
+         choices=genvslitelist)
+     ),
+    (OptionKey('buildtype'),       BuiltinOption(UserComboOption, 'Build type to use', 'debug',
+                                                 choices=buildtypelist)),
+    (OptionKey('debug'),           BuiltinOption(UserBooleanOption, 'Enable debug symbols and other information', True)),
+    (OptionKey('default_library'), BuiltinOption(UserComboOption, 'Default library type', 'shared', choices=['shared', 'static', 'both'],
+                                                 yielding=False)),
+    (OptionKey('errorlogs'),       BuiltinOption(UserBooleanOption, "Whether to print the logs from failing tests", True)),
+    (OptionKey('install_umask'),   BuiltinOption(UserUmaskOption, 'Default umask to apply on permissions of installed files', '022')),
+    (OptionKey('layout'),          BuiltinOption(UserComboOption, 'Build directory layout', 'mirror', choices=['mirror', 'flat'])),
+    (OptionKey('optimization'),    BuiltinOption(UserComboOption, 'Optimization level', '0', choices=['plain', '0', 'g', '1', '2', '3', 's'])),
+    (OptionKey('prefer_static'),   BuiltinOption(UserBooleanOption, 'Whether to try static linking before shared linking', False)),
+    (OptionKey('stdsplit'),        BuiltinOption(UserBooleanOption, 'Split stdout and stderr in test logs', True)),
+    (OptionKey('strip'),           BuiltinOption(UserBooleanOption, 'Strip targets on install', False)),
+    (OptionKey('unity'),           BuiltinOption(UserComboOption, 'Unity build', 'off', choices=['on', 'off', 'subprojects'])),
+    (OptionKey('unity_size'),      BuiltinOption(UserIntegerOption, 'Unity block size', (2, None, 4))),
+    (OptionKey('warning_level'),   BuiltinOption(UserComboOption, 'Compiler warning level to use', '1', choices=['0', '1', '2', '3', 'everything'], yielding=False)),
+    (OptionKey('werror'),          BuiltinOption(UserBooleanOption, 'Treat warnings as errors', False, yielding=False)),
+    (OptionKey('wrap_mode'),       BuiltinOption(UserComboOption, 'Wrap mode', 'default', choices=['default', 'nofallback', 'nodownload', 'forcefallback', 'nopromote'])),
+    (OptionKey('force_fallback_for'), BuiltinOption(UserArrayOption, 'Force fallback for those subprojects', [])),
+    (OptionKey('vsenv'),           BuiltinOption(UserBooleanOption, 'Activate Visual Studio environment', False, readonly=True)),
+
+    # Pkgconfig module
+    (OptionKey('relocatable', module='pkgconfig'),
+     BuiltinOption(UserBooleanOption, 'Generate pkgconfig files as relocatable', False)),
+
+    # Python module
+    (OptionKey('bytecompile', module='python'),
+     BuiltinOption(UserIntegerOption, 'Whether to compile bytecode', (-1, 2, 0))),
+    (OptionKey('install_env', module='python'),
+     BuiltinOption(UserComboOption, 'Which python environment to install to', 'prefix', choices=['auto', 'prefix', 'system', 'venv'])),
+    (OptionKey('platlibdir', module='python'),
+     BuiltinOption(UserStringOption, 'Directory for site-specific, platform-specific files.', '')),
+    (OptionKey('purelibdir', module='python'),
+     BuiltinOption(UserStringOption, 'Directory for site-specific, non-platform-specific files.', '')),
+])
+
+BUILTIN_OPTIONS = OrderedDict(chain(BUILTIN_DIR_OPTIONS.items(), BUILTIN_CORE_OPTIONS.items()))
+
+BUILTIN_OPTIONS_PER_MACHINE: 'MutableKeyedOptionDictType' = OrderedDict([
+    (OptionKey('pkg_config_path'), BuiltinOption(UserArrayOption, 'List of additional paths for pkg-config to search', [])),
+    (OptionKey('cmake_prefix_path'), BuiltinOption(UserArrayOption, 'List of additional prefixes for cmake to search', [])),
+])
+
+# Special prefix-dependent defaults for installation directories that reside in
+# a path outside of the prefix in FHS and common usage.
+BUILTIN_DIR_NOPREFIX_OPTIONS: T.Dict[OptionKey, T.Dict[str, str]] = {
+    OptionKey('sysconfdir'):     {'/usr': '/etc'},
+    OptionKey('localstatedir'):  {'/usr': '/var',     '/usr/local': '/var/local'},
+    OptionKey('sharedstatedir'): {'/usr': '/var/lib', '/usr/local': '/var/local/lib'},
+    OptionKey('platlibdir', module='python'): {},
+    OptionKey('purelibdir', module='python'): {},
+}
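+# For example, with prefix '/usr' the effective default sysconfdir is '/etc'
+# rather than '/usr/etc'; BuiltinOption.prefixed_default() consults this
+# table before falling back to the regular default.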
+
+FORBIDDEN_TARGET_NAMES = frozenset({
+    'clean',
+    'clean-ctlist',
+    'clean-gcno',
+    'clean-gcda',
+    'coverage',
+    'coverage-text',
+    'coverage-xml',
+    'coverage-html',
+    'phony',
+    'PHONY',
+    'all',
+    'test',
+    'benchmark',
+    'install',
+    'uninstall',
+    'build.ninja',
+    'scan-build',
+    'reconfigure',
+    'dist',
+    'distcheck',
+})
diff --git a/vendored-meson/meson/mesonbuild/dependencies/__init__.py b/vendored-meson/meson/mesonbuild/dependencies/__init__.py
new file mode 100644
index 000000000000..c6dabc50eacc
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/dependencies/__init__.py
@@ -0,0 +1,261 @@
+# Copyright 2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from .base import Dependency, InternalDependency, ExternalDependency, NotFoundDependency, MissingCompiler
+from .base import (
+        ExternalLibrary, DependencyException, DependencyMethods,
+        BuiltinDependency, SystemDependency, get_leaf_external_dependencies)
+from .detect import find_external_dependency, get_dep_identifier, packages, _packages_accept_language
+
+__all__ = [
+    'Dependency',
+    'InternalDependency',
+    'ExternalDependency',
+    'SystemDependency',
+    'BuiltinDependency',
+    'NotFoundDependency',
+    'ExternalLibrary',
+    'DependencyException',
+    'DependencyMethods',
+    'MissingCompiler',
+
+    'find_external_dependency',
+    'get_dep_identifier',
+    'get_leaf_external_dependencies',
+]
+
+"""Dependency representations and discovery logic.
+
+Meson attempts to largely abstract away dependency discovery information, and
+to encapsulate that logic itself so that the DSL doesn't have too much direct
+information. There are some cases where this is impossible/undesirable, such
+as the `get_variable()` method.
+
+Meson has four primary dependency types:
+  1. pkg-config
+  2. apple frameworks
+  3. CMake
+  4. system
+
+Plus a few more niche ones.
+
+When a user calls `dependency('foo')`, Meson creates a list of candidates and
+tries those candidates in order, looking for one that matches the criteria
+provided by the user (such as version requirements, or optional components
+that are required).
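+
+A minimal sketch of that candidate loop (purely illustrative, not the actual
+implementation):
+
+```python
+def first_found(name, environment, candidates):
+    # Each candidate is a zero-argument callable that constructs a
+    # Dependency; the first one that reports found() wins.
+    for candidate in candidates:
+        dep = candidate()
+        if dep.found():
+            return dep
+    return NotFoundDependency(name, environment)
+```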
+
+Apart from bug workarounds and odd corner cases, pkg-config and CMake
+generally just work™. Most of this package is concerned with dependencies
+that don't (always) provide CMake and/or pkg-config files.
+
+For these cases one needs to write a `system` dependency. These dependencies
+descend directly from `ExternalDependency`, in their constructor they
+manually set up the necessary link and compile args (and additional
+dependencies as necessary).
+
+For example, imagine a dependency called Foo, it uses an environment variable
+called `$FOO_ROOT` to point to its install root, which looks like this:
+```txt
+$FOO_ROOT
+→ include/
+→ lib/
+```
+To use Foo, you need its include directory, and you need to link to
+`lib/libfoo.ext`.
+
+You could write code that looks like:
+
+```python
+class FooSystemDependency(ExternalDependency):
+
+    def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]):
+        super().__init__(name, environment, kwargs)
+        root = os.environ.get('FOO_ROOT')
+        if root is None:
+            mlog.debug('$FOO_ROOT is unset.')
+            self.is_found = False
+            return
+
+        lib = self.clib_compiler.find_library('foo', environment, [os.path.join(root, 'lib')])
+        if lib is None:
+            mlog.debug('Could not find lib.')
+            self.is_found = False
+            return
+
+        self.compile_args.append(f'-I{os.path.join(root, "include")}')
+        self.link_args.append(lib)
+        self.is_found = True
+```
+
+This code will look for `FOO_ROOT` in the environment, gracefully handle
+`FOO_ROOT` being undefined, and then set its `compile_args` and `link_args`.
+It will also handle not finding the required lib (hopefully that doesn't
+happen, but it could if, for example, the lib is only static and shared
+linking is requested).
+
+There are a couple of things about this that still aren't ideal. For one, we
+don't want to be reading random environment variables at this point. Those
+should actually be added to `envconfig.Properties` and read in
+`environment.Environment._set_default_properties_from_env` (see how
+`BOOST_ROOT` is handled). We can also handle the `static` keyword and the
+`prefer_static` built-in option. So now that becomes:
+
+```python
+class FooSystemDependency(ExternalDependency):
+
+    def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]):
+        super().__init__(name, environment, kwargs)
+        root = environment.properties[self.for_machine].foo_root
+        if root is None:
+            mlog.debug('foo_root is unset.')
+            self.is_found = False
+            return
+
+        get_option = environment.coredata.get_option
+        static_opt = kwargs.get('static', get_option(mesonlib.OptionKey('prefer_static')))
+        static = mesonlib.LibType.STATIC if static_opt else mesonlib.LibType.SHARED
+        lib = self.clib_compiler.find_library(
+            'foo', environment, [os.path.join(root, 'lib')], libtype=static)
+        if lib is None:
+            mlog.debug('Could not find lib.')
+            self.is_found = False
+            return
+
+        self.compile_args.append(f'-I{os.path.join(root, "include")}')
+        self.link_args.append(lib)
+        self.is_found = True
+```
+
+This is nicer in a couple of ways. First, we can properly cross compile,
+since `FOO_ROOT` can be set separately for the build and host machines. It
+also means that users can override this in their machine files, and that if
+the environment variable changes during a Meson reconfigure, Meson won't
+re-read it, which is important for reproducibility. Finally, Meson will
+figure out whether it should be finding `libfoo.so` or `libfoo.a` (or the
+platform-specific names). Things are looking pretty good now, so it can be
+added to the `packages` dict below:
+
+```python
+packages.update({
+    'foo': FooSystemDependency,
+})
+```
+
+Now, what if foo also provides pkg-config, but it's only shipped on Unices,
+or only included in very recent versions of the dependency? We can use the
+`DependencyFactory` class:
+
+```python
+foo_factory = DependencyFactory(
+    'foo',
+    [DependencyMethods.PKGCONFIG, DependencyMethods.SYSTEM],
+    system_class=FooSystemDependency,
+)
+```
+
+This helper will generate a default pkg-config based dependency and use the
+`FooSystemDependency` as well. It can also handle
+custom finders for pkg-config and cmake based dependencies that need some
+extra help. You would then add the `foo_factory` to packages instead of
+`FooSystemDependency`:
+
+```python
+packages.update({
+    'foo': foo_factory,
+})
+```
+
+If you have a dependency that is very complicated (such as one with multiple
+implementations), you may need to write your own factory function. There are
+a number of examples in this package.
+
+_Note:_ before we moved to factory functions, it was common to use an
+`ExternalDependency` class that would instantiate different types of
+dependencies and hold the one it found. There are a number of drawbacks to
+this approach, and no new dependencies should do this.
+"""
+
+# This is a dict where the keys should be strings, and the values must be one
+# of:
+# - An ExternalDependency subclass
+# - A DependencyFactory object
+# - A callable with a signature of (Environment, MachineChoice, Dict[str, Any]) -> List[Callable[[], ExternalDependency]]
+#
+# The internal "defaults" attribute contains a separate dictionary mapping
+# for lazy imports. The values must be:
+# - a string naming the submodule that should be imported from `mesonbuild.dependencies` to populate the dependency
+packages.defaults.update({
+    # From dev:
+    'gtest': 'dev',
+    'gmock': 'dev',
+    'llvm': 'dev',
+    'valgrind': 'dev',
+    'zlib': 'dev',
+    'jni': 'dev',
+    'jdk': 'dev',
+
+    'boost': 'boost',
+    'cuda': 'cuda',
+
+    # per-file
+    'coarray': 'coarrays',
+    'hdf5': 'hdf5',
+    'mpi': 'mpi',
+    'scalapack': 'scalapack',
+
+    # From misc:
+    'blocks': 'misc',
+    'curses': 'misc',
+    'netcdf': 'misc',
+    'openmp': 'misc',
+    'threads': 'misc',
+    'pcap': 'misc',
+    'cups': 'misc',
+    'libwmf': 'misc',
+    'libgcrypt': 'misc',
+    'gpgme': 'misc',
+    'shaderc': 'misc',
+    'iconv': 'misc',
+    'intl': 'misc',
+    'dl': 'misc',
+    'openssl': 'misc',
+    'libcrypto': 'misc',
+    'libssl': 'misc',
+
+    # From platform:
+    'appleframeworks': 'platform',
+
+    # from python:
+    'python3': 'python',
+    'pybind11': 'python',
+
+    # From ui:
+    'gl': 'ui',
+    'gnustep': 'ui',
+    'sdl2': 'ui',
+    'wxwidgets': 'ui',
+    'vulkan': 'ui',
+
+    # from qt
+    'qt4': 'qt',
+    'qt5': 'qt',
+    'qt6': 'qt',
+})
+_packages_accept_language.update({
+    'hdf5',
+    'mpi',
+    'netcdf',
+    'openmp',
+})
diff --git a/vendored-meson/meson/mesonbuild/dependencies/base.py b/vendored-meson/meson/mesonbuild/dependencies/base.py
new file mode 100644
index 000000000000..ce206b6dd8b1
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/dependencies/base.py
@@ -0,0 +1,656 @@
+# Copyright 2013-2018 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This file contains the detection logic for external dependencies.
+# Custom logic for several other packages are in separate files.
+
+from __future__ import annotations
+import copy
+import os
+import collections
+import itertools
+import typing as T
+from enum import Enum
+
+from .. import mlog, mesonlib
+from ..compilers import clib_langs
+from ..mesonlib import LibType, MachineChoice, MesonException, HoldableObject, OptionKey
+from ..mesonlib import version_compare_many
+#from ..interpreterbase import FeatureDeprecated, FeatureNew
+
+if T.TYPE_CHECKING:
+    from .._typing import ImmutableListProtocol
+    from ..compilers.compilers import Compiler
+    from ..environment import Environment
+    from ..interpreterbase import FeatureCheckBase
+    from ..build import (
+        CustomTarget, IncludeDirs, CustomTargetIndex, LibTypes,
+        StaticLibrary, StructuredSources, ExtractedObjects
+    )
+    from ..mesonlib import FileOrString
+
+
+class DependencyException(MesonException):
+    '''Exceptions raised while trying to find dependencies'''
+
+
+class MissingCompiler:
+    """Represent a None Compiler - when no tool chain is found.
+    replacing AttributeError with DependencyException"""
+
+    def __getattr__(self, item: str) -> T.Any:
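+        # e.g. accessing `missing.find_library` raises DependencyException
+        # ('no toolchain found') instead of an opaque AttributeError; dunder
+        # lookups still raise AttributeError so attribute-probing protocols
+        # behave normally.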
+        if item.startswith('__'):
+            raise AttributeError()
+        raise DependencyException('no toolchain found')
+
+    def __bool__(self) -> bool:
+        return False
+
+
+class DependencyMethods(Enum):
+    # Auto means to use whatever dependency checking mechanisms, in whatever
+    # order, Meson thinks is best.
+    AUTO = 'auto'
+    PKGCONFIG = 'pkg-config'
+    CMAKE = 'cmake'
+    # The dependency is provided by the standard library and does not need to be linked
+    BUILTIN = 'builtin'
+    # Just specify the standard link arguments, assuming the operating system provides the library.
+    SYSTEM = 'system'
+    # This is only supported on OSX - search the frameworks directory by name.
+    EXTRAFRAMEWORK = 'extraframework'
+    # Detect using the sysconfig module.
+    SYSCONFIG = 'sysconfig'
+    # Specify using a "program"-config style tool
+    CONFIG_TOOL = 'config-tool'
+    # For backwards compatibility
+    SDLCONFIG = 'sdlconfig'
+    CUPSCONFIG = 'cups-config'
+    PCAPCONFIG = 'pcap-config'
+    LIBWMFCONFIG = 'libwmf-config'
+    QMAKE = 'qmake'
+    # Misc
+    DUB = 'dub'
+
+
+DependencyTypeName = T.NewType('DependencyTypeName', str)
+
+
+class Dependency(HoldableObject):
+
+    @classmethod
+    def _process_include_type_kw(cls, kwargs: T.Dict[str, T.Any]) -> str:
+        if 'include_type' not in kwargs:
+            return 'preserve'
+        if not isinstance(kwargs['include_type'], str):
+            raise DependencyException('The include_type kwarg must be a string type')
+        if kwargs['include_type'] not in ['preserve', 'system', 'non-system']:
+            raise DependencyException("include_type may only be one of ['preserve', 'system', 'non-system']")
+        return kwargs['include_type']
+
+    def __init__(self, type_name: DependencyTypeName, kwargs: T.Dict[str, T.Any]) -> None:
+        self.name = f'dep{id(self)}'
+        self.version:  T.Optional[str] = None
+        self.language: T.Optional[str] = None # None means C-like
+        self.is_found = False
+        self.type_name = type_name
+        self.compile_args: T.List[str] = []
+        self.link_args:    T.List[str] = []
+        # Raw -L and -l arguments without manual library searching
+        # If None, self.link_args will be used
+        self.raw_link_args: T.Optional[T.List[str]] = None
+        self.sources: T.List[T.Union['FileOrString', 'CustomTarget', 'StructuredSources']] = []
+        self.extra_files: T.List[mesonlib.File] = []
+        self.include_type = self._process_include_type_kw(kwargs)
+        self.ext_deps: T.List[Dependency] = []
+        self.d_features: T.DefaultDict[str, T.List[T.Any]] = collections.defaultdict(list)
+        self.featurechecks: T.List['FeatureCheckBase'] = []
+        self.feature_since: T.Optional[T.Tuple[str, str]] = None
+
+    def __repr__(self) -> str:
+        return f'<{self.__class__.__name__} {self.name}: {self.is_found}>'
+
+    def is_built(self) -> bool:
+        return False
+
+    def summary_value(self) -> T.Union[str, mlog.AnsiDecorator, mlog.AnsiText]:
+        if not self.found():
+            return mlog.red('NO')
+        if not self.version:
+            return mlog.green('YES')
+        return mlog.AnsiText(mlog.green('YES'), ' ', mlog.cyan(self.version))
+
+    def get_compile_args(self) -> T.List[str]:
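+        # Illustrative example: with include_type='system',
+        # ['-I/opt/foo/include'] is rewritten to ['-isystem/opt/foo/include'];
+        # 'non-system' performs the reverse rewrite.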
+        if self.include_type == 'system':
+            converted = []
+            for i in self.compile_args:
+                if i.startswith('-I') or i.startswith('/I'):
+                    converted += ['-isystem' + i[2:]]
+                else:
+                    converted += [i]
+            return converted
+        if self.include_type == 'non-system':
+            converted = []
+            for i in self.compile_args:
+                if i.startswith('-isystem'):
+                    converted += ['-I' + i[8:]]
+                else:
+                    converted += [i]
+            return converted
+        return self.compile_args
+
+    def get_all_compile_args(self) -> T.List[str]:
+        """Get the compile arguments from this dependency and it's sub dependencies."""
+        return list(itertools.chain(self.get_compile_args(),
+                                    *(d.get_all_compile_args() for d in self.ext_deps)))
+
+    def get_link_args(self, language: T.Optional[str] = None, raw: bool = False) -> T.List[str]:
+        if raw and self.raw_link_args is not None:
+            return self.raw_link_args
+        return self.link_args
+
+    def get_all_link_args(self) -> T.List[str]:
+        """Get the link arguments from this dependency and it's sub dependencies."""
+        return list(itertools.chain(self.get_link_args(),
+                                    *(d.get_all_link_args() for d in self.ext_deps)))
+
+    def found(self) -> bool:
+        return self.is_found
+
+    def get_sources(self) -> T.List[T.Union['FileOrString', 'CustomTarget', 'StructuredSources']]:
+        """Source files that need to be added to the target.
+        As an example, gtest-all.cc when using GTest."""
+        return self.sources
+
+    def get_extra_files(self) -> T.List[mesonlib.File]:
+        """Mostly for introspection and IDEs"""
+        return self.extra_files
+
+    def get_name(self) -> str:
+        return self.name
+
+    def get_version(self) -> str:
+        if self.version:
+            return self.version
+        else:
+            return 'unknown'
+
+    def get_include_dirs(self) -> T.List['IncludeDirs']:
+        return []
+
+    def get_include_type(self) -> str:
+        return self.include_type
+
+    def get_exe_args(self, compiler: 'Compiler') -> T.List[str]:
+        return []
+
+    def get_pkgconfig_variable(self, variable_name: str,
+                               define_variable: 'ImmutableListProtocol[str]',
+                               default: T.Optional[str]) -> str:
+        raise DependencyException(f'{self.name!r} is not a pkgconfig dependency')
+
+    def get_configtool_variable(self, variable_name: str) -> str:
+        raise DependencyException(f'{self.name!r} is not a config-tool dependency')
+
+    def get_partial_dependency(self, *, compile_args: bool = False,
+                               link_args: bool = False, links: bool = False,
+                               includes: bool = False, sources: bool = False) -> 'Dependency':
+        """Create a new dependency that contains part of the parent dependency.
+
+        The following options can be inherited:
+            links -- all link_with arguments
+            includes -- all include_directory and -I/-isystem calls
+            sources -- any source, header, or generated sources
+            compile_args -- any compile args
+            link_args -- any link args
+
+        Additionally, the new dependency will have the version parameter of its
+        parent (if any), and the requested values of any sub-dependencies will
+        be added as well.
+        """
+        raise RuntimeError('Unreachable: get_partial_dependency called on the base Dependency class')
+
+    def _add_sub_dependency(self, deplist: T.Iterable[T.Callable[[], 'Dependency']]) -> bool:
+        """Add an internal dependency from a list of possible dependencies.
+
+        This method is intended to make it easier to add additional
+        dependencies to another dependency internally.
+
+        Returns true if the dependency was successfully added, false
+        otherwise.
+        """
+        for d in deplist:
+            dep = d()
+            if dep.is_found:
+                self.ext_deps.append(dep)
+                return True
+        return False
+
+    def get_variable(self, *, cmake: T.Optional[str] = None, pkgconfig: T.Optional[str] = None,
+                     configtool: T.Optional[str] = None, internal: T.Optional[str] = None,
+                     default_value: T.Optional[str] = None,
+                     pkgconfig_define: T.Optional[T.List[str]] = None) -> str:
+        if default_value is not None:
+            return default_value
+        raise DependencyException(f'No default provided for dependency {self!r}, which is not pkg-config, cmake, or config-tool based.')
+
+    def generate_system_dependency(self, include_type: str) -> 'Dependency':
+        new_dep = copy.deepcopy(self)
+        new_dep.include_type = self._process_include_type_kw({'include_type': include_type})
+        return new_dep
+
+class InternalDependency(Dependency):
+    def __init__(self, version: str, incdirs: T.List['IncludeDirs'], compile_args: T.List[str],
+                 link_args: T.List[str],
+                 libraries: T.List[LibTypes],
+                 whole_libraries: T.List[T.Union[StaticLibrary, CustomTarget, CustomTargetIndex]],
+                 sources: T.Sequence[T.Union[FileOrString, CustomTarget, StructuredSources]],
+                 extra_files: T.Sequence[mesonlib.File],
+                 ext_deps: T.List[Dependency], variables: T.Dict[str, str],
+                 d_module_versions: T.List[T.Union[str, int]], d_import_dirs: T.List['IncludeDirs'],
+                 objects: T.List['ExtractedObjects']):
+        super().__init__(DependencyTypeName('internal'), {})
+        self.version = version
+        self.is_found = True
+        self.include_directories = incdirs
+        self.compile_args = compile_args
+        self.link_args = link_args
+        self.libraries = libraries
+        self.whole_libraries = whole_libraries
+        self.sources = list(sources)
+        self.extra_files = list(extra_files)
+        self.ext_deps = ext_deps
+        self.variables = variables
+        self.objects = objects
+        if d_module_versions:
+            self.d_features['versions'] = d_module_versions
+        if d_import_dirs:
+            self.d_features['import_dirs'] = d_import_dirs
+
+    def __deepcopy__(self, memo: T.Dict[int, 'InternalDependency']) -> 'InternalDependency':
+        result = self.__class__.__new__(self.__class__)
+        assert isinstance(result, InternalDependency)
+        memo[id(self)] = result
+        for k, v in self.__dict__.items():
+            if k in {'libraries', 'whole_libraries'}:
+                setattr(result, k, copy.copy(v))
+            else:
+                setattr(result, k, copy.deepcopy(v, memo))
+        return result
+
+    def summary_value(self) -> mlog.AnsiDecorator:
+        # Omit the version.  Most of the time it will be just the project
+        # version, which is uninteresting in the summary.
+        return mlog.green('YES')
+
+    def is_built(self) -> bool:
+        if self.sources or self.libraries or self.whole_libraries:
+            return True
+        return any(d.is_built() for d in self.ext_deps)
+
+    def get_pkgconfig_variable(self, variable_name: str,
+                               define_variable: 'ImmutableListProtocol[str]',
+                               default: T.Optional[str]) -> str:
+        raise DependencyException('Method "get_pkgconfig_variable()" is '
+                                  'invalid for an internal dependency')
+
+    def get_configtool_variable(self, variable_name: str) -> str:
+        raise DependencyException('Method "get_configtool_variable()" is '
+                                  'invalid for an internal dependency')
+
+    def get_partial_dependency(self, *, compile_args: bool = False,
+                               link_args: bool = False, links: bool = False,
+                               includes: bool = False, sources: bool = False,
+                               extra_files: bool = False) -> InternalDependency:
+        final_compile_args = self.compile_args.copy() if compile_args else []
+        final_link_args = self.link_args.copy() if link_args else []
+        final_libraries = self.libraries.copy() if links else []
+        final_whole_libraries = self.whole_libraries.copy() if links else []
+        final_sources = self.sources.copy() if sources else []
+        final_extra_files = self.extra_files.copy() if extra_files else []
+        final_includes = self.include_directories.copy() if includes else []
+        final_deps = [d.get_partial_dependency(
+            compile_args=compile_args, link_args=link_args, links=links,
+            includes=includes, sources=sources) for d in self.ext_deps]
+        return InternalDependency(
+            self.version, final_includes, final_compile_args,
+            final_link_args, final_libraries, final_whole_libraries,
+            final_sources, final_extra_files, final_deps, self.variables, [], [], [])
+
+    def get_include_dirs(self) -> T.List['IncludeDirs']:
+        return self.include_directories
+
+    def get_variable(self, *, cmake: T.Optional[str] = None, pkgconfig: T.Optional[str] = None,
+                     configtool: T.Optional[str] = None, internal: T.Optional[str] = None,
+                     default_value: T.Optional[str] = None,
+                     pkgconfig_define: T.Optional[T.List[str]] = None) -> str:
+        val = self.variables.get(internal, default_value)
+        if val is not None:
+            return val
+        raise DependencyException(f'Could not get an internal variable and no default provided for {self!r}')
+
+    def generate_link_whole_dependency(self) -> Dependency:
+        from ..build import SharedLibrary, CustomTarget, CustomTargetIndex
+        new_dep = copy.deepcopy(self)
+        for x in new_dep.libraries:
+            if isinstance(x, SharedLibrary):
+                raise MesonException('Cannot convert a dependency to link_whole when it contains a '
+                                     'SharedLibrary')
+            elif isinstance(x, (CustomTarget, CustomTargetIndex)) and x.links_dynamically():
+                raise MesonException('Cannot convert a dependency to link_whole when it contains a '
+                                     'CustomTarget or CustomTargetIndex which is a shared library')
+
+        # Mypy doesn't understand that the above is a TypeGuard
+        new_dep.whole_libraries += T.cast('T.List[T.Union[StaticLibrary, CustomTarget, CustomTargetIndex]]',
+                                          new_dep.libraries)
+        new_dep.libraries = []
+        return new_dep
+
+class HasNativeKwarg:
+    def __init__(self, kwargs: T.Dict[str, T.Any]):
+        self.for_machine = self.get_for_machine_from_kwargs(kwargs)
+
+    def get_for_machine_from_kwargs(self, kwargs: T.Dict[str, T.Any]) -> MachineChoice:
+        return MachineChoice.BUILD if kwargs.get('native', False) else MachineChoice.HOST
+
+class ExternalDependency(Dependency, HasNativeKwarg):
+    def __init__(self, type_name: DependencyTypeName, environment: 'Environment', kwargs: T.Dict[str, T.Any], language: T.Optional[str] = None):
+        Dependency.__init__(self, type_name, kwargs)
+        self.env = environment
+        self.name = type_name # default
+        self.is_found = False
+        self.language = language
+        version_reqs = kwargs.get('version', None)
+        if isinstance(version_reqs, str):
+            version_reqs = [version_reqs]
+        self.version_reqs: T.Optional[T.List[str]] = version_reqs
+        self.required = kwargs.get('required', True)
+        self.silent = kwargs.get('silent', False)
+        self.static = kwargs.get('static', self.env.coredata.get_option(OptionKey('prefer_static')))
+        if not isinstance(self.static, bool):
+            raise DependencyException('Static keyword must be boolean')
+        self.libtype = LibType.STATIC if self.static else LibType.PREFER_SHARED
+        # Is this dependency to be run on the build platform?
+        HasNativeKwarg.__init__(self, kwargs)
+        self.clib_compiler = detect_compiler(self.name, environment, self.for_machine, self.language)
+
+    def get_compiler(self) -> T.Union['MissingCompiler', 'Compiler']:
+        return self.clib_compiler
+
+    def get_partial_dependency(self, *, compile_args: bool = False,
+                               link_args: bool = False, links: bool = False,
+                               includes: bool = False, sources: bool = False) -> Dependency:
+        new = copy.copy(self)
+        if not compile_args:
+            new.compile_args = []
+        if not link_args:
+            new.link_args = []
+        if not sources:
+            new.sources = []
+        if not includes:
+            pass # TODO maybe filter compile_args?
+
+        return new
+
+    def log_details(self) -> str:
+        return ''
+
+    def log_info(self) -> str:
+        return ''
+
+    @staticmethod
+    def log_tried() -> str:
+        return ''
+
+    # Check if dependency version meets the requirements
+    def _check_version(self) -> None:
+        if not self.is_found:
+            return
+
+        if self.version_reqs:
+            # an unknown version can never satisfy any requirement
+            if not self.version:
+                self.is_found = False
+                found_msg: mlog.TV_LoggableList = []
+                found_msg += ['Dependency', mlog.bold(self.name), 'found:']
+                found_msg += [mlog.red('NO'), 'unknown version, but need:', self.version_reqs]
+                mlog.log(*found_msg)
+
+                if self.required:
+                    m = f'Unknown version, but need {self.version_reqs!r}.'
+                    raise DependencyException(m)
+
+            else:
+                (self.is_found, not_found, found) = \
+                    version_compare_many(self.version, self.version_reqs)
+                if not self.is_found:
+                    found_msg = ['Dependency', mlog.bold(self.name), 'found:']
+                    found_msg += [mlog.red('NO'),
+                                  'found', mlog.normal_cyan(self.version), 'but need:',
+                                  mlog.bold(', '.join([f"'{e}'" for e in not_found]))]
+                    if found:
+                        found_msg += ['; matched:',
+                                      ', '.join([f"'{e}'" for e in found])]
+                    mlog.log(*found_msg)
+
+                    if self.required:
+                        m = 'Invalid version, need {!r} {!r} found {!r}.'
+                        raise DependencyException(m.format(self.name, not_found, self.version))
+                    return
+
+
+class NotFoundDependency(Dependency):
+    def __init__(self, name: str, environment: 'Environment') -> None:
+        super().__init__(DependencyTypeName('not-found'), {})
+        self.env = environment
+        self.name = name
+        self.is_found = False
+
+    def get_partial_dependency(self, *, compile_args: bool = False,
+                               link_args: bool = False, links: bool = False,
+                               includes: bool = False, sources: bool = False) -> 'NotFoundDependency':
+        return copy.copy(self)
+
+
+class ExternalLibrary(ExternalDependency):
+    def __init__(self, name: str, link_args: T.List[str], environment: 'Environment',
+                 language: str, silent: bool = False) -> None:
+        super().__init__(DependencyTypeName('library'), environment, {}, language=language)
+        self.name = name
+        self.language = language
+        self.is_found = False
+        if link_args:
+            self.is_found = True
+            self.link_args = link_args
+        if not silent:
+            if self.is_found:
+                mlog.log('Library', mlog.bold(name), 'found:', mlog.green('YES'))
+            else:
+                mlog.log('Library', mlog.bold(name), 'found:', mlog.red('NO'))
+
+    def get_link_args(self, language: T.Optional[str] = None, raw: bool = False) -> T.List[str]:
+        '''
+        External libraries detected using a compiler must only be used with
+        compatible code. For instance, Vala libraries (.vapi files) cannot be
+        used with C code, and not all Rust library types can be linked with
+        C-like code. Note that C++ libraries *can* be linked with C code with
+        a C++ linker (and vice-versa).
+        '''
+        # Using a vala library in a non-vala target, or a non-vala library in a vala target
+        # XXX: This should be extended to other non-C linkers such as Rust
+        if (self.language == 'vala' and language != 'vala') or \
+           (language == 'vala' and self.language != 'vala'):
+            return []
+        return super().get_link_args(language=language, raw=raw)
+
+    def get_partial_dependency(self, *, compile_args: bool = False,
+                               link_args: bool = False, links: bool = False,
+                               includes: bool = False, sources: bool = False) -> 'ExternalLibrary':
+        # External library only has link_args, so ignore the rest of the
+        # interface.
+        new = copy.copy(self)
+        if not link_args:
+            new.link_args = []
+        return new
+
+
+def get_leaf_external_dependencies(deps: T.List[Dependency]) -> T.List[Dependency]:
+    if not deps:
+        # Ensure that we always return a new instance
+        return deps.copy()
+    final_deps = []
+    while deps:
+        next_deps = []
+        for d in mesonlib.listify(deps):
+            if not isinstance(d, Dependency) or d.is_built():
+                raise DependencyException('Dependencies must be external dependencies')
+            final_deps.append(d)
+            next_deps.extend(d.ext_deps)
+        deps = next_deps
+    return final_deps
+
+
+def sort_libpaths(libpaths: T.List[str], refpaths: T.List[str]) -> T.List[str]:
+    """Sort  according to 
+
+    It is intended to be used to sort -L flags returned by pkg-config.
+    Pkg-config returns flags in random order which cannot be relied on.
+    """
+    if len(refpaths) == 0:
+        return list(libpaths)
+
+    def key_func(libpath: str) -> T.Tuple[int, int]:
+        common_lengths: T.List[int] = []
+        for refpath in refpaths:
+            try:
+                common_path: str = os.path.commonpath([libpath, refpath])
+            except ValueError:
+                common_path = ''
+            common_lengths.append(len(common_path))
+        max_length = max(common_lengths)
+        max_index = common_lengths.index(max_length)
+        reversed_max_length = len(refpaths[max_index]) - max_length
+        return (max_index, reversed_max_length)
+    return sorted(libpaths, key=key_func)
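+# Illustrative example: sort_libpaths(['/usr/lib', '/opt/boost/lib'], ['/opt/boost'])
+# returns ['/opt/boost/lib', '/usr/lib'], because '/opt/boost/lib' shares the
+# longest common path with the first reference path.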
+
+def strip_system_libdirs(environment: 'Environment', for_machine: MachineChoice, link_args: T.List[str]) -> T.List[str]:
+    """Remove -L arguments.
+
+    leaving these in will break builds where a user has a version of a library
+    in the system path, and a different version not in the system path if they
+    want to link against the non-system path version.
+    """
+    exclude = {f'-L{p}' for p in environment.get_compiler_system_lib_dirs(for_machine)}
+    return [l for l in link_args if l not in exclude]
+
+def strip_system_includedirs(environment: 'Environment', for_machine: MachineChoice, include_args: T.List[str]) -> T.List[str]:
+    """Remove -I arguments.
+
+    leaving these in will break builds where user want dependencies with system
+    include-type used in rust.bindgen targets as if will cause system headers
+    to not be found.
+    """
+
+    exclude = {f'-I{p}' for p in environment.get_compiler_system_include_dirs(for_machine)}
+    return [i for i in include_args if i not in exclude]
+
+def process_method_kw(possible: T.Iterable[DependencyMethods], kwargs: T.Dict[str, T.Any]) -> T.List[DependencyMethods]:
+    method = kwargs.get('method', 'auto')  # type: T.Union[DependencyMethods, str]
+    if isinstance(method, DependencyMethods):
+        return [method]
+    # TODO: try/except?
+    if method not in [e.value for e in DependencyMethods]:
+        raise DependencyException(f'method {method!r} is invalid')
+    method = DependencyMethods(method)
+
+    # Raise FeatureNew where appropriate
+    if method is DependencyMethods.CONFIG_TOOL:
+        # FIXME: needs to get a handle on the subproject
+        # FeatureNew.single_use('Configuration method "config-tool"', '0.44.0')
+        pass
+    # This remaps per-tool config methods, which are deprecated, to the new
+    # generic CONFIG_TOOL value.
+    if method in [DependencyMethods.SDLCONFIG, DependencyMethods.CUPSCONFIG,
+                  DependencyMethods.PCAPCONFIG, DependencyMethods.LIBWMFCONFIG]:
+        # FIXME: needs to get a handle on the subproject
+        #FeatureDeprecated.single_use(f'Configuration method {method.value}', '0.44', 'Use "config-tool" instead.')
+        method = DependencyMethods.CONFIG_TOOL
+    if method is DependencyMethods.QMAKE:
+        # FIXME: needs to get a handle on the subproject
+        # FeatureDeprecated.single_use('Configuration method "qmake"', '0.58', 'Use "config-tool" instead.')
+        method = DependencyMethods.CONFIG_TOOL
+
+    # Set the detection method. If the method is set to auto, use any available method.
+    # If method is set to a specific string, allow only that detection method.
+    if method == DependencyMethods.AUTO:
+        methods = list(possible)
+    elif method in possible:
+        methods = [method]
+    else:
+        raise DependencyException(
+            'Unsupported detection method: {}, allowed methods are {}'.format(
+                method.value,
+                mlog.format_list([x.value for x in [DependencyMethods.AUTO] + list(possible)])))
+
+    return methods
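+# For example, with possible=[PKGCONFIG, CMAKE]: method 'auto' yields both in
+# order, 'cmake' yields [CMAKE], and a deprecated per-tool value such as
+# 'sdlconfig' is first remapped to CONFIG_TOOL (which must then be in `possible`).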
+
+def detect_compiler(name: str, env: 'Environment', for_machine: MachineChoice,
+                    language: T.Optional[str]) -> T.Union['MissingCompiler', 'Compiler']:
+    """Given a language and environment find the compiler used."""
+    compilers = env.coredata.compilers[for_machine]
+
+    # Set the compiler for this dependency if a language is specified,
+    # else try to pick something that looks usable.
+    if language:
+        if language not in compilers:
+            m = name.capitalize() + ' requires a {0} compiler, but ' \
+                '{0} is not in the list of project languages'
+            raise DependencyException(m.format(language.capitalize()))
+        return compilers[language]
+    else:
+        for lang in clib_langs:
+            try:
+                return compilers[lang]
+            except KeyError:
+                continue
+    return MissingCompiler()
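+# e.g. detect_compiler('zlib', env, MachineChoice.HOST, None) walks the C-like
+# language priority list (clib_langs) and returns the first project compiler
+# found, or a MissingCompiler sentinel when none is available.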
+
+
+class SystemDependency(ExternalDependency):
+
+    """Dependency base for System type dependencies."""
+
+    def __init__(self, name: str, env: 'Environment', kwargs: T.Dict[str, T.Any],
+                 language: T.Optional[str] = None) -> None:
+        super().__init__(DependencyTypeName('system'), env, kwargs, language=language)
+        self.name = name
+
+    @staticmethod
+    def log_tried() -> str:
+        return 'system'
+
+
+class BuiltinDependency(ExternalDependency):
+
+    """Dependency base for Builtin type dependencies."""
+
+    def __init__(self, name: str, env: 'Environment', kwargs: T.Dict[str, T.Any],
+                 language: T.Optional[str] = None) -> None:
+        super().__init__(DependencyTypeName('builtin'), env, kwargs, language=language)
+        self.name = name
+
+    @staticmethod
+    def log_tried() -> str:
+        return 'builtin'
diff --git a/vendored-meson/meson/mesonbuild/dependencies/boost.py b/vendored-meson/meson/mesonbuild/dependencies/boost.py
new file mode 100644
index 000000000000..0e4dab9a5570
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/dependencies/boost.py
@@ -0,0 +1,1092 @@
+# Copyright 2013-2020 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+import re
+import dataclasses
+import functools
+import typing as T
+from pathlib import Path
+
+from .. import mlog
+from .. import mesonlib
+
+from .base import DependencyException, SystemDependency
+from .detect import packages
+from .pkgconfig import PkgConfigDependency
+from .misc import threads_factory
+
+if T.TYPE_CHECKING:
+    from ..environment import Environment, Properties
+
+# On Windows, 3 directory layouts are supported:
+# * The default layout (versioned) installed:
+#   - $BOOST_ROOT/include/boost-x_x/boost/*.hpp
+#   - $BOOST_ROOT/lib/*.lib
+# * The non-default layout (system) installed:
+#   - $BOOST_ROOT/include/boost/*.hpp
+#   - $BOOST_ROOT/lib/*.lib
+# * The pre-built binaries from sf.net:
+#   - $BOOST_ROOT/boost/*.hpp
+#   - $BOOST_ROOT/lib<arch>-<compiler>/*.lib where arch=32/64 and compiler=msvc-14.1
+#
+# Note that we should also try to support:
+# mingw-w64 / Windows : libboost_<module>-mt.a            (location = /mingw64/lib/)
+#                       libboost_<module>-mt.dll.a
+#
+# The `modules` argument accepts library names. This is because every module that
+# has libraries to link against also has multiple options regarding how to
+# link. See for example:
+# * http://www.boost.org/doc/libs/1_65_1/libs/test/doc/html/boost_test/usage_variants.html
+# * http://www.boost.org/doc/libs/1_65_1/doc/html/stacktrace/configuration_and_build.html
+# * http://www.boost.org/doc/libs/1_65_1/libs/math/doc/html/math_toolkit/main_tr1.html
+
+# **On Unix**, official packaged versions of boost libraries follow the following schemes:
+#
+# Linux / Debian:   libboost_<module>.so -> libboost_<module>.so.1.66.0
+# Linux / Red Hat:  libboost_<module>.so -> libboost_<module>.so.1.66.0
+# Linux / OpenSuse: libboost_<module>.so -> libboost_<module>.so.1.66.0
+# Win   / Cygwin:   libboost_<module>.dll.a                                  (location = /usr/lib)
+#                   libboost_<module>.a
+#                   cygboost_<module>_1_64.dll                               (location = /usr/bin)
+# Win   / VS:       boost_<module>-vc<ver>-mt[-gd]-<arch>-1_67.dll           (location = C:/local/boost_1_67_0)
+# Mac   / homebrew: libboost_<module>.dylib + libboost_<module>-mt.dylib     (location = /usr/local/lib)
+# Mac   / macports: libboost_<module>.dylib + libboost_<module>-mt.dylib     (location = /opt/local/lib)
+#
+# It's not clear that any other abi tags (e.g. -gd) are used in official packages.
+#
+# On Linux systems, boost libs have multithreading support enabled, but without the -mt tag.
+#
+# Boost documentation recommends using complex abi tags like "-lboost_regex-gcc34-mt-d-1_36".
+# (See http://www.boost.org/doc/libs/1_66_0/more/getting_started/unix-variants.html#library-naming)
+# However, it's not clear that any Unix distribution follows this scheme.
+# Furthermore, the boost documentation for unix above uses examples from windows like
+#   "libboost_regex-vc71-mt-d-x86-1_34.lib", so apparently the abi tags may be more aimed at windows.
+#
+# We follow the following strategy for finding modules:
+# A) Detect potential boost root directories (uses also BOOST_ROOT env var)
+# B) Foreach candidate
+#   1. Look for the boost headers (boost/version.hpp)
+#   2. Find all boost libraries
+#     2.1 Add all libraries in lib*
+#     2.2 Filter out non boost libraries
+#     2.3 Filter the remaining libraries based on the meson requirements (static/shared, etc.)
+#     2.4 Ensure that all libraries have the same boost tag (and are thus compatible)
+#   3. Select the libraries matching the requested modules
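+#
+# As an illustrative (non-exhaustive) example of the parsing below, a file named
+# `libboost_regex-vc142-mt-gd-x64-1_73.lib` decomposes into mod_name=boost_regex,
+# toolset=vc142, mt (multithreaded), abi tag 'gd' (debug runtime + debug build),
+# arch=x64 and version_lib=1_73.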
+
+@dataclasses.dataclass(eq=False, order=False)
+class UnknownFileException(Exception):
+    path: Path
+
+@functools.total_ordering
+class BoostIncludeDir():
+    def __init__(self, path: Path, version_int: int):
+        self.path = path
+        self.version_int = version_int
+        major = int(self.version_int / 100000)
+        minor = int((self.version_int / 100) % 1000)
+        patch = int(self.version_int % 100)
+        self.version = f'{major}.{minor}.{patch}'
+        self.version_lib = f'{major}_{minor}'
+
+    def __repr__(self) -> str:
+        return f'<BoostIncludeDir: {self.version} -- {self.path}>'
+
+    def __lt__(self, other: object) -> bool:
+        if isinstance(other, BoostIncludeDir):
+            return (self.version_int, self.path) < (other.version_int, other.path)
+        return NotImplemented
+
+@functools.total_ordering
+class BoostLibraryFile():
+    # Python libraries are special because of the included
+    # minor version in the module name.
+    boost_python_libs = ['boost_python', 'boost_numpy']
+    reg_python_mod_split = re.compile(r'(boost_[a-zA-Z]+)([0-9]*)')
+
+    reg_abi_tag = re.compile(r'^s?g?y?d?p?n?$')
+    reg_ver_tag = re.compile(r'^[0-9_]+$')
+
+    def __init__(self, path: Path):
+        self.path = path
+        self.name = self.path.name
+
+        # Initialize default properties
+        self.static = False
+        self.toolset = ''
+        self.arch = ''
+        self.version_lib = ''
+        self.mt = True
+
+        self.runtime_static = False
+        self.runtime_debug = False
+        self.python_debug = False
+        self.debug = False
+        self.stlport = False
+        self.deprecated_iostreams = False
+
+        # Post process the library name
+        name_parts = self.name.split('.')
+        self.basename = name_parts[0]
+        self.suffixes = name_parts[1:]
+        self.vers_raw = [x for x in self.suffixes if x.isdigit()]
+        self.suffixes = [x for x in self.suffixes if not x.isdigit()]
+        self.nvsuffix = '.'.join(self.suffixes)  # Used for detecting the library type
+        self.nametags = self.basename.split('-')
+        self.mod_name = self.nametags[0]
+        if self.mod_name.startswith('lib'):
+            self.mod_name = self.mod_name[3:]
+
+        # Set library version if possible
+        if len(self.vers_raw) >= 2:
+            self.version_lib = '{}_{}'.format(self.vers_raw[0], self.vers_raw[1])
+
+        # Detecting library type
+        if self.nvsuffix in {'so', 'dll', 'dll.a', 'dll.lib', 'dylib'}:
+            self.static = False
+        elif self.nvsuffix in {'a', 'lib'}:
+            self.static = True
+        else:
+            raise UnknownFileException(self.path)
+
+        # boost_<module>.lib is the dll import library
+        if self.basename.startswith('boost_') and self.nvsuffix == 'lib':
+            self.static = False
+
+        # Process tags
+        tags = self.nametags[1:]
+        # Filter out the python version tag and fix modname
+        if self.is_python_lib():
+            tags = self.fix_python_name(tags)
+        if not tags:
+            return
+
+        # Without any tags mt is assumed, however, an absence of mt in the name
+        # with tags present indicates that the lib was built without mt support
+        self.mt = False
+        for i in tags:
+            if i == 'mt':
+                self.mt = True
+            elif len(i) == 3 and i[1:] in {'32', '64'}:
+                self.arch = i
+            elif BoostLibraryFile.reg_abi_tag.match(i):
+                self.runtime_static = 's' in i
+                self.runtime_debug = 'g' in i
+                self.python_debug = 'y' in i
+                self.debug = 'd' in i
+                self.stlport = 'p' in i
+                self.deprecated_iostreams = 'n' in i
+            elif BoostLibraryFile.reg_ver_tag.match(i):
+                self.version_lib = i
+            else:
+                self.toolset = i
+
+    def __repr__(self) -> str:
+        return f'<LIB: {self.abitag} {self.mod_name:<32} {self.path}>'
+
+    def __lt__(self, other: object) -> bool:
+        if isinstance(other, BoostLibraryFile):
+            return (
+                self.mod_name, self.static, self.version_lib, self.arch,
+                not self.mt, not self.runtime_static,
+                not self.debug, self.runtime_debug, self.python_debug,
+                self.stlport, self.deprecated_iostreams,
+                self.name,
+            ) < (
+                other.mod_name, other.static, other.version_lib, other.arch,
+                not other.mt, not other.runtime_static,
+                not other.debug, other.runtime_debug, other.python_debug,
+                other.stlport, other.deprecated_iostreams,
+                other.name,
+            )
+        return NotImplemented
+
+    def __eq__(self, other: object) -> bool:
+        if isinstance(other, BoostLibraryFile):
+            return self.name == other.name
+        return NotImplemented
+
+    def __hash__(self) -> int:
+        return hash(self.name)
+
+    @property
+    def abitag(self) -> str:
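+        # e.g. a static multithreaded library with no special runtime flags, arch
+        # x64, toolset vc142 and version 1_73 renders as 'SM ------ x64 vc142 1_73'.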
+        abitag = ''
+        abitag += 'S' if self.static else '-'
+        abitag += 'M' if self.mt else '-'
+        abitag += ' '
+        abitag += 's' if self.runtime_static else '-'
+        abitag += 'g' if self.runtime_debug else '-'
+        abitag += 'y' if self.python_debug else '-'
+        abitag += 'd' if self.debug else '-'
+        abitag += 'p' if self.stlport else '-'
+        abitag += 'n' if self.deprecated_iostreams else '-'
+        abitag += ' ' + (self.arch or '???')
+        abitag += ' ' + (self.toolset or '?')
+        abitag += ' ' + (self.version_lib or 'x_xx')
+        return abitag
+
+    def is_boost(self) -> bool:
+        return any(self.name.startswith(x) for x in ['libboost_', 'boost_'])
+
+    def is_python_lib(self) -> bool:
+        return any(self.mod_name.startswith(x) for x in BoostLibraryFile.boost_python_libs)
+
+    def fix_python_name(self, tags: T.List[str]) -> T.List[str]:
+        # Handle the boost_python naming madness.
+        # See https://github.com/mesonbuild/meson/issues/4788 for some distro
+        # specific naming variations.
+        other_tags = []  # type: T.List[str]
+
+        # Split the current modname into the base name and the version
+        m_cur = BoostLibraryFile.reg_python_mod_split.match(self.mod_name)
+        cur_name = m_cur.group(1)
+        cur_vers = m_cur.group(2)
+
+        # Update the current version string if the new version string is longer
+        def update_vers(new_vers: str) -> None:
+            nonlocal cur_vers
+            new_vers = new_vers.replace('_', '')
+            new_vers = new_vers.replace('.', '')
+            if not new_vers.isdigit():
+                return
+            if len(new_vers) > len(cur_vers):
+                cur_vers = new_vers
+
+        for i in tags:
+            if i.startswith('py'):
+                update_vers(i[2:])
+            elif i.isdigit():
+                update_vers(i)
+            elif len(i) >= 3 and i[0].isdigit() and i[2].isdigit() and i[1] == '.':
+                update_vers(i)
+            else:
+                other_tags += [i]
+
+        self.mod_name = cur_name + cur_vers
+        return other_tags
+
+    def mod_name_matches(self, mod_name: str) -> bool:
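+        # e.g. a library named 'boost_python38' matches a requested 'boost_python3'
+        # (prefix match on the version digits); a bare 'boost_python' request
+        # implies python 2 and does not match it.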
+        if self.mod_name == mod_name:
+            return True
+        if not self.is_python_lib():
+            return False
+
+        m_cur = BoostLibraryFile.reg_python_mod_split.match(self.mod_name)
+        m_arg = BoostLibraryFile.reg_python_mod_split.match(mod_name)
+
+        if not m_cur or not m_arg:
+            return False
+
+        if m_cur.group(1) != m_arg.group(1):
+            return False
+
+        cur_vers = m_cur.group(2)
+        arg_vers = m_arg.group(2)
+
+        # Always assume python 2 if nothing is specified
+        if not arg_vers:
+            arg_vers = '2'
+
+        return cur_vers.startswith(arg_vers)
+
+    def version_matches(self, version_lib: str) -> bool:
+        # If no version tag is present, assume that it fits
+        if not self.version_lib or not version_lib:
+            return True
+        return self.version_lib == version_lib
+
+    def arch_matches(self, arch: str) -> bool:
+        # If no arch tag is present, assume that it fits
+        if not self.arch or not arch:
+            return True
+        return self.arch == arch
+
+    def vscrt_matches(self, vscrt: str) -> bool:
+        # If no vscrt tag present, assume that it fits  ['/MD', '/MDd', '/MT', '/MTd']
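+        # e.g. compiling with /MDd (dynamic debug CRT) only matches libraries whose
+        # abi tag has the runtime-debug 'g' flag set and the static-runtime 's' unset.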
+        if not vscrt:
+            return True
+        if vscrt in {'/MD', '-MD'}:
+            return not self.runtime_static and not self.runtime_debug
+        elif vscrt in {'/MDd', '-MDd'}:
+            return not self.runtime_static and self.runtime_debug
+        elif vscrt in {'/MT', '-MT'}:
+            return (self.runtime_static or not self.static) and not self.runtime_debug
+        elif vscrt in {'/MTd', '-MTd'}:
+            return (self.runtime_static or not self.static) and self.runtime_debug
+
+        mlog.warning(f'Boost: unknown vscrt tag {vscrt}. This may cause the compilation to fail. Please consider reporting this as a bug.', once=True)
+        return True
+
+    def get_compiler_args(self) -> T.List[str]:
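+        # e.g. a shared boost_thread library picks up
+        # ['-DBOOST_THREAD_BUILD_DLL=1', '-DBOOST_THREAD_USE_DLL=1'] from the
+        # generated boost_libraries table at the bottom of this file.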
+        args = []  # type: T.List[str]
+        if self.mod_name in boost_libraries:
+            libdef = boost_libraries[self.mod_name]  # type: BoostLibrary
+            if self.static:
+                args += libdef.static
+            else:
+                args += libdef.shared
+            if self.mt:
+                args += libdef.multi
+            else:
+                args += libdef.single
+        return args
+
+    def get_link_args(self) -> T.List[str]:
+        return [self.path.as_posix()]
+
+class BoostDependency(SystemDependency):
+    def __init__(self, environment: Environment, kwargs: T.Dict[str, T.Any]) -> None:
+        super().__init__('boost', environment, kwargs, language='cpp')
+        buildtype = environment.coredata.get_option(mesonlib.OptionKey('buildtype'))
+        assert isinstance(buildtype, str)
+        self.debug = buildtype.startswith('debug')
+        self.multithreading = kwargs.get('threading', 'multi') == 'multi'
+
+        self.boost_root = None  # type: T.Optional[Path]
+        self.explicit_static = 'static' in kwargs
+
+        # Extract and validate modules
+        self.modules = mesonlib.extract_as_list(kwargs, 'modules')  # type: T.List[str]
+        for i in self.modules:
+            if not isinstance(i, str):
+                raise DependencyException('Boost module argument is not a string.')
+            if i.startswith('boost_'):
+                raise DependencyException('Boost modules must be passed without the boost_ prefix')
+
+        self.modules_found = []    # type: T.List[str]
+        self.modules_missing = []  # type: T.List[str]
+
+        # Do we need threads?
+        if 'thread' in self.modules:
+            if not self._add_sub_dependency(threads_factory(environment, self.for_machine, {})):
+                self.is_found = False
+                return
+
+        # Try figuring out the architecture tag
+        self.arch = environment.machines[self.for_machine].cpu_family
+        self.arch = boost_arch_map.get(self.arch, None)
+
+        # First, look for paths specified in a machine file
+        props = self.env.properties[self.for_machine]
+        if any(x in props for x in
+               ['boost_includedir', 'boost_librarydir', 'boost_root']):
+            self.detect_boost_machine_file(props)
+            return
+
+        # Finally, look for paths from .pc files and from searching the filesystem
+        self.detect_roots()
+
+    def check_and_set_roots(self, roots: T.List[Path], use_system: bool) -> None:
+        roots = list(mesonlib.OrderedSet(roots))
+        for j in roots:
+            #   1. Look for the boost headers (boost/version.hpp)
+            mlog.debug(f'Checking potential boost root {j.as_posix()}')
+            inc_dirs = self.detect_inc_dirs(j)
+            inc_dirs = sorted(inc_dirs, reverse=True)  # Prefer the newer versions
+
+            # Early abort when boost is not found
+            if not inc_dirs:
+                continue
+
+            lib_dirs = self.detect_lib_dirs(j, use_system)
+            self.is_found = self.run_check(inc_dirs, lib_dirs)
+            if self.is_found:
+                self.boost_root = j
+                break
+
+    def detect_boost_machine_file(self, props: 'Properties') -> None:
+        """Detect boost with values in the machine file or environment.
+
+        The machine file values are defaulted to the environment values.
+        """
+        # XXX: if we had a TypedDict we wouldn't need this
+        incdir = props.get('boost_includedir')
+        assert incdir is None or isinstance(incdir, str)
+        libdir = props.get('boost_librarydir')
+        assert libdir is None or isinstance(libdir, str)
+
+        if incdir and libdir:
+            inc_dir = Path(incdir)
+            lib_dir = Path(libdir)
+
+            if not inc_dir.is_absolute() or not lib_dir.is_absolute():
+                raise DependencyException('Paths given for boost_includedir and boost_librarydir in machine file must be absolute')
+
+            mlog.debug('Trying to find boost with:')
+            mlog.debug(f'  - boost_includedir = {inc_dir}')
+            mlog.debug(f'  - boost_librarydir = {lib_dir}')
+
+            return self.detect_split_root(inc_dir, lib_dir)
+
+        elif incdir or libdir:
+            raise DependencyException('Both boost_includedir *and* boost_librarydir have to be set in your machine file (one is not enough)')
+
+        rootdir = props.get('boost_root')
+        # It shouldn't be possible to get here without something in boost_root
+        assert rootdir
+
+        raw_paths = mesonlib.stringlistify(rootdir)
+        paths = [Path(x) for x in raw_paths]
+        if paths and any(not x.is_absolute() for x in paths):
+            raise DependencyException('boost_root path given in machine file must be absolute')
+
+        self.check_and_set_roots(paths, use_system=False)
+
+    def run_check(self, inc_dirs: T.List[BoostIncludeDir], lib_dirs: T.List[Path]) -> bool:
+        mlog.debug('  - potential library dirs: {}'.format([x.as_posix() for x in lib_dirs]))
+        mlog.debug('  - potential include dirs: {}'.format([x.path.as_posix() for x in inc_dirs]))
+
+        #   2. Find all boost libraries
+        libs = []  # type: T.List[BoostLibraryFile]
+        for i in lib_dirs:
+            libs = self.detect_libraries(i)
+            if libs:
+                mlog.debug(f'  - found boost library dir: {i}')
+                # mlog.debug('  - raw library list:')
+                # for j in libs:
+                #     mlog.debug('    - {}'.format(j))
+                break
+        libs = sorted(set(libs))
+
+        modules = ['boost_' + x for x in self.modules]
+        for inc in inc_dirs:
+            mlog.debug(f'  - found boost {inc.version} include dir: {inc.path}')
+            f_libs = self.filter_libraries(libs, inc.version_lib)
+
+            mlog.debug('  - filtered library list:')
+            for j in f_libs:
+                mlog.debug(f'    - {j}')
+
+            #   3. Select the libraries matching the requested modules
+            not_found = []  # type: T.List[str]
+            selected_modules = []  # type: T.List[BoostLibraryFile]
+            for mod in modules:
+                found = False
+                for l in f_libs:
+                    if l.mod_name_matches(mod):
+                        selected_modules += [l]
+                        found = True
+                        break
+                if not found:
+                    not_found += [mod]
+
+            # log the result
+            mlog.debug('  - found:')
+            comp_args = []  # type: T.List[str]
+            link_args = []  # type: T.List[str]
+            for j in selected_modules:
+                c_args = j.get_compiler_args()
+                l_args = j.get_link_args()
+                mlog.debug('    - {:<24} link={} comp={}'.format(j.mod_name, str(l_args), str(c_args)))
+                comp_args += c_args
+                link_args += l_args
+
+            comp_args = list(mesonlib.OrderedSet(comp_args))
+            link_args = list(mesonlib.OrderedSet(link_args))
+
+            self.modules_found = [x.mod_name for x in selected_modules]
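+            # x[6:] strips the leading 'boost_' (6 chars) to recover plain module names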
+            self.modules_found = [x[6:] for x in self.modules_found]
+            self.modules_found = sorted(set(self.modules_found))
+            self.modules_missing = not_found
+            self.modules_missing = [x[6:] for x in self.modules_missing]
+            self.modules_missing = sorted(set(self.modules_missing))
+
+            # if we found all modules we are done
+            if not not_found:
+                self.version = inc.version
+                self.compile_args = ['-I' + inc.path.as_posix()]
+                self.compile_args += comp_args
+                self.compile_args += self._extra_compile_args()
+                self.compile_args = list(mesonlib.OrderedSet(self.compile_args))
+                self.link_args = link_args
+                mlog.debug(f'  - final compile args: {self.compile_args}')
+                mlog.debug(f'  - final link args:    {self.link_args}')
+                return True
+
+            # in case we missed something log it and try again
+            mlog.debug('  - NOT found:')
+            for mod in not_found:
+                mlog.debug(f'    - {mod}')
+
+        return False
+
+    def detect_inc_dirs(self, root: Path) -> T.List[BoostIncludeDir]:
+        candidates = []  # type: T.List[Path]
+        inc_root = root / 'include'
+
+        candidates += [root / 'boost']
+        candidates += [inc_root / 'boost']
+        if inc_root.is_dir():
+            for i in inc_root.iterdir():
+                if not i.is_dir() or not i.name.startswith('boost-'):
+                    continue
+                candidates += [i / 'boost']
+        candidates = [x for x in candidates if x.is_dir()]
+        candidates = [x / 'version.hpp' for x in candidates]
+        candidates = [x for x in candidates if x.exists()]
+        return [self._include_dir_from_version_header(x) for x in candidates]
+
+    def detect_lib_dirs(self, root: Path, use_system: bool) -> T.List[Path]:
+        # First check the system library dirs. Only consider those within the
+        # given root path.
+
+        if use_system:
+            system_dirs_t = self.clib_compiler.get_library_dirs(self.env)
+            system_dirs = [Path(x) for x in system_dirs_t]
+            system_dirs = [x.resolve() for x in system_dirs if x.exists()]
+            system_dirs = [x for x in system_dirs if mesonlib.path_is_in_root(x, root)]
+            system_dirs = list(mesonlib.OrderedSet(system_dirs))
+
+            if system_dirs:
+                return system_dirs
+
+        # No system library dirs were found --> fall back to manually looking
+        # for library dirs in root
+        dirs = []     # type: T.List[Path]
+        subdirs = []  # type: T.List[Path]
+        for i in root.iterdir():
+            if i.is_dir() and i.name.startswith('lib'):
+                dirs += [i]
+
+        # Some distros put libraries not directly inside /usr/lib but in /usr/lib/x86_64-linux-gnu
+        for i in dirs:
+            for j in i.iterdir():
+                if j.is_dir() and j.name.endswith('-linux-gnu'):
+                    subdirs += [j]
+
+        # Filter out paths that don't match the target arch to avoid finding
+        # the wrong libraries. See https://github.com/mesonbuild/meson/issues/7110
+        if not self.arch:
+            return dirs + subdirs
+
+        arch_list_32 = ['32', 'i386']
+        arch_list_64 = ['64']
+
+        raw_list = dirs + subdirs
+        no_arch = [x for x in raw_list if not any(y in x.name for y in arch_list_32 + arch_list_64)]
+
+        matching_arch = []  # type: T.List[Path]
+        if '32' in self.arch:
+            matching_arch = [x for x in raw_list if any(y in x.name for y in arch_list_32)]
+        elif '64' in self.arch:
+            matching_arch = [x for x in raw_list if any(y in x.name for y in arch_list_64)]
+
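+        # e.g. for a 64 bit arch the candidates ['lib', 'lib32', 'lib64'] reduce to
+        # ['lib64'] (matching) followed by ['lib'] (arch-neutral); 'lib32' is dropped.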
+        return sorted(matching_arch) + sorted(no_arch)
+
+    def filter_libraries(self, libs: T.List[BoostLibraryFile], lib_vers: str) -> T.List[BoostLibraryFile]:
+        # MSVC is very picky with the library tags
+        vscrt = ''
+        try:
+            crt_val = self.env.coredata.options[mesonlib.OptionKey('b_vscrt')].value
+            buildtype = self.env.coredata.options[mesonlib.OptionKey('buildtype')].value
+            vscrt = self.clib_compiler.get_crt_compile_args(crt_val, buildtype)[0]
+        except (KeyError, IndexError, AttributeError):
+            pass
+
+        # mlog.debug('    - static: {}'.format(self.static))
+        # mlog.debug('    - not explicit static: {}'.format(not self.explicit_static))
+        # mlog.debug('    - mt: {}'.format(self.multithreading))
+        # mlog.debug('    - version: {}'.format(lib_vers))
+        # mlog.debug('    - arch: {}'.format(self.arch))
+        # mlog.debug('    - vscrt: {}'.format(vscrt))
+        libs = [x for x in libs if x.static == self.static or not self.explicit_static]
+        libs = [x for x in libs if x.mt == self.multithreading]
+        libs = [x for x in libs if x.version_matches(lib_vers)]
+        libs = [x for x in libs if x.arch_matches(self.arch)]
+        libs = [x for x in libs if x.vscrt_matches(vscrt)]
+        libs = [x for x in libs if x.nvsuffix != 'dll']  # Only link to import libraries
+
+        # Only filter by debug when we are building in release mode. Debug
+        # libraries are automatically preferred through sorting otherwise.
+        if not self.debug:
+            libs = [x for x in libs if not x.debug]
+
+        # Take the abitag from the first library and filter by it. This
+        # ensures that we have a set of libraries that are always compatible.
+        if not libs:
+            return []
+        abitag = libs[0].abitag
+        libs = [x for x in libs if x.abitag == abitag]
+
+        return libs
+
+    def detect_libraries(self, libdir: Path) -> T.List[BoostLibraryFile]:
+        libs = set()  # type: T.Set[BoostLibraryFile]
+        for i in libdir.iterdir():
+            if not i.is_file():
+                continue
+            if not any(i.name.startswith(x) for x in ['libboost_', 'boost_']):
+                continue
+            # Windows binaries from SourceForge ship with PDB files alongside
+            # DLLs (#8325).  Ignore them.
+            if i.name.endswith('.pdb'):
+                continue
+
+            try:
+                libs.add(BoostLibraryFile(i.resolve()))
+            except UnknownFileException as e:
+                mlog.warning('Boost: ignoring unknown file {} under lib directory'.format(e.path.name))
+
+        return [x for x in libs if x.is_boost()]  # Filter out non-boost libraries
+
+    def detect_split_root(self, inc_dir: Path, lib_dir: Path) -> None:
+        boost_inc_dir = None
+        for j in [inc_dir / 'version.hpp', inc_dir / 'boost' / 'version.hpp']:
+            if j.is_file():
+                boost_inc_dir = self._include_dir_from_version_header(j)
+                break
+        if not boost_inc_dir:
+            self.is_found = False
+            return
+
+        self.is_found = self.run_check([boost_inc_dir], [lib_dir])
+
+    def detect_roots(self) -> None:
+        roots = []  # type: T.List[Path]
+
+        # Try getting the BOOST_ROOT from a boost.pc if it exists. This primarily
+        # allows BoostDependency to find boost from Conan. See #5438
+        try:
+            boost_pc = PkgConfigDependency('boost', self.env, {'required': False})
+            if boost_pc.found():
+                boost_root = boost_pc.get_pkgconfig_variable('prefix', [], None)
+                if boost_root:
+                    roots += [Path(boost_root)]
+        except DependencyException:
+            pass
+
+        # Add roots from system paths
+        inc_paths = [Path(x) for x in self.clib_compiler.get_default_include_dirs()]
+        inc_paths = [x.parent for x in inc_paths if x.exists()]
+        inc_paths = [x.resolve() for x in inc_paths]
+        roots += inc_paths
+
+        # Add system paths
+        if self.env.machines[self.for_machine].is_windows():
+            # Where boost built from source actually installs it
+            c_root = Path('C:/Boost')
+            if c_root.is_dir():
+                roots += [c_root]
+
+            # Where boost documentation says it should be
+            prog_files = Path('C:/Program Files/boost')
+            # Where boost prebuilt binaries are
+            local_boost = Path('C:/local')
+
+            candidates = []  # type: T.List[Path]
+            if prog_files.is_dir():
+                candidates += [*prog_files.iterdir()]
+            if local_boost.is_dir():
+                candidates += [*local_boost.iterdir()]
+
+            roots += [x for x in candidates if x.name.lower().startswith('boost') and x.is_dir()]
+        else:
+            tmp = []  # type: T.List[Path]
+
+            # Add some default system paths
+            tmp += [Path('/opt/local')]
+            tmp += [Path('/usr/local/opt/boost')]
+            tmp += [Path('/usr/local')]
+            tmp += [Path('/usr')]
+
+            # Cleanup paths
+            tmp = [x for x in tmp if x.is_dir()]
+            tmp = [x.resolve() for x in tmp]
+            roots += tmp
+
+        self.check_and_set_roots(roots, use_system=True)
+
+    def log_details(self) -> str:
+        res = ''
+        if self.modules_found:
+            res += 'found: ' + ', '.join(self.modules_found)
+        if self.modules_missing:
+            if res:
+                res += ' | '
+            res += 'missing: ' + ', '.join(self.modules_missing)
+        return res
+
+    def log_info(self) -> str:
+        if self.boost_root:
+            return self.boost_root.as_posix()
+        return ''
+
+    def _include_dir_from_version_header(self, hfile: Path) -> BoostIncludeDir:
+        # Extract the version with a regex. Using clib_compiler.get_define would
+        # also work; however, it is slower (since the compiler has to be invoked)
+        # and overkill, since the layout of the header is always the same.
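+        # e.g. `#define BOOST_VERSION 107300` decodes to version 1.73.0
+        # (version_int = major*100000 + minor*100 + patch).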
+        assert hfile.exists()
+        raw = hfile.read_text(encoding='utf-8')
+        m = re.search(r'#define\s+BOOST_VERSION\s+([0-9]+)', raw)
+        if not m:
+            mlog.debug(f'Failed to extract version information from {hfile}')
+            return BoostIncludeDir(hfile.parents[1], 0)
+        return BoostIncludeDir(hfile.parents[1], int(m.group(1)))
+
+    def _extra_compile_args(self) -> T.List[str]:
+        # BOOST_ALL_DYN_LINK should not be required with the known defines below
+        return ['-DBOOST_ALL_NO_LIB']  # Disable automatic linking
+
+packages['boost'] = BoostDependency
+
+# See https://www.boost.org/doc/libs/1_72_0/more/getting_started/unix-variants.html#library-naming
+# See https://mesonbuild.com/Reference-tables.html#cpu-families
+boost_arch_map = {
+    'aarch64': 'a64',
+    'arc': 'a32',
+    'arm': 'a32',
+    'ia64': 'i64',
+    'mips': 'm32',
+    'mips64': 'm64',
+    'ppc': 'p32',
+    'ppc64': 'p64',
+    'sparc': 's32',
+    'sparc64': 's64',
+    'x86': 'x32',
+    'x86_64': 'x64',
+}
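+# e.g. a host cpu_family of 'x86_64' maps to the 'x64' tag seen in library names
+# such as boost_regex-vc142-mt-x64-1_73.lib (illustrative name).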
+
+
+####      ---- BEGIN GENERATED ----      ####
+#                                           #
+# Generated with tools/boost_names.py:
+#  - boost version:   1.73.0
+#  - modules found:   159
+#  - libraries found: 43
+#
+
+class BoostLibrary():
+    def __init__(self, name: str, shared: T.List[str], static: T.List[str], single: T.List[str], multi: T.List[str]):
+        self.name = name
+        self.shared = shared
+        self.static = static
+        self.single = single
+        self.multi = multi
+
+class BoostModule():
+    def __init__(self, name: str, key: str, desc: str, libs: T.List[str]):
+        self.name = name
+        self.key = key
+        self.desc = desc
+        self.libs = libs
+
+
+# dict of all known libraries with additional compile options
+boost_libraries = {
+    'boost_atomic': BoostLibrary(
+        name='boost_atomic',
+        shared=['-DBOOST_ATOMIC_DYN_LINK=1'],
+        static=['-DBOOST_ATOMIC_STATIC_LINK=1'],
+        single=[],
+        multi=[],
+    ),
+    'boost_chrono': BoostLibrary(
+        name='boost_chrono',
+        shared=['-DBOOST_CHRONO_DYN_LINK=1'],
+        static=['-DBOOST_CHRONO_STATIC_LINK=1'],
+        single=['-DBOOST_CHRONO_THREAD_DISABLED'],
+        multi=[],
+    ),
+    'boost_container': BoostLibrary(
+        name='boost_container',
+        shared=['-DBOOST_CONTAINER_DYN_LINK=1'],
+        static=['-DBOOST_CONTAINER_STATIC_LINK=1'],
+        single=[],
+        multi=[],
+    ),
+    'boost_context': BoostLibrary(
+        name='boost_context',
+        shared=['-DBOOST_CONTEXT_DYN_LINK=1'],
+        static=[],
+        single=[],
+        multi=[],
+    ),
+    'boost_contract': BoostLibrary(
+        name='boost_contract',
+        shared=['-DBOOST_CONTRACT_DYN_LINK'],
+        static=['-DBOOST_CONTRACT_STATIC_LINK'],
+        single=['-DBOOST_CONTRACT_DISABLE_THREADS'],
+        multi=[],
+    ),
+    'boost_coroutine': BoostLibrary(
+        name='boost_coroutine',
+        shared=['-DBOOST_COROUTINES_DYN_LINK=1'],
+        static=[],
+        single=[],
+        multi=[],
+    ),
+    'boost_date_time': BoostLibrary(
+        name='boost_date_time',
+        shared=['-DBOOST_DATE_TIME_DYN_LINK=1'],
+        static=[],
+        single=[],
+        multi=[],
+    ),
+    'boost_exception': BoostLibrary(
+        name='boost_exception',
+        shared=[],
+        static=[],
+        single=[],
+        multi=[],
+    ),
+    'boost_fiber': BoostLibrary(
+        name='boost_fiber',
+        shared=['-DBOOST_FIBERS_DYN_LINK=1'],
+        static=[],
+        single=[],
+        multi=[],
+    ),
+    'boost_fiber_numa': BoostLibrary(
+        name='boost_fiber_numa',
+        shared=['-DBOOST_FIBERS_DYN_LINK=1'],
+        static=[],
+        single=[],
+        multi=[],
+    ),
+    'boost_filesystem': BoostLibrary(
+        name='boost_filesystem',
+        shared=['-DBOOST_FILESYSTEM_DYN_LINK=1'],
+        static=['-DBOOST_FILESYSTEM_STATIC_LINK=1'],
+        single=[],
+        multi=[],
+    ),
+    'boost_graph': BoostLibrary(
+        name='boost_graph',
+        shared=[],
+        static=[],
+        single=[],
+        multi=[],
+    ),
+    'boost_iostreams': BoostLibrary(
+        name='boost_iostreams',
+        shared=['-DBOOST_IOSTREAMS_DYN_LINK=1'],
+        static=[],
+        single=[],
+        multi=[],
+    ),
+    'boost_locale': BoostLibrary(
+        name='boost_locale',
+        shared=[],
+        static=[],
+        single=[],
+        multi=[],
+    ),
+    'boost_log': BoostLibrary(
+        name='boost_log',
+        shared=['-DBOOST_LOG_DYN_LINK=1'],
+        static=[],
+        single=['-DBOOST_LOG_NO_THREADS'],
+        multi=[],
+    ),
+    'boost_log_setup': BoostLibrary(
+        name='boost_log_setup',
+        shared=['-DBOOST_LOG_SETUP_DYN_LINK=1'],
+        static=[],
+        single=['-DBOOST_LOG_NO_THREADS'],
+        multi=[],
+    ),
+    'boost_math_c99': BoostLibrary(
+        name='boost_math_c99',
+        shared=[],
+        static=[],
+        single=[],
+        multi=[],
+    ),
+    'boost_math_c99f': BoostLibrary(
+        name='boost_math_c99f',
+        shared=[],
+        static=[],
+        single=[],
+        multi=[],
+    ),
+    'boost_math_c99l': BoostLibrary(
+        name='boost_math_c99l',
+        shared=[],
+        static=[],
+        single=[],
+        multi=[],
+    ),
+    'boost_math_tr1': BoostLibrary(
+        name='boost_math_tr1',
+        shared=[],
+        static=[],
+        single=[],
+        multi=[],
+    ),
+    'boost_math_tr1f': BoostLibrary(
+        name='boost_math_tr1f',
+        shared=[],
+        static=[],
+        single=[],
+        multi=[],
+    ),
+    'boost_math_tr1l': BoostLibrary(
+        name='boost_math_tr1l',
+        shared=[],
+        static=[],
+        single=[],
+        multi=[],
+    ),
+    'boost_mpi': BoostLibrary(
+        name='boost_mpi',
+        shared=[],
+        static=[],
+        single=[],
+        multi=[],
+    ),
+    'boost_nowide': BoostLibrary(
+        name='boost_nowide',
+        shared=['-DBOOST_NOWIDE_DYN_LINK=1'],
+        static=[],
+        single=[],
+        multi=[],
+    ),
+    'boost_prg_exec_monitor': BoostLibrary(
+        name='boost_prg_exec_monitor',
+        shared=['-DBOOST_TEST_DYN_LINK=1'],
+        static=[],
+        single=[],
+        multi=[],
+    ),
+    'boost_program_options': BoostLibrary(
+        name='boost_program_options',
+        shared=[],
+        static=[],
+        single=[],
+        multi=[],
+    ),
+    'boost_random': BoostLibrary(
+        name='boost_random',
+        shared=['-DBOOST_RANDOM_DYN_LINK'],
+        static=[],
+        single=[],
+        multi=[],
+    ),
+    'boost_regex': BoostLibrary(
+        name='boost_regex',
+        shared=[],
+        static=[],
+        single=[],
+        multi=[],
+    ),
+    'boost_serialization': BoostLibrary(
+        name='boost_serialization',
+        shared=[],
+        static=[],
+        single=[],
+        multi=[],
+    ),
+    'boost_stacktrace_addr2line': BoostLibrary(
+        name='boost_stacktrace_addr2line',
+        shared=[],
+        static=[],
+        single=[],
+        multi=[],
+    ),
+    'boost_stacktrace_backtrace': BoostLibrary(
+        name='boost_stacktrace_backtrace',
+        shared=[],
+        static=[],
+        single=[],
+        multi=[],
+    ),
+    'boost_stacktrace_basic': BoostLibrary(
+        name='boost_stacktrace_basic',
+        shared=[],
+        static=[],
+        single=[],
+        multi=[],
+    ),
+    'boost_stacktrace_noop': BoostLibrary(
+        name='boost_stacktrace_noop',
+        shared=[],
+        static=[],
+        single=[],
+        multi=[],
+    ),
+    'boost_stacktrace_windbg': BoostLibrary(
+        name='boost_stacktrace_windbg',
+        shared=[],
+        static=[],
+        single=[],
+        multi=[],
+    ),
+    'boost_stacktrace_windbg_cached': BoostLibrary(
+        name='boost_stacktrace_windbg_cached',
+        shared=[],
+        static=[],
+        single=[],
+        multi=[],
+    ),
+    'boost_system': BoostLibrary(
+        name='boost_system',
+        shared=['-DBOOST_SYSTEM_DYN_LINK=1'],
+        static=['-DBOOST_SYSTEM_STATIC_LINK=1'],
+        single=[],
+        multi=[],
+    ),
+    'boost_test_exec_monitor': BoostLibrary(
+        name='boost_test_exec_monitor',
+        shared=['-DBOOST_TEST_DYN_LINK=1'],
+        static=[],
+        single=[],
+        multi=[],
+    ),
+    'boost_thread': BoostLibrary(
+        name='boost_thread',
+        shared=['-DBOOST_THREAD_BUILD_DLL=1', '-DBOOST_THREAD_USE_DLL=1'],
+        static=['-DBOOST_THREAD_BUILD_LIB=1', '-DBOOST_THREAD_USE_LIB=1'],
+        single=[],
+        multi=[],
+    ),
+    'boost_timer': BoostLibrary(
+        name='boost_timer',
+        shared=['-DBOOST_TIMER_DYN_LINK=1'],
+        static=['-DBOOST_TIMER_STATIC_LINK=1'],
+        single=[],
+        multi=[],
+    ),
+    'boost_type_erasure': BoostLibrary(
+        name='boost_type_erasure',
+        shared=['-DBOOST_TYPE_ERASURE_DYN_LINK'],
+        static=[],
+        single=[],
+        multi=[],
+    ),
+    'boost_unit_test_framework': BoostLibrary(
+        name='boost_unit_test_framework',
+        shared=['-DBOOST_TEST_DYN_LINK=1'],
+        static=[],
+        single=[],
+        multi=[],
+    ),
+    'boost_wave': BoostLibrary(
+        name='boost_wave',
+        shared=[],
+        static=[],
+        single=[],
+        multi=[],
+    ),
+    'boost_wserialization': BoostLibrary(
+        name='boost_wserialization',
+        shared=[],
+        static=[],
+        single=[],
+        multi=[],
+    ),
+}
+
+#                                           #
+####       ---- END GENERATED ----       ####
diff --git a/vendored-meson/meson/mesonbuild/dependencies/cmake.py b/vendored-meson/meson/mesonbuild/dependencies/cmake.py
new file mode 100644
index 000000000000..8827c9abdcd8
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/dependencies/cmake.py
@@ -0,0 +1,654 @@
+# Copyright 2013-2021 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+from .base import ExternalDependency, DependencyException, DependencyTypeName
+from ..mesonlib import is_windows, MesonException, PerMachine, stringlistify, extract_as_list
+from ..cmake import CMakeExecutor, CMakeTraceParser, CMakeException, CMakeToolchain, CMakeExecScope, check_cmake_args, resolve_cmake_trace_targets, cmake_is_debug
+from .. import mlog
+import importlib.resources
+from pathlib import Path
+import functools
+import re
+import os
+import shutil
+import textwrap
+import typing as T
+
+if T.TYPE_CHECKING:
+    from ..cmake import CMakeTarget
+    from ..environment import Environment
+    from ..envconfig import MachineInfo
+
+class CMakeInfo(T.NamedTuple):
+    module_paths: T.List[str]
+    cmake_root: str
+    archs: T.List[str]
+    common_paths: T.List[str]
+
+class CMakeDependency(ExternalDependency):
+    # The class's copy of the CMake path. Avoids having to search for it
+    # multiple times in the same Meson invocation.
+    class_cmakeinfo: PerMachine[T.Optional[CMakeInfo]] = PerMachine(None, None)
+    # Version string for the minimum CMake version
+    class_cmake_version = '>=3.4'
+    # CMake generators to try (empty for no generator)
+    class_cmake_generators = ['', 'Ninja', 'Unix Makefiles', 'Visual Studio 10 2010']
+    class_working_generator: T.Optional[str] = None
+
+    def _gen_exception(self, msg: str) -> DependencyException:
+        return DependencyException(f'Dependency {self.name} not found: {msg}')
+
+    def _main_cmake_file(self) -> str:
+        return 'CMakeLists.txt'
+
+    def _extra_cmake_opts(self) -> T.List[str]:
+        return []
+
+    def _map_module_list(self, modules: T.List[T.Tuple[str, bool]], components: T.List[T.Tuple[str, bool]]) -> T.List[T.Tuple[str, bool]]:
+        # Map the input module list to something else
+        # This function will only be executed AFTER the initial CMake
+        # interpreter pass has completed. Thus variables defined in the
+        # CMakeLists.txt can be accessed here.
+        #
+        # Both the modules and components inputs contain the original lists.
+        return modules
+
+    def _map_component_list(self, modules: T.List[T.Tuple[str, bool]], components: T.List[T.Tuple[str, bool]]) -> T.List[T.Tuple[str, bool]]:
+        # Map the input components list to something else. This
+        # function will be executed BEFORE the initial CMake interpreter
+        # pass. Thus variables from the CMakeLists.txt can NOT be accessed.
+        #
+        # Both the modules and components inputs contain the original lists.
+        return components
+
+    def _original_module_name(self, module: str) -> str:
+        # Reverse the module mapping done by _map_module_list for
+        # one module
+        return module
+
+    def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any], language: T.Optional[str] = None, force_use_global_compilers: bool = False) -> None:
+        # Gather a list of all languages to support
+        self.language_list = []  # type: T.List[str]
+        if language is None or force_use_global_compilers:
+            compilers = None
+            if kwargs.get('native', False):
+                compilers = environment.coredata.compilers.build
+            else:
+                compilers = environment.coredata.compilers.host
+
+            candidates = ['c', 'cpp', 'fortran', 'objc', 'objcxx']
+            self.language_list += [x for x in candidates if x in compilers]
+        else:
+            self.language_list += [language]
+
+        # Add additional languages if required
+        if 'fortran' in self.language_list:
+            self.language_list += ['c']
+
+        # Ensure that the list is unique
+        self.language_list = list(set(self.language_list))
+
+        super().__init__(DependencyTypeName('cmake'), environment, kwargs, language=language)
+        self.name = name
+        self.is_libtool = False
+        # Store a copy of the CMake path on the object itself so it is
+        # stored in the pickled coredata and recovered.
+        self.cmakebin:  T.Optional[CMakeExecutor] = None
+        self.cmakeinfo: T.Optional[CMakeInfo] = None
+
+        # Where all CMake "build dirs" are located
+        self.cmake_root_dir = environment.scratch_dir
+
+        # T.List of successfully found modules
+        self.found_modules: T.List[str] = []
+
+        # Initialize with None before the first return to avoid
+        # AttributeError exceptions in derived classes
+        self.traceparser: T.Optional[CMakeTraceParser] = None
+
+        # TODO further evaluate always using MachineChoice.BUILD
+        self.cmakebin = CMakeExecutor(environment, CMakeDependency.class_cmake_version, self.for_machine, silent=self.silent)
+        if not self.cmakebin.found():
+            self.cmakebin = None
+            msg = f'CMake binary for machine {self.for_machine} not found. Giving up.'
+            if self.required:
+                raise DependencyException(msg)
+            mlog.debug(msg)
+            return
+
+        # Setup the trace parser
+        self.traceparser = CMakeTraceParser(self.cmakebin.version(), self._get_build_dir(), self.env)
+
+        cm_args = stringlistify(extract_as_list(kwargs, 'cmake_args'))
+        cm_args = check_cmake_args(cm_args)
+        if CMakeDependency.class_cmakeinfo[self.for_machine] is None:
+            CMakeDependency.class_cmakeinfo[self.for_machine] = self._get_cmake_info(cm_args)
+        self.cmakeinfo = CMakeDependency.class_cmakeinfo[self.for_machine]
+        if self.cmakeinfo is None:
+            raise self._gen_exception('Unable to obtain CMake system information')
+
+        package_version = kwargs.get('cmake_package_version', '')
+        if not isinstance(package_version, str):
+            raise DependencyException('Keyword "cmake_package_version" must be a string.')
+        components = [(x, True) for x in stringlistify(extract_as_list(kwargs, 'components'))]
+        modules = [(x, True) for x in stringlistify(extract_as_list(kwargs, 'modules'))]
+        modules += [(x, False) for x in stringlistify(extract_as_list(kwargs, 'optional_modules'))]
+        cm_path = stringlistify(extract_as_list(kwargs, 'cmake_module_path'))
+        cm_path = [x if os.path.isabs(x) else os.path.join(environment.get_source_dir(), x) for x in cm_path]
+        if cm_path:
+            cm_args.append('-DCMAKE_MODULE_PATH=' + ';'.join(cm_path))
+        if not self._preliminary_find_check(name, cm_path, self.cmakebin.get_cmake_prefix_paths(), environment.machines[self.for_machine]):
+            mlog.debug('Preliminary CMake check failed. Aborting.')
+            return
+        self._detect_dep(name, package_version, modules, components, cm_args)
+
+    def __repr__(self) -> str:
+        return f'<{self.__class__.__name__} {self.name}: {self.is_found} {self.version_reqs}>'
+
+    def _get_cmake_info(self, cm_args: T.List[str]) -> T.Optional[CMakeInfo]:
+        mlog.debug("Extracting basic cmake information")
+
+        # Try different CMake generators since specifying no generator may fail
+        # in Cygwin for some reason
+        gen_list = []
+        # First try the last working generator
+        if CMakeDependency.class_working_generator is not None:
+            gen_list += [CMakeDependency.class_working_generator]
+        gen_list += CMakeDependency.class_cmake_generators
+
+        temp_parser = CMakeTraceParser(self.cmakebin.version(), self._get_build_dir(), self.env)
+        toolchain = CMakeToolchain(self.cmakebin, self.env, self.for_machine, CMakeExecScope.DEPENDENCY, self._get_build_dir())
+        toolchain.write()
+
+        for i in gen_list:
+            mlog.debug('Try CMake generator: {}'.format(i if len(i) > 0 else 'auto'))
+
+            # Prepare options
+            cmake_opts = temp_parser.trace_args() + toolchain.get_cmake_args() + ['.']
+            cmake_opts += cm_args
+            if len(i) > 0:
+                cmake_opts = ['-G', i] + cmake_opts
+
+            # Run CMake
+            ret1, out1, err1 = self._call_cmake(cmake_opts, 'CMakePathInfo.txt')
+
+            # Current generator was successful
+            if ret1 == 0:
+                CMakeDependency.class_working_generator = i
+                break
+
+            mlog.debug(f'CMake failed to gather system information for generator {i} with error code {ret1}')
+            mlog.debug(f'OUT:\n{out1}\n\n\nERR:\n{err1}\n\n')
+
+        # Check if any generator succeeded
+        if ret1 != 0:
+            return None
+
+        try:
+            temp_parser.parse(err1)
+        except MesonException:
+            return None
+
+        def process_paths(l: T.List[str]) -> T.Set[str]:
+            if is_windows():
+                # Cannot split on ':' on Windows because it's part of the drive letter
+                tmp = [x.split(os.pathsep) for x in l]
+            else:
+                # https://github.com/mesonbuild/meson/issues/7294
+                tmp = [re.split(r':|;', x) for x in l]
+            flattened = [x for sublist in tmp for x in sublist]
+            return set(flattened)
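+        # e.g. (non-Windows, illustrative):
+        #   process_paths(['/usr/lib:/usr/local/lib;/opt/lib'])
+        #   == {'/usr/lib', '/usr/local/lib', '/opt/lib'}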
+
+        # Extract the variables and sanity check them
+        root_paths_set = process_paths(temp_parser.get_cmake_var('MESON_FIND_ROOT_PATH'))
+        root_paths_set.update(process_paths(temp_parser.get_cmake_var('MESON_CMAKE_SYSROOT')))
+        root_paths = sorted(root_paths_set)
+        root_paths = [x for x in root_paths if os.path.isdir(x)]
+        module_paths_set = process_paths(temp_parser.get_cmake_var('MESON_PATHS_LIST'))
+        rooted_paths: T.List[str] = []
+        for j in [Path(x) for x in root_paths]:
+            for p in [Path(x) for x in module_paths_set]:
+                rooted_paths.append(str(j / p.relative_to(p.anchor)))
+        module_paths = sorted(module_paths_set.union(rooted_paths))
+        module_paths = [x for x in module_paths if os.path.isdir(x)]
+        archs = temp_parser.get_cmake_var('MESON_ARCH_LIST')
+
+        common_paths = ['lib', 'lib32', 'lib64', 'libx32', 'share']
+        for i in archs:
+            common_paths += [os.path.join('lib', i)]
+
+        res = CMakeInfo(
+            module_paths=module_paths,
+            cmake_root=temp_parser.get_cmake_var('MESON_CMAKE_ROOT')[0],
+            archs=archs,
+            common_paths=common_paths,
+        )
+
+        mlog.debug(f'  -- Module search paths:    {res.module_paths}')
+        mlog.debug(f'  -- CMake root:             {res.cmake_root}')
+        mlog.debug(f'  -- CMake architectures:    {res.archs}')
+        mlog.debug(f'  -- CMake lib search paths: {res.common_paths}')
+
+        return res
+
+    @staticmethod
+    @functools.lru_cache(maxsize=None)
+    def _cached_listdir(path: str) -> T.Tuple[T.Tuple[str, str], ...]:
+        try:
+            return tuple((x, str(x).lower()) for x in os.listdir(path))
+        except OSError:
+            return tuple()
+
+    @staticmethod
+    @functools.lru_cache(maxsize=None)
+    def _cached_isdir(path: str) -> bool:
+        try:
+            return os.path.isdir(path)
+        except OSError:
+            return False
+
+    def _preliminary_find_check(self, name: str, module_path: T.List[str], prefix_path: T.List[str], machine: 'MachineInfo') -> bool:
+        lname = str(name).lower()
+
+        # Checks <path>, <path>/cmake, <path>/CMake
+        def find_module(path: str) -> bool:
+            for i in [path, os.path.join(path, 'cmake'), os.path.join(path, 'CMake')]:
+                if not self._cached_isdir(i):
+                    continue
+
+                # Check the directory case insensitive
+                content = self._cached_listdir(i)
+                candidates = ['Find{}.cmake', '{}Config.cmake', '{}-config.cmake']
+                candidates = [x.format(name).lower() for x in candidates]
+                if any(x[1] in candidates for x in content):
+                    return True
+            return False
+
+        # Search in <path>/(lib/<arch>|lib*|share) for cmake files
+        def search_lib_dirs(path: str) -> bool:
+            for i in [os.path.join(path, x) for x in self.cmakeinfo.common_paths]:
+                if not self._cached_isdir(i):
+                    continue
+
+                # Check <path>/(lib/<arch>|lib*|share)/cmake/<name>*/
+                cm_dir = os.path.join(i, 'cmake')
+                if self._cached_isdir(cm_dir):
+                    content = self._cached_listdir(cm_dir)
+                    content = tuple(x for x in content if x[1].startswith(lname))
+                    for k in content:
+                        if find_module(os.path.join(cm_dir, k[0])):
+                            return True
+
+                # <path>/(lib/<arch>|lib*|share)/<name>*/
+                # <path>/(lib/<arch>|lib*|share)/<name>*/(cmake|CMake)/
+                content = self._cached_listdir(i)
+                content = tuple(x for x in content if x[1].startswith(lname))
+                for k in content:
+                    if find_module(os.path.join(i, k[0])):
+                        return True
+
+            return False
+
+        # Check the user provided and system module paths
+        for i in module_path + [os.path.join(self.cmakeinfo.cmake_root, 'Modules')]:
+            if find_module(i):
+                return True
+
+        # Check the user provided prefix paths
+        for i in prefix_path:
+            if search_lib_dirs(i):
+                return True
+
+        # Check PATH
+        system_env = []  # type: T.List[str]
+        for i in os.environ.get('PATH', '').split(os.pathsep):
+            if i.endswith('/bin') or i.endswith('\\bin'):
+                i = i[:-4]
+            if i.endswith('/sbin') or i.endswith('\\sbin'):
+                i = i[:-5]
+            system_env += [i]
+
+        # Check the system paths
+        for i in self.cmakeinfo.module_paths + system_env:
+            if find_module(i):
+                return True
+
+            if search_lib_dirs(i):
+                return True
+
+            content = self._cached_listdir(i)
+            content = tuple(x for x in content if x[1].startswith(lname))
+            for k in content:
+                if search_lib_dirs(os.path.join(i, k[0])):
+                    return True
+
+            # Mac framework support
+            if machine.is_darwin():
+                for j in [f'{lname}.framework', f'{lname}.app']:
+                    for k in content:
+                        if k[1] != j:
+                            continue
+                        if find_module(os.path.join(i, k[0], 'Resources')) or find_module(os.path.join(i, k[0], 'Version')):
+                            return True
+
+        # Check the environment path
+        env_path = os.environ.get(f'{name}_DIR')
+        if env_path and find_module(env_path):
+            return True
+
+        # Check the Linux CMake registry
+        linux_reg = Path.home() / '.cmake' / 'packages'
+        for p in [linux_reg / name, linux_reg / lname]:
+            if p.exists():
+                return True
+
+        return False
+
+    def _detect_dep(self, name: str, package_version: str, modules: T.List[T.Tuple[str, bool]], components: T.List[T.Tuple[str, bool]], args: T.List[str]) -> None:
+        # Detect a dependency with CMake using the '--find-package' mode
+        # and the trace output (stderr)
+        #
+        # When the trace output is enabled CMake prints all functions with
+        # parameters to stderr as they are executed. Since CMake 3.4.0
+        # variables ("${VAR}") are also replaced in the trace output.
+        mlog.debug('\nDetermining dependency {!r} with CMake executable '
+                   '{!r}'.format(name, self.cmakebin.executable_path()))
+
+        # Try different CMake generators since specifying no generator may fail
+        # in Cygwin for some reason
+        gen_list = []
+        # First try the last working generator
+        if CMakeDependency.class_working_generator is not None:
+            gen_list += [CMakeDependency.class_working_generator]
+        gen_list += CMakeDependency.class_cmake_generators
+
+        # Map the components
+        comp_mapped = self._map_component_list(modules, components)
+        toolchain = CMakeToolchain(self.cmakebin, self.env, self.for_machine, CMakeExecScope.DEPENDENCY, self._get_build_dir())
+        toolchain.write()
+
+        for i in gen_list:
+            mlog.debug('Try CMake generator: {}'.format(i if len(i) > 0 else 'auto'))
+
+            # Prepare options
+            cmake_opts = []
+            cmake_opts += [f'-DNAME={name}']
+            cmake_opts += ['-DARCHS={}'.format(';'.join(self.cmakeinfo.archs))]
+            cmake_opts += [f'-DVERSION={package_version}']
+            cmake_opts += ['-DCOMPS={}'.format(';'.join([x[0] for x in comp_mapped]))]
+            cmake_opts += [f'-DSTATIC={self.static}']
+            cmake_opts += args
+            cmake_opts += self.traceparser.trace_args()
+            cmake_opts += toolchain.get_cmake_args()
+            cmake_opts += self._extra_cmake_opts()
+            cmake_opts += ['.']
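+            # e.g. (illustrative) this assembles something like:
+            #   ['-DNAME=ZLIB', '-DARCHS=x86_64;...', '-DVERSION=', '-DCOMPS=',
+            #    '-DSTATIC=False', <trace/toolchain args>, '.']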
+            if len(i) > 0:
+                cmake_opts = ['-G', i] + cmake_opts
+
+            # Run CMake
+            ret1, out1, err1 = self._call_cmake(cmake_opts, self._main_cmake_file())
+
+            # Current generator was successful
+            if ret1 == 0:
+                CMakeDependency.class_working_generator = i
+                break
+
+            mlog.debug(f'CMake failed for generator {i} and package {name} with error code {ret1}')
+            mlog.debug(f'OUT:\n{out1}\n\n\nERR:\n{err1}\n\n')
+
+        # Check if any generator succeeded
+        if ret1 != 0:
+            return
+
+        try:
+            self.traceparser.parse(err1)
+        except CMakeException as e:
+            e2 = self._gen_exception(str(e))
+            if self.required:
+                raise
+            else:
+                self.compile_args = []
+                self.link_args = []
+                self.is_found = False
+                self.reason = e2
+                return
+
+        # Whether the package is found or not is always stored in PACKAGE_FOUND
+        self.is_found = self.traceparser.var_to_bool('PACKAGE_FOUND')
+        if not self.is_found:
+            return
+
+        # Try to detect the version
+        vers_raw = self.traceparser.get_cmake_var('PACKAGE_VERSION')
+
+        if len(vers_raw) > 0:
+            self.version = vers_raw[0]
+            self.version = self.version.strip('"\' ')
+
+        # Post-process module list. Used in derived classes to modify the
+        # module list (append or prepend a string, etc.).
+        modules = self._map_module_list(modules, components)
+        autodetected_module_list = False
+
+        # Try guessing a CMake target if none is provided
+        if len(modules) == 0:
+            for i in self.traceparser.targets:
+                tg = i.lower()
+                lname = name.lower()
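+                # e.g. for name 'ZLIB' this matches an imported target named
+                # 'ZLIB::ZLIB' (illustrative)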
+                if f'{lname}::{lname}' == tg or lname == tg.replace('::', ''):
+                    mlog.debug(f'Guessed CMake target \'{i}\'')
+                    modules = [(i, True)]
+                    autodetected_module_list = True
+                    break
+
+        # Failed to guess a target --> try the old-style method
+        if len(modules) == 0:
+            # Warn when there might be matching imported targets but no automatic match was used
+            partial_modules: T.List[CMakeTarget] = []
+            for k, v in self.traceparser.targets.items():
+                tg = k.lower()
+                lname = name.lower()
+                if tg.startswith(f'{lname}::'):
+                    partial_modules += [v]
+            if partial_modules:
+                mlog.warning(textwrap.dedent(f'''\
+                    Could not find an exact match for the CMake dependency {name}.
+
+                    However, Meson found the following partial matches:
+
+                        {[x.name for x in partial_modules]}
+
+                    Using imported targets is recommended, since this approach is less error prone
+                    and better supported by Meson. Consider explicitly specifying one of
+                    these in the dependency call with:
+
+                        dependency('{name}', modules: ['{name}::<name>', ...])
+
+                    Meson will now continue to use the old-style {name}_LIBRARIES CMake
+                    variables to extract the dependency information since no explicit
+                    target is currently specified.
+
+                '''))
+                mlog.debug('More info for the partial match targets:')
+                for tgt in partial_modules:
+                    mlog.debug(tgt)
+
+            incDirs = [x for x in self.traceparser.get_cmake_var('PACKAGE_INCLUDE_DIRS') if x]
+            defs = [x for x in self.traceparser.get_cmake_var('PACKAGE_DEFINITIONS') if x]
+            libs_raw = [x for x in self.traceparser.get_cmake_var('PACKAGE_LIBRARIES') if x]
+
+            # CMake has a "fun" API, where certain keywords describing
+            # configurations can be in the *_LIBRARIES variables. See:
+            # - https://github.com/mesonbuild/meson/issues/9197
+            # - https://gitlab.freedesktop.org/libnice/libnice/-/issues/140
+            # - https://cmake.org/cmake/help/latest/command/target_link_libraries.html#overview  (the last point in the section)
+            libs: T.List[str] = []
+            cfg_matches = True
+            is_debug = cmake_is_debug(self.env)
+            cm_tag_map = {'debug': is_debug, 'optimized': not is_debug, 'general': True}
+            for i in libs_raw:
+                if i.lower() in cm_tag_map:
+                    cfg_matches = cm_tag_map[i.lower()]
+                    continue
+                if cfg_matches:
+                    libs += [i]
+                # According to the CMake docs, a keyword applies only to the
+                # item directly following it, and all items without a keyword
+                # are implicitly `general`
+                cfg_matches = True
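+            # e.g. (illustrative) in a release build,
+            #   ['optimized', 'foo.lib', 'debug', 'foo_d.lib', 'bar.lib']
+            # reduces to ['foo.lib', 'bar.lib']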
+
+            # Try to use old style variables if no module is specified
+            if len(libs) > 0:
+                self.compile_args = [f'-I{x}' for x in incDirs] + defs
+                self.link_args = []
+                for j in libs:
+                    rtgt = resolve_cmake_trace_targets(j, self.traceparser, self.env, clib_compiler=self.clib_compiler)
+                    self.link_args += rtgt.libraries
+                    self.compile_args += [f'-I{x}' for x in rtgt.include_directories]
+                    self.compile_args += rtgt.public_compile_opts
+                mlog.debug(f'using old-style CMake variables for dependency {name}')
+                mlog.debug(f'Include Dirs:         {incDirs}')
+                mlog.debug(f'Compiler Definitions: {defs}')
+                mlog.debug(f'Libraries:            {libs}')
+                return
+
+            # Even the old-style approach failed. Nothing else we can do here
+            self.is_found = False
+            raise self._gen_exception('CMake: failed to guess a CMake target for {}.\n'
+                                      'Try to explicitly specify one or more targets with the "modules" property.\n'
+                                      'Valid targets are:\n{}'.format(name, list(self.traceparser.targets.keys())))
+
+        # Set dependencies with CMake targets
+        # recognise arguments we should pass directly to the linker
+        incDirs = []
+        compileOptions = []
+        libraries = []
+
+        for i, required in modules:
+            if i not in self.traceparser.targets:
+                if not required:
+                    mlog.warning('CMake: Optional module', mlog.bold(self._original_module_name(i)), 'for', mlog.bold(name), 'was not found')
+                    continue
+                raise self._gen_exception('CMake: invalid module {} for {}.\n'
+                                          'Try to explicitly specify one or more targets with the "modules" property.\n'
+                                          'Valid targets are:\n{}'.format(self._original_module_name(i), name, list(self.traceparser.targets.keys())))
+
+            if not autodetected_module_list:
+                self.found_modules += [i]
+
+            rtgt = resolve_cmake_trace_targets(i, self.traceparser, self.env,
+                                               clib_compiler=self.clib_compiler,
+                                               not_found_warning=lambda x:
+                                                   mlog.warning('CMake: Dependency', mlog.bold(x), 'for', mlog.bold(name), 'was not found')
+                                               )
+            incDirs += rtgt.include_directories
+            compileOptions += rtgt.public_compile_opts
+            libraries += rtgt.libraries + rtgt.link_flags
+
+        # Make sure all elements in the lists are unique and sorted
+        incDirs = sorted(set(incDirs))
+        compileOptions = sorted(set(compileOptions))
+        libraries = sorted(set(libraries))
+
+        mlog.debug(f'Include Dirs:         {incDirs}')
+        mlog.debug(f'Compiler Options:     {compileOptions}')
+        mlog.debug(f'Libraries:            {libraries}')
+
+        self.compile_args = compileOptions + [f'-I{x}' for x in incDirs]
+        self.link_args = libraries
+
+    def _get_build_dir(self) -> Path:
+        build_dir = Path(self.cmake_root_dir) / f'cmake_{self.name}'
+        build_dir.mkdir(parents=True, exist_ok=True)
+        return build_dir
+
+    def _setup_cmake_dir(self, cmake_file: str) -> Path:
+        # Setup the CMake build environment and return the "build" directory
+        build_dir = self._get_build_dir()
+
+        # Remove old CMake cache so we can try out multiple generators
+        cmake_cache = build_dir / 'CMakeCache.txt'
+        cmake_files = build_dir / 'CMakeFiles'
+        if cmake_cache.exists():
+            cmake_cache.unlink()
+        shutil.rmtree(cmake_files.as_posix(), ignore_errors=True)
+
+        # Insert language parameters into the CMakeLists.txt and write new CMakeLists.txt
+        cmake_txt = importlib.resources.read_text('mesonbuild.dependencies.data', cmake_file, encoding = 'utf-8')
+
+        # In general, some Fortran CMake find_package() calls also require the
+        # C language to be enabled, even if nothing from C is directly used.
+        # An easy Fortran example that fails without the C language is
+        #   find_package(Threads)
+        # To make this general to any other language that might need this, we
+        # use a list for all languages and expand it in the cmake
+        # project(... LANGUAGES ...) statement.
+        from ..cmake import language_map
+        cmake_language = [language_map[x] for x in self.language_list if x in language_map]
+        if not cmake_language:
+            cmake_language += ['NONE']
+
+        cmake_txt = textwrap.dedent("""
+            cmake_minimum_required(VERSION ${{CMAKE_VERSION}})
+            project(MesonTemp LANGUAGES {})
+        """).format(' '.join(cmake_language)) + cmake_txt
+
+        cm_file = build_dir / 'CMakeLists.txt'
+        cm_file.write_text(cmake_txt, encoding='utf-8')
+        mlog.cmd_ci_include(cm_file.absolute().as_posix())
+
+        return build_dir
+
+    def _call_cmake(self,
+                    args: T.List[str],
+                    cmake_file: str,
+                    env: T.Optional[T.Dict[str, str]] = None) -> T.Tuple[int, T.Optional[str], T.Optional[str]]:
+        build_dir = self._setup_cmake_dir(cmake_file)
+        return self.cmakebin.call(args, build_dir, env=env)
+
+    @staticmethod
+    def log_tried() -> str:
+        return 'cmake'
+
+    def log_details(self) -> str:
+        modules = [self._original_module_name(x) for x in self.found_modules]
+        modules = sorted(set(modules))
+        if modules:
+            return 'modules: ' + ', '.join(modules)
+        return ''
+
+    def get_variable(self, *, cmake: T.Optional[str] = None, pkgconfig: T.Optional[str] = None,
+                     configtool: T.Optional[str] = None, internal: T.Optional[str] = None,
+                     default_value: T.Optional[str] = None,
+                     pkgconfig_define: T.Optional[T.List[str]] = None) -> str:
+        if cmake and self.traceparser is not None:
+            try:
+                v = self.traceparser.vars[cmake]
+            except KeyError:
+                pass
+            else:
+                # CMake does NOT have a list datatype. We have no idea whether
+                # a value is a single string or a string separated by ';'.
+                # Internally, we treat them as the latter and represent
+                # everything as a list, because it is convenient when we are
+                # mostly handling imported targets, which have various
+                # properties that are actually lists.
+                #
+                # As a result we need to convert them back to strings when grabbing
+                # raw variables the user requested.
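+                # e.g. ['a', 'b', 'c'] -> 'a;b;c' (illustrative)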
+                return ';'.join(v)
+        if default_value is not None:
+            return default_value
+        raise DependencyException(f'Could not get cmake variable and no default provided for {self!r}')
diff --git a/vendored-meson/meson/mesonbuild/dependencies/coarrays.py b/vendored-meson/meson/mesonbuild/dependencies/coarrays.py
new file mode 100644
index 000000000000..5cb855614f73
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/dependencies/coarrays.py
@@ -0,0 +1,89 @@
+# Copyright 2013-2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+import functools
+import typing as T
+
+from .base import DependencyMethods, detect_compiler, SystemDependency
+from .cmake import CMakeDependency
+from .detect import packages
+from .pkgconfig import PkgConfigDependency
+from .factory import factory_methods
+
+if T.TYPE_CHECKING:
+    from .factory import DependencyGenerator
+    from ..environment import Environment, MachineChoice
+
+
+@factory_methods({DependencyMethods.PKGCONFIG, DependencyMethods.CMAKE, DependencyMethods.SYSTEM})
+def coarray_factory(env: 'Environment',
+                    for_machine: 'MachineChoice',
+                    kwargs: T.Dict[str, T.Any],
+                    methods: T.List[DependencyMethods]) -> T.List['DependencyGenerator']:
+    fcid = detect_compiler('coarray', env, for_machine, 'fortran').get_id()
+    candidates: T.List['DependencyGenerator'] = []
+
+    if fcid == 'gcc':
+        # OpenCoarrays is the most commonly used method for Fortran Coarray with GCC
+        if DependencyMethods.PKGCONFIG in methods:
+            for pkg in ['caf-openmpi', 'caf']:
+                candidates.append(functools.partial(
+                    PkgConfigDependency, pkg, env, kwargs, language='fortran'))
+
+        if DependencyMethods.CMAKE in methods:
+            if 'modules' not in kwargs:
+                kwargs['modules'] = 'OpenCoarrays::caf_mpi'
+            candidates.append(functools.partial(
+                CMakeDependency, 'OpenCoarrays', env, kwargs, language='fortran'))
+
+    if DependencyMethods.SYSTEM in methods:
+        candidates.append(functools.partial(CoarrayDependency, env, kwargs))
+
+    return candidates
+packages['coarray'] = coarray_factory
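+# From a meson.build file this factory is reached through the standard
+# dependency lookup, e.g.: coarray_dep = dependency('coarray')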
+
+
+class CoarrayDependency(SystemDependency):
+    """
+    Coarrays are a Fortran 2008 feature.
+
+    Coarrays are sometimes implemented via external library (GCC+OpenCoarrays),
+    while other compilers just build in support (Cray, IBM, Intel, NAG).
+    Coarrays may be thought of as a high-level language abstraction of
+    low-level MPI calls.
+    """
+    def __init__(self, environment: 'Environment', kwargs: T.Dict[str, T.Any]) -> None:
+        super().__init__('coarray', environment, kwargs, language='fortran')
+        kwargs['required'] = False
+        kwargs['silent'] = True
+
+        cid = self.get_compiler().get_id()
+        if cid == 'gcc':
+            # Fallback to single image
+            self.compile_args = ['-fcoarray=single']
+            self.version = 'single image (fallback)'
+            self.is_found = True
+        elif cid == 'intel':
+            # Coarrays are built into Intel compilers, no external library needed
+            self.is_found = True
+            self.link_args = ['-coarray=shared']
+            self.compile_args = self.link_args
+        elif cid == 'intel-cl':
+            # Coarrays are built into Intel compilers, no external library needed
+            self.is_found = True
+            self.compile_args = ['/Qcoarray:shared']
+        elif cid == 'nagfor':
+            # NAG doesn't require any special arguments for Coarray
+            self.is_found = True
diff --git a/vendored-meson/meson/mesonbuild/dependencies/configtool.py b/vendored-meson/meson/mesonbuild/dependencies/configtool.py
new file mode 100644
index 000000000000..5a4294e7dccc
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/dependencies/configtool.py
@@ -0,0 +1,187 @@
+# Copyright 2013-2021 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+from .base import ExternalDependency, DependencyException, DependencyTypeName
+from ..mesonlib import listify, Popen_safe, Popen_safe_logged, split_args, version_compare, version_compare_many
+from ..programs import find_external_program
+from .. import mlog
+import re
+import typing as T
+
+from mesonbuild import mesonlib
+
+if T.TYPE_CHECKING:
+    from ..environment import Environment
+
+class ConfigToolDependency(ExternalDependency):
+
+    """Class representing dependencies found using a config tool.
+
+    Takes the following extra keys in kwargs that it uses internally:
+    :tools List[str]: A list of tool names to use
+    :version_arg str: The argument to pass to the tool to get its version
+    :skip_version str: The argument to pass to the tool to ignore its version
+        (if ``version_arg`` fails, but the tool may start accepting it in the
+        future), because some tools do not accept --version
+    :returncode_value int: The expected returncode value,
+        because some tools do not return 0 on success
+    """
+
+    tools: T.Optional[T.List[str]] = None
+    tool_name: T.Optional[str] = None
+    version_arg = '--version'
+    skip_version: T.Optional[str] = None
+    allow_default_for_cross = False
+    __strip_version = re.compile(r'^[0-9][0-9.]+')
+
+    def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any], language: T.Optional[str] = None):
+        super().__init__(DependencyTypeName('config-tool'), environment, kwargs, language=language)
+        self.name = name
+        # You may want to overwrite the class version in some cases
+        self.tools = listify(kwargs.get('tools', self.tools))
+        if not self.tool_name:
+            self.tool_name = self.tools[0]
+        if 'version_arg' in kwargs:
+            self.version_arg = kwargs['version_arg']
+
+        req_version_raw = kwargs.get('version', None)
+        if req_version_raw is not None:
+            req_version = mesonlib.stringlistify(req_version_raw)
+        else:
+            req_version = []
+        tool, version = self.find_config(req_version, kwargs.get('returncode_value', 0))
+        self.config = tool
+        self.is_found = self.report_config(version, req_version)
+        if not self.is_found:
+            self.config = None
+            return
+        self.version = version
+
+    def _sanitize_version(self, version: str) -> str:
+        """Remove any non-numeric, non-point version suffixes."""
+        m = self.__strip_version.match(version)
+        if m:
+            # Ensure that there isn't a trailing '.', such as an input like
+            # `1.2.3.git-1234`
+            return m.group(0).rstrip('.')
+        return version
+
+    def find_config(self, versions: T.List[str], returncode: int = 0) \
+            -> T.Tuple[T.Optional[T.List[str]], T.Optional[str]]:
+        """Helper method that searches for config tool binaries in PATH and
+        returns the one that best matches the given version requirements.
+        """
+        best_match: T.Tuple[T.Optional[T.List[str]], T.Optional[str]] = (None, None)
+        for potential_bin in find_external_program(
+                self.env, self.for_machine, self.tool_name,
+                self.tool_name, self.tools, allow_default_for_cross=self.allow_default_for_cross):
+            if not potential_bin.found():
+                continue
+            tool = potential_bin.get_command()
+            try:
+                p, out = Popen_safe(tool + [self.version_arg])[:2]
+            except (FileNotFoundError, PermissionError):
+                continue
+            if p.returncode != returncode:
+                if self.skip_version:
+                    # maybe the executable is valid even if it doesn't support --version
+                    p = Popen_safe(tool + [self.skip_version])[0]
+                    if p.returncode != returncode:
+                        continue
+                else:
+                    continue
+
+            out = self._sanitize_version(out.strip())
+            # Some tools, like pcap-config don't supply a version, but also
+            # don't fail with --version, in that case just assume that there is
+            # only one version and return it.
+            if not out:
+                return (tool, None)
+            if versions:
+                is_found = version_compare_many(out, versions)[0]
+                # This allows returning a found version without a config tool,
+                # which is useful to inform the user that you found version x,
+                # but y was required.
+                if not is_found:
+                    tool = None
+            if best_match[1]:
+                if version_compare(out, '> {}'.format(best_match[1])):
+                    best_match = (tool, out)
+            else:
+                best_match = (tool, out)
+
+        return best_match
+
+    def report_config(self, version: T.Optional[str], req_version: T.List[str]) -> bool:
+        """Helper method to print messages about the tool."""
+
+        found_msg: T.List[T.Union[str, mlog.AnsiDecorator]] = [mlog.bold(self.tool_name), 'found:']
+
+        if self.config is None:
+            found_msg.append(mlog.red('NO'))
+            if version is not None and req_version:
+                found_msg.append(f'found {version!r} but need {req_version!r}')
+            elif req_version:
+                found_msg.append(f'need {req_version!r}')
+        else:
+            found_msg += [mlog.green('YES'), '({})'.format(' '.join(self.config)), version]
+
+        mlog.log(*found_msg)
+
+        return self.config is not None
+
+    def get_config_value(self, args: T.List[str], stage: str) -> T.List[str]:
+        p, out, err = Popen_safe_logged(self.config + args)
+        if p.returncode != 0:
+            if self.required:
+                raise DependencyException(f'Could not generate {stage} for {self.name}.\n{err}')
+            return []
+        return split_args(out)
+
+    def get_configtool_variable(self, variable_name: str) -> str:
+        p, out, _ = Popen_safe(self.config + [f'--{variable_name}'])
+        if p.returncode != 0:
+            if self.required:
+                raise DependencyException(
+                    'Could not get variable "{}" for dependency {}'.format(
+                        variable_name, self.name))
+        variable = out.strip()
+        mlog.debug(f'Got config-tool variable {variable_name} : {variable}')
+        return variable
+
+    @staticmethod
+    def log_tried() -> str:
+        return 'config-tool'
+
+    def get_variable(self, *, cmake: T.Optional[str] = None, pkgconfig: T.Optional[str] = None,
+                     configtool: T.Optional[str] = None, internal: T.Optional[str] = None,
+                     default_value: T.Optional[str] = None,
+                     pkgconfig_define: T.Optional[T.List[str]] = None) -> str:
+        if configtool:
+            # In the non-required case '' (empty string) will be returned if
+            # the variable is not found. Since '' is a valid value to return we
+            # set required to True here to force an error, and use the
+            # finally clause to ensure it's restored.
+            restore = self.required
+            self.required = True
+            try:
+                return self.get_configtool_variable(configtool)
+            except DependencyException:
+                pass
+            finally:
+                self.required = restore
+        if default_value is not None:
+            return default_value
+        raise DependencyException(f'Could not get config-tool variable and no default provided for {self!r}')
diff --git a/vendored-meson/meson/mesonbuild/dependencies/cuda.py b/vendored-meson/meson/mesonbuild/dependencies/cuda.py
new file mode 100644
index 000000000000..af0ae4b1f569
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/dependencies/cuda.py
@@ -0,0 +1,295 @@
+# Copyright 2013-2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+import glob
+import re
+import os
+import typing as T
+from pathlib import Path
+
+from .. import mesonlib
+from .. import mlog
+from ..environment import detect_cpu_family
+from .base import DependencyException, SystemDependency
+from .detect import packages
+
+
+if T.TYPE_CHECKING:
+    from ..environment import Environment
+    from ..compilers import Compiler
+
+    TV_ResultTuple = T.Tuple[T.Optional[str], T.Optional[str], bool]
+
+class CudaDependency(SystemDependency):
+
+    supported_languages = ['cuda', 'cpp', 'c'] # see also _detect_language
+
+    def __init__(self, environment: 'Environment', kwargs: T.Dict[str, T.Any]) -> None:
+        compilers = environment.coredata.compilers[self.get_for_machine_from_kwargs(kwargs)]
+        language = self._detect_language(compilers)
+        if language not in self.supported_languages:
+            raise DependencyException(f'Language \'{language}\' is not supported by the CUDA Toolkit. Supported languages are {self.supported_languages}.')
+
+        super().__init__('cuda', environment, kwargs, language=language)
+        self.lib_modules: T.Dict[str, T.List[str]] = {}
+        self.requested_modules = self.get_requested(kwargs)
+        if 'cudart' not in self.requested_modules:
+            self.requested_modules = ['cudart'] + self.requested_modules
+
+        (self.cuda_path, self.version, self.is_found) = self._detect_cuda_path_and_version()
+        if not self.is_found:
+            return
+
+        if not os.path.isabs(self.cuda_path):
+            raise DependencyException(f'CUDA Toolkit path must be absolute, got \'{self.cuda_path}\'.')
+
+        # nvcc already knows where to find the CUDA Toolkit, but if we're compiling
+        # a mixed C/C++/CUDA project, we still need to make the include dir searchable
+        if self.language != 'cuda' or len(compilers) > 1:
+            self.incdir = os.path.join(self.cuda_path, 'include')
+            self.compile_args += [f'-I{self.incdir}']
+
+        if self.language != 'cuda':
+            arch_libdir = self._detect_arch_libdir()
+            self.libdir = os.path.join(self.cuda_path, arch_libdir)
+            mlog.debug('CUDA library directory is', mlog.bold(self.libdir))
+        else:
+            self.libdir = None
+
+        self.is_found = self._find_requested_libraries()
+
+    @classmethod
+    def _detect_language(cls, compilers: T.Dict[str, 'Compiler']) -> str:
+        for lang in cls.supported_languages:
+            if lang in compilers:
+                return lang
+        return list(compilers.keys())[0]
+
+    def _detect_cuda_path_and_version(self) -> TV_ResultTuple:
+        self.env_var = self._default_path_env_var()
+        mlog.debug('Default path env var:', mlog.bold(self.env_var))
+
+        version_reqs = self.version_reqs
+        if self.language == 'cuda':
+            nvcc_version = self._strip_patch_version(self.get_compiler().version)
+            mlog.debug('nvcc version:', mlog.bold(nvcc_version))
+            if version_reqs:
+                # make sure nvcc version satisfies specified version requirements
+                (found_some, not_found, found) = mesonlib.version_compare_many(nvcc_version, version_reqs)
+                if not_found:
+                    msg = f'The current nvcc version {nvcc_version} does not satisfy the specified CUDA Toolkit version requirements {version_reqs}.'
+                    return self._report_dependency_error(msg, (None, None, False))
+
+            # use nvcc version to find a matching CUDA Toolkit
+            version_reqs = [f'={nvcc_version}']
+        else:
+            nvcc_version = None
+
+        paths = [(path, self._cuda_toolkit_version(path), default) for (path, default) in self._cuda_paths()]
+        if version_reqs:
+            return self._find_matching_toolkit(paths, version_reqs, nvcc_version)
+
+        defaults = [(path, version) for (path, version, default) in paths if default]
+        if defaults:
+            return (defaults[0][0], defaults[0][1], True)
+
+        platform_msg = 'set the CUDA_PATH environment variable' if self._is_windows() \
+            else 'set the CUDA_PATH environment variable/create the \'/usr/local/cuda\' symbolic link'
+        msg = f'Please specify the desired CUDA Toolkit version (e.g. dependency(\'cuda\', version : \'>=10.1\')) or {platform_msg} to point to the location of your desired version.'
+        return self._report_dependency_error(msg, (None, None, False))
+
+    def _find_matching_toolkit(self, paths: T.List[TV_ResultTuple], version_reqs: T.List[str], nvcc_version: T.Optional[str]) -> TV_ResultTuple:
+        # keep the order of the default paths intact and sort the rest in
+        # descending order by toolkit version
+        part_func: T.Callable[[TV_ResultTuple], bool] = lambda t: not t[2]
+        defaults_it, rest_it = mesonlib.partition(part_func, paths)
+        defaults = list(defaults_it)
+        paths = defaults + sorted(rest_it, key=lambda t: mesonlib.Version(t[1]), reverse=True)
+        mlog.debug(f'Search paths: {paths}')
+
+        if nvcc_version and defaults:
+            default_src = f"the {self.env_var} environment variable" if self.env_var else "the \'/usr/local/cuda\' symbolic link"
+            nvcc_warning = 'The default CUDA Toolkit as designated by {} ({}) doesn\'t match the current nvcc version {} and will be ignored.'.format(default_src, os.path.realpath(defaults[0][0]), nvcc_version)
+        else:
+            nvcc_warning = None
+
+        for (path, version, default) in paths:
+            (found_some, not_found, found) = mesonlib.version_compare_many(version, version_reqs)
+            if not not_found:
+                if not default and nvcc_warning:
+                    mlog.warning(nvcc_warning)
+                return (path, version, True)
+
+        if nvcc_warning:
+            mlog.warning(nvcc_warning)
+        return (None, None, False)
+
+    def _default_path_env_var(self) -> T.Optional[str]:
+        env_vars = ['CUDA_PATH'] if self._is_windows() else ['CUDA_PATH', 'CUDA_HOME', 'CUDA_ROOT']
+        env_vars = [var for var in env_vars if var in os.environ]
+        user_defaults = {os.environ[var] for var in env_vars}
+        if len(user_defaults) > 1:
+            mlog.warning('Environment variables {} point to conflicting toolkit locations ({}). Toolkit selection might produce unexpected results.'.format(', '.join(env_vars), ', '.join(user_defaults)))
+        return env_vars[0] if env_vars else None
+
+    def _cuda_paths(self) -> T.List[T.Tuple[str, bool]]:
+        return ([(os.environ[self.env_var], True)] if self.env_var else []) \
+            + (self._cuda_paths_win() if self._is_windows() else self._cuda_paths_nix())
+
+    def _cuda_paths_win(self) -> T.List[T.Tuple[str, bool]]:
+        env_vars = os.environ.keys()
+        return [(os.environ[var], False) for var in env_vars if var.startswith('CUDA_PATH_')]
+
+    def _cuda_paths_nix(self) -> T.List[T.Tuple[str, bool]]:
+        # include /usr/local/cuda default only if no env_var was found
+        pattern = '/usr/local/cuda-*' if self.env_var else '/usr/local/cuda*'
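+        # e.g. '/usr/local/cuda' -> default entry, '/usr/local/cuda-11.2' ->
+        # version-specific entry (illustrative)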
+        return [(path, os.path.basename(path) == 'cuda') for path in glob.iglob(pattern)]
+
+    toolkit_version_regex = re.compile(r'^CUDA Version\s+(.*)$')
+    path_version_win_regex = re.compile(r'^v(.*)$')
+    path_version_nix_regex = re.compile(r'^cuda-(.*)$')
+    cudart_version_regex = re.compile(r'#define\s+CUDART_VERSION\s+([0-9]+)')
+
+    def _cuda_toolkit_version(self, path: str) -> str:
+        version = self._read_toolkit_version_txt(path)
+        if version:
+            return version
+        version = self._read_cuda_runtime_api_version(path)
+        if version:
+            return version
+
+        mlog.debug('Falling back to extracting version from path')
+        path_version_regex = self.path_version_win_regex if self._is_windows() else self.path_version_nix_regex
+        try:
+            m = path_version_regex.match(os.path.basename(path))
+            if m:
+                return m.group(1)
+            else:
+                mlog.warning(f'Could not detect CUDA Toolkit version for {path}')
+        except Exception as e:
+            mlog.warning(f'Could not detect CUDA Toolkit version for {path}: {e!s}')
+
+        return '0.0'
+
+    def _read_cuda_runtime_api_version(self, path_str: str) -> T.Optional[str]:
+        path = Path(path_str)
+        for i in path.rglob('cuda_runtime_api.h'):
+            raw = i.read_text(encoding='utf-8')
+            m = self.cudart_version_regex.search(raw)
+            if not m:
+                continue
+            try:
+                vers_int = int(m.group(1))
+            except ValueError:
+                continue
+            # use // for floor instead of / which produces a float
+            major = vers_int // 1000                  # type: int
+            minor = (vers_int - major * 1000) // 10   # type: int
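+            # e.g. CUDART_VERSION 11020 -> major 11, minor 2 -> '11.2'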
+            return f'{major}.{minor}'
+        return None
+
+    def _read_toolkit_version_txt(self, path: str) -> T.Optional[str]:
+        # Read 'version.txt' at the root of the CUDA Toolkit directory to determine the toolkit version
+        version_file_path = os.path.join(path, 'version.txt')
+        try:
+            with open(version_file_path, encoding='utf-8') as version_file:
+                version_str = version_file.readline() # e.g. 'CUDA Version 10.1.168'
+                m = self.toolkit_version_regex.match(version_str)
+                if m:
+                    return self._strip_patch_version(m.group(1))
+        except Exception as e:
+            mlog.debug(f'Could not read CUDA Toolkit\'s version file {version_file_path}: {e!s}')
+
+        return None
+
+    @classmethod
+    def _strip_patch_version(cls, version: str) -> str:
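+        # e.g. '10.1.168' -> '10.1'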
+        return '.'.join(version.split('.')[:2])
+
+    def _detect_arch_libdir(self) -> str:
+        arch = detect_cpu_family(self.env.coredata.compilers.host)
+        machine = self.env.machines[self.for_machine]
+        msg = '{} architecture is not supported in {} version of the CUDA Toolkit.'
+        if machine.is_windows():
+            libdirs = {'x86': 'Win32', 'x86_64': 'x64'}
+            if arch not in libdirs:
+                raise DependencyException(msg.format(arch, 'Windows'))
+            return os.path.join('lib', libdirs[arch])
+        elif machine.is_linux():
+            libdirs = {'x86_64': 'lib64', 'ppc64': 'lib', 'aarch64': 'lib64', 'loongarch64': 'lib64'}
+            if arch not in libdirs:
+                raise DependencyException(msg.format(arch, 'Linux'))
+            return libdirs[arch]
+        elif machine.is_darwin():
+            libdirs = {'x86_64': 'lib64'}
+            if arch not in libdirs:
+                raise DependencyException(msg.format(arch, 'macOS'))
+            return libdirs[arch]
+        else:
+            raise DependencyException('CUDA Toolkit: unsupported platform.')
+
+    def _find_requested_libraries(self) -> bool:
+        all_found = True
+
+        for module in self.requested_modules:
+            args = self.clib_compiler.find_library(module, self.env, [self.libdir] if self.libdir else [])
+            if args is None:
+                self._report_dependency_error(f'Couldn\'t find requested CUDA module \'{module}\'')
+                all_found = False
+            else:
+                mlog.debug(f'Link args for CUDA module \'{module}\' are {args}')
+                self.lib_modules[module] = args
+
+        return all_found
+
+    def _is_windows(self) -> bool:
+        return self.env.machines[self.for_machine].is_windows()
+
+    @T.overload
+    def _report_dependency_error(self, msg: str) -> None: ...
+
+    @T.overload
+    def _report_dependency_error(self, msg: str, ret_val: TV_ResultTuple) -> TV_ResultTuple: ... # noqa: F811
+
+    def _report_dependency_error(self, msg: str, ret_val: T.Optional[TV_ResultTuple] = None) -> T.Optional[TV_ResultTuple]: # noqa: F811
+        if self.required:
+            raise DependencyException(msg)
+
+        mlog.debug(msg)
+        return ret_val
+
+    def log_details(self) -> str:
+        module_str = ', '.join(self.requested_modules)
+        return 'modules: ' + module_str
+
+    def log_info(self) -> str:
+        return self.cuda_path if self.cuda_path else ''
+
+    def get_requested(self, kwargs: T.Dict[str, T.Any]) -> T.List[str]:
+        candidates = mesonlib.extract_as_list(kwargs, 'modules')
+        for c in candidates:
+            if not isinstance(c, str):
+                raise DependencyException('CUDA module argument is not a string.')
+        return candidates
+
+    def get_link_args(self, language: T.Optional[str] = None, raw: bool = False) -> T.List[str]:
+        args: T.List[str] = []
+        if self.libdir:
+            args += self.clib_compiler.get_linker_search_args(self.libdir)
+        for lib in self.requested_modules:
+            args += self.lib_modules[lib]
+        return args
+
+packages['cuda'] = CudaDependency
diff --git a/vendored-meson/meson/mesonbuild/dependencies/data/CMakeLists.txt b/vendored-meson/meson/mesonbuild/dependencies/data/CMakeLists.txt
new file mode 100644
index 000000000000..d682cb8246dc
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/dependencies/data/CMakeLists.txt
@@ -0,0 +1,102 @@
+# fail noisily if this file is used without setting:
+# cmake_minimum_required(VERSION ${CMAKE_VERSION})
+# project(... LANGUAGES ...)
+
+cmake_policy(SET CMP0000 NEW)
+
+set(PACKAGE_FOUND FALSE)
+set(_packageName "${NAME}")
+string(TOUPPER "${_packageName}" PACKAGE_NAME)
+
+if("${STATIC}" STREQUAL "True")
+  set("${NAME}_USE_STATIC_LIBS" "ON")
+endif()
+
+while(TRUE)
+  if ("${VERSION}" STREQUAL "")
+    find_package("${NAME}" QUIET COMPONENTS ${COMPS})
+  else()
+    find_package("${NAME}" "${VERSION}" QUIET COMPONENTS ${COMPS})
+  endif()
+
+  # ARCHS has to be set via the CMD interface
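+  # (Meson passes it as -DARCHS=<arch1>;<arch2>;... when invoking this file)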
+  if(${_packageName}_FOUND OR ${PACKAGE_NAME}_FOUND OR "${ARCHS}" STREQUAL "")
+    break()
+  endif()
+
+  list(GET       ARCHS 0 CMAKE_LIBRARY_ARCHITECTURE)
+  list(REMOVE_AT ARCHS 0)
+endwhile()
+
+if(${_packageName}_FOUND  OR  ${PACKAGE_NAME}_FOUND)
+  set(PACKAGE_FOUND TRUE)
+
+  # Check the following variables:
+  # FOO_VERSION
+  # Foo_VERSION
+  # FOO_VERSION_STRING
+  # Foo_VERSION_STRING
+  if(NOT DEFINED PACKAGE_VERSION)
+    if(DEFINED ${_packageName}_VERSION)
+      set(PACKAGE_VERSION "${${_packageName}_VERSION}")
+    elseif(DEFINED ${PACKAGE_NAME}_VERSION)
+      set(PACKAGE_VERSION "${${PACKAGE_NAME}_VERSION}")
+    elseif(DEFINED ${_packageName}_VERSION_STRING)
+      set(PACKAGE_VERSION "${${_packageName}_VERSION_STRING}")
+    elseif(DEFINED ${PACKAGE_NAME}_VERSION_STRING)
+      set(PACKAGE_VERSION "${${PACKAGE_NAME}_VERSION_STRING}")
+    endif()
+  endif()
+
+  # Check the following variables:
+  # FOO_LIBRARIES
+  # Foo_LIBRARIES
+  # FOO_LIBS
+  # Foo_LIBS
+  set(libs)
+  if(DEFINED ${_packageName}_LIBRARIES)
+    set(libs ${_packageName}_LIBRARIES)
+  elseif(DEFINED ${PACKAGE_NAME}_LIBRARIES)
+    set(libs ${PACKAGE_NAME}_LIBRARIES)
+  elseif(DEFINED ${_packageName}_LIBS)
+    set(libs ${_packageName}_LIBS)
+  elseif(DEFINED ${PACKAGE_NAME}_LIBS)
+    set(libs ${PACKAGE_NAME}_LIBS)
+  endif()
+
+  # Check the following variables:
+  # FOO_INCLUDE_DIRS
+  # Foo_INCLUDE_DIRS
+  # FOO_INCLUDES
+  # Foo_INCLUDES
+  # FOO_INCLUDE_DIR
+  # Foo_INCLUDE_DIR
+  set(includes)
+  if(DEFINED ${_packageName}_INCLUDE_DIRS)
+    set(includes ${_packageName}_INCLUDE_DIRS)
+  elseif(DEFINED ${PACKAGE_NAME}_INCLUDE_DIRS)
+    set(includes ${PACKAGE_NAME}_INCLUDE_DIRS)
+  elseif(DEFINED ${_packageName}_INCLUDES)
+    set(includes ${_packageName}_INCLUDES)
+  elseif(DEFINED ${PACKAGE_NAME}_INCLUDES)
+    set(includes ${PACKAGE_NAME}_INCLUDES)
+  elseif(DEFINED ${_packageName}_INCLUDE_DIR)
+    set(includes ${_packageName}_INCLUDE_DIR)
+  elseif(DEFINED ${PACKAGE_NAME}_INCLUDE_DIR)
+    set(includes ${PACKAGE_NAME}_INCLUDE_DIR)
+  endif()
+
+  # Check the following variables:
+  # FOO_DEFINITIONS
+  # Foo_DEFINITIONS
+  set(definitions)
+  if(DEFINED ${_packageName}_DEFINITIONS)
+    set(definitions ${_packageName}_DEFINITIONS)
+  elseif(DEFINED ${PACKAGE_NAME}_DEFINITIONS)
+    set(definitions ${PACKAGE_NAME}_DEFINITIONS)
+  endif()
+
+  set(PACKAGE_INCLUDE_DIRS "${${includes}}")
+  set(PACKAGE_DEFINITIONS  "${${definitions}}")
+  set(PACKAGE_LIBRARIES    "${${libs}}")
+endif()
diff --git a/vendored-meson/meson/mesonbuild/dependencies/data/CMakeListsLLVM.txt b/vendored-meson/meson/mesonbuild/dependencies/data/CMakeListsLLVM.txt
new file mode 100644
index 000000000000..4a9382207b3d
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/dependencies/data/CMakeListsLLVM.txt
@@ -0,0 +1,204 @@
+# fail noisily if this file is used without setting:
+# cmake_minimum_required(VERSION ${CMAKE_VERSION})
+# project(... LANGUAGES ...)
+
+cmake_policy(SET CMP0000 NEW)
+
+set(PACKAGE_FOUND FALSE)
+
+list(REMOVE_DUPLICATES LLVM_MESON_VERSIONS)
+
+while(TRUE)
+  # Activate CMake version selection
+  foreach(i IN LISTS LLVM_MESON_VERSIONS)
+    find_package(LLVM ${i}
+      CONFIG
+      NAMES ${LLVM_MESON_PACKAGE_NAMES}
+      QUIET)
+    if(LLVM_FOUND)
+      break()
+    endif()
+  endforeach()
+
+  # ARCHS has to be set via the CMD interface
+  if(LLVM_FOUND OR "${ARCHS}" STREQUAL "")
+    break()
+  endif()
+
+  list(GET       ARCHS 0 CMAKE_LIBRARY_ARCHITECTURE)
+  list(REMOVE_AT ARCHS 0)
+endwhile()
+
+function(meson_llvm_cmake_dynamic_available mod out)
+  # Check if comparing LLVM_DYLIB_COMPONENTS alone is enough, because
+  # no complex component translation logic is needed if everything
+  # is covered by that one variable
+  if(mod IN_LIST LLVM_DYLIB_COMPONENTS)
+    set(${out} TRUE PARENT_SCOPE)
+    return()
+  elseif((NOT (mod IN_LIST LLVM_DYLIB_COMPONENTS))
+      AND (NOT("${LLVM_DYLIB_COMPONENTS}" STREQUAL "all")))
+    set(${out} FALSE PARENT_SCOPE)
+    return()
+  endif()
+
+  # Complex heuristic to filter out pseudo-components and skip invalid names.
+  # LLVM_DYLIB_COMPONENTS is 'all' at this point, because otherwise we would
+  # have returned in the previous check ('all' is also handled there).
+  set(llvm_pseudo_components "native" "backend" "engine" "all-targets")
+  is_llvm_target_specifier(${mod} mod_spec INCLUDED_TARGETS)
+  string(TOUPPER "${LLVM_AVAILABLE_LIBS}" capitalized_libs)
+  string(TOUPPER "${LLVM_TARGETS_TO_BUILD}" capitalized_tgts)
+  if(mod_spec)
+    set(${out} TRUE PARENT_SCOPE)
+  elseif(mod IN_LIST capitalized_tgts)
+    set(${out} TRUE PARENT_SCOPE)
+  elseif(mod IN_LIST llvm_pseudo_components)
+    set(${out} TRUE PARENT_SCOPE)
+  elseif(LLVM${mod} IN_LIST capitalized_libs)
+    set(${out} TRUE PARENT_SCOPE)
+  else()
+    set(${out} FALSE PARENT_SCOPE)
+  endif()
+endfunction()
+
+function(is_static target ret)
+  if(TARGET ${target})
+    get_target_property(target_type ${target} TYPE)
+    if(target_type STREQUAL "STATIC_LIBRARY")
+      set(${ret} TRUE PARENT_SCOPE)
+      return()
+    endif()
+  endif()
+  set(${ret} FALSE PARENT_SCOPE)
+endfunction()
+
+# Concatenate LLVM_MESON_REQUIRED_MODULES and LLVM_MESON_OPTIONAL_MODULES
+set(LLVM_MESON_MODULES ${LLVM_MESON_REQUIRED_MODULES} ${LLVM_MESON_OPTIONAL_MODULES})
+
+
+# Check if LLVM exists in dynamic world
+# Initialization before modules checking
+if(LLVM_FOUND)
+  if(LLVM_MESON_DYLIB AND TARGET LLVM)
+    set(PACKAGE_FOUND TRUE)
+  elseif(NOT LLVM_MESON_DYLIB)
+    # Use LLVMSupport to check if static targets exist
+    set(static_tg FALSE)
+    is_static(LLVMSupport static_tg)
+    if(static_tg)
+      set(PACKAGE_FOUND TRUE)
+    endif()
+  endif()
+endif()
+
+if(PACKAGE_FOUND)
+  foreach(mod IN LISTS LLVM_MESON_MODULES)
+    # Reset variables
+    set(out_mods)
+    set(real_mods)
+
+    # Generate a lower and upper case version
+    string(TOLOWER "${mod}" mod_L)
+    string(TOUPPER "${mod}" mod_U)
+
+    # Special case - "all-targets" pseudo target
+    # Just append all targets, if pseudo-target exists
+    if("${mod}" STREQUAL "all-targets")
+      set(mod_L  ${LLVM_TARGETS_TO_BUILD})
+      string(TOUPPER "${LLVM_TARGETS_TO_BUILD}" mod_U)
+    endif()
+
+    # Check whether the required module is linked inside libLLVM.so.
+    # If not, skip this module
+    if(LLVM_MESON_DYLIB
+       AND DEFINED LLVM_DYLIB_COMPONENTS)
+        meson_llvm_cmake_dynamic_available(${mod} MOD_F)
+        meson_llvm_cmake_dynamic_available(${mod_L} MOD_L_F)
+        meson_llvm_cmake_dynamic_available(${mod_U} MOD_U_F)
+        if(MOD_F OR MOD_L_F OR MOD_U_F)
+          set(MESON_LLVM_TARGETS_${mod} LLVM)
+        endif()
+    elseif(LLVM_MESON_DYLIB AND (mod IN_LIST LLVM_MESON_REQUIRED_MODULES))
+      # Dynamic linking was requested, but the required variables are not set; we cannot continue
+      set(PACKAGE_FOUND FALSE)
+      break()
+    elseif(LLVM_MESON_DYLIB)
+      # Dynamic linking was requested and only optional modules remain; continue
+      continue()
+    else()
+      # CMake only does this for static components, and we
+      # replicate its behaviour.
+      # Get the mapped components
+      llvm_map_components_to_libnames(out_mods ${mod} ${mod_L} ${mod_U})
+      list(SORT              out_mods)
+      list(REMOVE_DUPLICATES out_mods)
+
+      # Make sure that the modules exist
+      foreach(i IN LISTS out_mods)
+        set(static_tg FALSE)
+        is_static(${i} static_tg)
+        if(static_tg)
+          list(APPEND real_mods ${i})
+        endif()
+      endforeach()
+
+      # Set the output variables
+      set(MESON_LLVM_TARGETS_${mod} ${real_mods})
+      foreach(i IN LISTS real_mods)
+        set(MESON_TARGET_TO_LLVM_${i} ${mod})
+      endforeach()
+    endif()
+  endforeach()
+
+  # Check the following variables:
+  # LLVM_PACKAGE_VERSION
+  # LLVM_VERSION
+  # LLVM_VERSION_STRING
+  if(NOT DEFINED PACKAGE_VERSION)
+    if(DEFINED LLVM_PACKAGE_VERSION)
+      set(PACKAGE_VERSION "${LLVM_PACKAGE_VERSION}")
+    elseif(DEFINED LLVM_VERSION)
+      set(PACKAGE_VERSION "${LLVM_VERSION}")
+    elseif(DEFINED LLVM_VERSION_STRING)
+      set(PACKAGE_VERSION "${LLVM_VERSION_STRING}")
+    endif()
+  endif()
+
+  # Check the following variables:
+  # LLVM_LIBRARIES
+  # LLVM_LIBS
+  set(libs)
+  # Hardcode LLVM, because we link with libLLVM.so when building dynamically
+  if(LLVM_MESON_DYLIB)
+    get_target_property(libs LLVM IMPORTED_LOCATION)
+  elseif(DEFINED LLVM_LIBRARIES)
+    set(libs LLVM_LIBRARIES)
+  elseif(DEFINED LLVM_LIBS)
+    set(libs LLVM_LIBS)
+  endif()
+
+  # Check the following variables:
+  # LLVM_INCLUDE_DIRS
+  # LLVM_INCLUDES
+  # LLVM_INCLUDE_DIR
+  set(includes)
+  if(DEFINED LLVM_INCLUDE_DIRS)
+    set(includes LLVM_INCLUDE_DIRS)
+  elseif(DEFINED LLVM_INCLUDES)
+    set(includes LLVM_INCLUDES)
+  elseif(DEFINED LLVM_INCLUDE_DIR)
+    set(includes LLVM_INCLUDE_DIR)
+  endif()
+
+  # Check the following variables:
+  # LLVM_DEFINITIONS
+  set(definitions)
+  if(DEFINED LLVM_DEFINITIONS)
+    set(definitions LLVM_DEFINITIONS)
+  endif()
+
+  set(PACKAGE_INCLUDE_DIRS "${${includes}}")
+  set(PACKAGE_DEFINITIONS  "${${definitions}}")
+  set(PACKAGE_LIBRARIES    "${${libs}}")
+endif()
diff --git a/vendored-meson/meson/mesonbuild/dependencies/data/CMakePathInfo.txt b/vendored-meson/meson/mesonbuild/dependencies/data/CMakePathInfo.txt
new file mode 100644
index 000000000000..662ec58363e2
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/dependencies/data/CMakePathInfo.txt
@@ -0,0 +1,31 @@
+cmake_minimum_required(VERSION ${CMAKE_MAJOR_VERSION}.${CMAKE_MINOR_VERSION}.${CMAKE_PATCH_VERSION})
+
+set(TMP_PATHS_LIST)
+list(APPEND TMP_PATHS_LIST ${CMAKE_PREFIX_PATH})
+list(APPEND TMP_PATHS_LIST ${CMAKE_FRAMEWORK_PATH})
+list(APPEND TMP_PATHS_LIST ${CMAKE_APPBUNDLE_PATH})
+list(APPEND TMP_PATHS_LIST $ENV{CMAKE_PREFIX_PATH})
+list(APPEND TMP_PATHS_LIST $ENV{CMAKE_FRAMEWORK_PATH})
+list(APPEND TMP_PATHS_LIST $ENV{CMAKE_APPBUNDLE_PATH})
+list(APPEND TMP_PATHS_LIST ${CMAKE_SYSTEM_PREFIX_PATH})
+list(APPEND TMP_PATHS_LIST ${CMAKE_SYSTEM_FRAMEWORK_PATH})
+list(APPEND TMP_PATHS_LIST ${CMAKE_SYSTEM_APPBUNDLE_PATH})
+
+set(LIB_ARCH_LIST)
+if(CMAKE_LIBRARY_ARCHITECTURE_REGEX)
+  file(GLOB implicit_dirs RELATIVE /lib /lib/*-linux-gnu* )
+  foreach(dir ${implicit_dirs})
+    if("${dir}" MATCHES "${CMAKE_LIBRARY_ARCHITECTURE_REGEX}")
+      list(APPEND LIB_ARCH_LIST "${dir}")
+    endif()
+  endforeach()
+endif()
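+
+# Illustrative example (not part of the upstream file): on a Debian-style
+# multiarch system /lib contains directories such as x86_64-linux-gnu, so
+# the glob above typically leaves LIB_ARCH_LIST as "x86_64-linux-gnu".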
+
+# "Export" these variables:
+set(MESON_ARCH_LIST ${LIB_ARCH_LIST})
+set(MESON_PATHS_LIST ${TMP_PATHS_LIST})
+set(MESON_CMAKE_ROOT ${CMAKE_ROOT})
+set(MESON_CMAKE_SYSROOT ${CMAKE_SYSROOT})
+set(MESON_FIND_ROOT_PATH ${CMAKE_FIND_ROOT_PATH})
+
+message(STATUS ${TMP_PATHS_LIST})
diff --git a/vendored-meson/meson/mesonbuild/dependencies/data/__init__.py b/vendored-meson/meson/mesonbuild/dependencies/data/__init__.py
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/vendored-meson/meson/mesonbuild/dependencies/detect.py b/vendored-meson/meson/mesonbuild/dependencies/detect.py
new file mode 100644
index 000000000000..9428d547bb02
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/dependencies/detect.py
@@ -0,0 +1,235 @@
+# Copyright 2013-2021 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+import collections, functools, importlib
+import typing as T
+
+from .base import ExternalDependency, DependencyException, DependencyMethods, NotFoundDependency
+
+from ..mesonlib import listify, MachineChoice, PerMachine
+from .. import mlog
+
+if T.TYPE_CHECKING:
+    from ..environment import Environment
+    from .factory import DependencyFactory, WrappedFactoryFunc, DependencyGenerator
+
+    TV_DepIDEntry = T.Union[str, bool, int, T.Tuple[str, ...]]
+    TV_DepID = T.Tuple[T.Tuple[str, TV_DepIDEntry], ...]
+    PackageTypes = T.Union[T.Type[ExternalDependency], DependencyFactory, WrappedFactoryFunc]
+
+class DependencyPackages(collections.UserDict):
+    data: T.Dict[str, PackageTypes]
+    defaults: T.Dict[str, str] = {}
+
+    def __missing__(self, key: str) -> PackageTypes:
+        if key in self.defaults:
+            modn = self.defaults[key]
+            importlib.import_module(f'mesonbuild.dependencies.{modn}')
+
+            return self.data[key]
+        raise KeyError(key)
+
+    def __contains__(self, key: object) -> bool:
+        return key in self.defaults or key in self.data
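+
+# Illustrative sketch of the lazy loading above (the 'llvm' -> 'dev' mapping
+# is an assumption for the example): with
+#   packages.defaults['llvm'] = 'dev'
+# a lookup such as packages['llvm'] triggers __missing__, which imports
+# mesonbuild.dependencies.dev; importing that module registers its factories
+# in `packages` as a side effect, so the subsequent self.data lookup succeeds.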
+
+# These must be defined in this file to avoid cyclical references.
+packages = DependencyPackages()
+_packages_accept_language: T.Set[str] = set()
+
+def get_dep_identifier(name: str, kwargs: T.Dict[str, T.Any]) -> 'TV_DepID':
+    identifier: 'TV_DepID' = (('name', name), )
+    from ..interpreter import permitted_dependency_kwargs
+    assert len(permitted_dependency_kwargs) == 19, \
+           'Extra kwargs have been added to dependency(), please review if it makes sense to handle it here'
+    for key, value in kwargs.items():
+        # 'version' is irrelevant for caching; the caller must check version matches
+        # 'native' is handled above with `for_machine`
+        # 'required' is irrelevant for caching; the caller handles it separately
+        # 'fallback' and 'allow_fallback' is not part of the cache because,
+        #     once a dependency has been found through a fallback, it should
+        #     be used for the rest of the Meson run.
+        # 'default_options' is only used in fallback case
+        # 'not_found_message' has no impact on the dependency lookup
+        # 'include_type' is handled after the dependency lookup
+        if key in {'version', 'native', 'required', 'fallback', 'allow_fallback', 'default_options',
+                   'not_found_message', 'include_type'}:
+            continue
+        # All keyword arguments are strings, booleans, ints, or lists (or lists of lists)
+        if isinstance(value, list):
+            for i in value:
+                assert isinstance(i, str)
+            value = tuple(frozenset(listify(value)))
+        else:
+            assert isinstance(value, (str, bool, int))
+        identifier = (*identifier, (key, value),)
+    return identifier
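+
+# Illustrative example (hypothetical call, not from the source): a call like
+#   dependency('llvm', modules: ['amdgpu'], static: true)
+# would produce an identifier along the lines of
+#   (('name', 'llvm'), ('modules', ('amdgpu',)), ('static', True))
+# once the cache-irrelevant keywords listed above are skipped. Note that list
+# values pass through frozenset, so their original order is not preserved.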
+
+display_name_map = {
+    'boost': 'Boost',
+    'cuda': 'CUDA',
+    'dub': 'DUB',
+    'gmock': 'GMock',
+    'gtest': 'GTest',
+    'hdf5': 'HDF5',
+    'llvm': 'LLVM',
+    'mpi': 'MPI',
+    'netcdf': 'NetCDF',
+    'openmp': 'OpenMP',
+    'wxwidgets': 'WxWidgets',
+}
+
+def find_external_dependency(name: str, env: 'Environment', kwargs: T.Dict[str, object], candidates: T.Optional[T.List['DependencyGenerator']] = None) -> T.Union['ExternalDependency', NotFoundDependency]:
+    assert name
+    required = kwargs.get('required', True)
+    if not isinstance(required, bool):
+        raise DependencyException('Keyword "required" must be a boolean.')
+    if not isinstance(kwargs.get('method', ''), str):
+        raise DependencyException('Keyword "method" must be a string.')
+    lname = name.lower()
+    if lname not in _packages_accept_language and 'language' in kwargs:
+        raise DependencyException(f'{name} dependency does not accept "language" keyword argument')
+    if not isinstance(kwargs.get('version', ''), (str, list)):
+        raise DependencyException('Keyword "Version" must be string or list.')
+
+    # display the dependency name with correct casing
+    display_name = display_name_map.get(lname, lname)
+
+    for_machine = MachineChoice.BUILD if kwargs.get('native', False) else MachineChoice.HOST
+
+    type_text = PerMachine('Build-time', 'Run-time')[for_machine] + ' dependency'
+
+    # build a list of dependency methods to try
+    if candidates is None:
+        candidates = _build_external_dependency_list(name, env, for_machine, kwargs)
+
+    pkg_exc: T.List[DependencyException] = []
+    pkgdep:  T.List[ExternalDependency] = []
+    details = ''
+
+    for c in candidates:
+        # try this dependency method
+        try:
+            d = c()
+            d._check_version()
+            pkgdep.append(d)
+        except DependencyException as e:
+            assert isinstance(c, functools.partial), 'for mypy'
+            bettermsg = f'Dependency lookup for {name} with method {c.func.log_tried()!r} failed: {e}'
+            mlog.debug(bettermsg)
+            e.args = (bettermsg,)
+            pkg_exc.append(e)
+        else:
+            pkg_exc.append(None)
+            details = d.log_details()
+            if details:
+                details = '(' + details + ') '
+            if 'language' in kwargs:
+                details += 'for ' + d.language + ' '
+
+            # if the dependency was found
+            if d.found():
+
+                info: mlog.TV_LoggableList = []
+                if d.version:
+                    info.append(mlog.normal_cyan(d.version))
+
+                log_info = d.log_info()
+                if log_info:
+                    info.append('(' + log_info + ')')
+
+                mlog.log(type_text, mlog.bold(display_name), details + 'found:', mlog.green('YES'), *info)
+
+                return d
+
+    # otherwise, the dependency could not be found
+    tried_methods = [d.log_tried() for d in pkgdep if d.log_tried()]
+    if tried_methods:
+        tried = mlog.format_list(tried_methods)
+    else:
+        tried = ''
+
+    mlog.log(type_text, mlog.bold(display_name), details + 'found:', mlog.red('NO'),
+             f'(tried {tried})' if tried else '')
+
+    if required:
+        # if an exception occurred with the first detection method, re-raise it
+        # (on the grounds that it came from the preferred dependency detection
+        # method)
+        if pkg_exc and pkg_exc[0]:
+            raise pkg_exc[0]
+
+        # we have a list of failed ExternalDependency objects, so we can report
+        # the methods we tried to find the dependency
+        raise DependencyException(f'Dependency "{name}" not found' +
+                                  (f', tried {tried}' if tried else ''))
+
+    return NotFoundDependency(name, env)
+
+
+def _build_external_dependency_list(name: str, env: 'Environment', for_machine: MachineChoice,
+                                    kwargs: T.Dict[str, T.Any]) -> T.List['DependencyGenerator']:
+    # First check if the method is valid
+    if 'method' in kwargs and kwargs['method'] not in [e.value for e in DependencyMethods]:
+        raise DependencyException('method {!r} is invalid'.format(kwargs['method']))
+
+    # Is there a specific dependency detector for this dependency?
+    lname = name.lower()
+    if lname in packages:
+        # Create the list of dependency object constructors using a factory
+        # class method, if one exists, otherwise the list just consists of the
+        # constructor
+        if isinstance(packages[lname], type):
+            entry1 = T.cast('T.Type[ExternalDependency]', packages[lname])  # mypy doesn't understand isinstance(..., type)
+            if issubclass(entry1, ExternalDependency):
+                func: T.Callable[[], 'ExternalDependency'] = functools.partial(entry1, env, kwargs)
+                dep = [func]
+        else:
+            entry2 = T.cast('T.Union[DependencyFactory, WrappedFactoryFunc]', packages[lname])
+            dep = entry2(env, for_machine, kwargs)
+        return dep
+
+    candidates: T.List['DependencyGenerator'] = []
+
+    if kwargs.get('method', 'auto') == 'auto':
+        # Just use the standard detection methods.
+        methods = ['pkg-config', 'extraframework', 'cmake']
+    else:
+        # If it's explicitly requested, use that detection method (only).
+        methods = [kwargs['method']]
+
+    # Exclusive to when it is explicitly requested
+    if 'dub' in methods:
+        from .dub import DubDependency
+        candidates.append(functools.partial(DubDependency, name, env, kwargs))
+
+    # Preferred first candidate for auto.
+    if 'pkg-config' in methods:
+        from .pkgconfig import PkgConfigDependency
+        candidates.append(functools.partial(PkgConfigDependency, name, env, kwargs))
+
+    # On OSX only, try framework dependency detector.
+    if 'extraframework' in methods:
+        if env.machines[for_machine].is_darwin():
+            from .framework import ExtraFrameworkDependency
+            candidates.append(functools.partial(ExtraFrameworkDependency, name, env, kwargs))
+
+    # Only use CMake:
+    # - if it's explicitly requested
+    # - as a last resort, since it might not work 100% (see #6113)
+    if 'cmake' in methods:
+        from .cmake import CMakeDependency
+        candidates.append(functools.partial(CMakeDependency, name, env, kwargs))
+
+    return candidates
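+
+# Illustrative sketch (hypothetical lookup, not from the source): with the
+# default method 'auto', a call such as
+#   _build_external_dependency_list('foo', env, MachineChoice.HOST, {})
+# returns functools.partial objects in priority order: PkgConfigDependency
+# first, ExtraFrameworkDependency only on macOS hosts, and CMakeDependency as
+# the last resort; find_external_dependency() then invokes them one by one
+# until a candidate reports found().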
diff --git a/vendored-meson/meson/mesonbuild/dependencies/dev.py b/vendored-meson/meson/mesonbuild/dependencies/dev.py
new file mode 100644
index 000000000000..ec7015103b7a
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/dependencies/dev.py
@@ -0,0 +1,720 @@
+# Copyright 2013-2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This file contains the detection logic for external dependencies useful for
+# development purposes, such as testing, debugging, etc..
+
+from __future__ import annotations
+
+import glob
+import os
+import re
+import pathlib
+import shutil
+import subprocess
+import typing as T
+import functools
+
+from mesonbuild.interpreterbase.decorators import FeatureDeprecated
+
+from .. import mesonlib, mlog
+from ..environment import get_llvm_tool_names
+from ..mesonlib import version_compare, version_compare_many, search_version, stringlistify, extract_as_list
+from .base import DependencyException, DependencyMethods, detect_compiler, strip_system_includedirs, strip_system_libdirs, SystemDependency, ExternalDependency, DependencyTypeName
+from .cmake import CMakeDependency
+from .configtool import ConfigToolDependency
+from .detect import packages
+from .factory import DependencyFactory
+from .misc import threads_factory
+from .pkgconfig import PkgConfigDependency
+
+if T.TYPE_CHECKING:
+    from ..envconfig import MachineInfo
+    from ..environment import Environment
+    from ..mesonlib import MachineChoice
+    from typing_extensions import TypedDict
+
+    class JNISystemDependencyKW(TypedDict):
+        modules: T.List[str]
+        # FIXME: When dependency() moves to typed Kwargs, this should inherit
+        # from its TypedDict type.
+        version: T.Optional[str]
+
+
+def get_shared_library_suffix(environment: 'Environment', for_machine: MachineChoice) -> str:
+    """This is only guaranteed to work for languages that compile to machine
+    code, not for languages like C# that use a bytecode and always end in .dll
+    """
+    m = environment.machines[for_machine]
+    if m.is_windows():
+        return '.dll'
+    elif m.is_darwin():
+        return '.dylib'
+    return '.so'
+
+
+class GTestDependencySystem(SystemDependency):
+    def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]) -> None:
+        super().__init__(name, environment, kwargs, language='cpp')
+        self.main = kwargs.get('main', False)
+        self.src_dirs = ['/usr/src/gtest/src', '/usr/src/googletest/googletest/src']
+        if not self._add_sub_dependency(threads_factory(environment, self.for_machine, {})):
+            self.is_found = False
+            return
+        self.detect()
+
+    def detect(self) -> None:
+        gtest_detect = self.clib_compiler.find_library("gtest", self.env, [])
+        gtest_main_detect = self.clib_compiler.find_library("gtest_main", self.env, [])
+        if gtest_detect and (not self.main or gtest_main_detect):
+            self.is_found = True
+            self.compile_args = []
+            self.link_args = gtest_detect
+            if self.main:
+                self.link_args += gtest_main_detect
+            self.sources = []
+            self.prebuilt = True
+        elif self.detect_srcdir():
+            self.is_found = True
+            self.compile_args = ['-I' + d for d in self.src_include_dirs]
+            self.link_args = []
+            if self.main:
+                self.sources = [self.all_src, self.main_src]
+            else:
+                self.sources = [self.all_src]
+            self.prebuilt = False
+        else:
+            self.is_found = False
+
+    def detect_srcdir(self) -> bool:
+        for s in self.src_dirs:
+            if os.path.exists(s):
+                self.src_dir = s
+                self.all_src = mesonlib.File.from_absolute_file(
+                    os.path.join(self.src_dir, 'gtest-all.cc'))
+                self.main_src = mesonlib.File.from_absolute_file(
+                    os.path.join(self.src_dir, 'gtest_main.cc'))
+                self.src_include_dirs = [os.path.normpath(os.path.join(self.src_dir, '..')),
+                                         os.path.normpath(os.path.join(self.src_dir, '../include')),
+                                         ]
+                return True
+        return False
+
+    def log_info(self) -> str:
+        if self.prebuilt:
+            return 'prebuilt'
+        else:
+            return 'building self'
+
+
+class GTestDependencyPC(PkgConfigDependency):
+
+    def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]):
+        assert name == 'gtest'
+        if kwargs.get('main'):
+            name = 'gtest_main'
+        super().__init__(name, environment, kwargs)
+
+
+class GMockDependencySystem(SystemDependency):
+    def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]) -> None:
+        super().__init__(name, environment, kwargs, language='cpp')
+        self.main = kwargs.get('main', False)
+        if not self._add_sub_dependency(threads_factory(environment, self.for_machine, {})):
+            self.is_found = False
+            return
+
+        # If we are getting main() from GMock, we definitely
+        # want to avoid linking in main() from GTest
+        gtest_kwargs = kwargs.copy()
+        if self.main:
+            gtest_kwargs['main'] = False
+
+        # GMock without GTest is pretty much useless
+        # this also mimics the structure given in WrapDB,
+        # where GMock always pulls in GTest
+        found = self._add_sub_dependency(gtest_factory(environment, self.for_machine, gtest_kwargs))
+        if not found:
+            self.is_found = False
+            return
+
+        # GMock may be a library or just source.
+        # Work with both.
+        gmock_detect = self.clib_compiler.find_library("gmock", self.env, [])
+        gmock_main_detect = self.clib_compiler.find_library("gmock_main", self.env, [])
+        if gmock_detect and (not self.main or gmock_main_detect):
+            self.is_found = True
+            self.link_args += gmock_detect
+            if self.main:
+                self.link_args += gmock_main_detect
+            self.prebuilt = True
+            return
+
+        for d in ['/usr/src/googletest/googlemock/src', '/usr/src/gmock/src', '/usr/src/gmock']:
+            if os.path.exists(d):
+                self.is_found = True
+                # Yes, we need both because there are multiple
+                # versions of gmock that do different things.
+                d2 = os.path.normpath(os.path.join(d, '..'))
+                self.compile_args += ['-I' + d, '-I' + d2, '-I' + os.path.join(d2, 'include')]
+                all_src = mesonlib.File.from_absolute_file(os.path.join(d, 'gmock-all.cc'))
+                main_src = mesonlib.File.from_absolute_file(os.path.join(d, 'gmock_main.cc'))
+                if self.main:
+                    self.sources += [all_src, main_src]
+                else:
+                    self.sources += [all_src]
+                self.prebuilt = False
+                return
+
+        self.is_found = False
+
+    def log_info(self) -> str:
+        if self.prebuilt:
+            return 'prebuilt'
+        else:
+            return 'building self'
+
+
+class GMockDependencyPC(PkgConfigDependency):
+
+    def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]):
+        assert name == 'gmock'
+        if kwargs.get('main'):
+            name = 'gmock_main'
+        super().__init__(name, environment, kwargs)
+
+
+class LLVMDependencyConfigTool(ConfigToolDependency):
+    """
+    LLVM uses a special tool, llvm-config, which has arguments for getting
+    c args, cxx args, and ldargs as well as version.
+    """
+    tool_name = 'llvm-config'
+    __cpp_blacklist = {'-DNDEBUG'}
+
+    def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]):
+        self.tools = get_llvm_tool_names('llvm-config')
+
+        # Fedora starting with Fedora 30 adds a suffix of the number
+        # of bits in the isa that llvm targets, for example, on x86_64
+        # and aarch64 the name will be llvm-config-64, on x86 and arm
+        # it will be llvm-config-32.
+        if environment.machines[self.get_for_machine_from_kwargs(kwargs)].is_64_bit:
+            self.tools.append('llvm-config-64')
+        else:
+            self.tools.append('llvm-config-32')
+
+        # It's necessary for LLVM <= 3.8 to use the C++ linker. For 3.9 and 4.0
+        # the C linker works fine if only using the C API.
+        super().__init__(name, environment, kwargs, language='cpp')
+        self.provided_modules: T.List[str] = []
+        self.required_modules: mesonlib.OrderedSet[str] = mesonlib.OrderedSet()
+        self.module_details:   T.List[str] = []
+        if not self.is_found:
+            return
+
+        self.provided_modules = self.get_config_value(['--components'], 'modules')
+        modules = stringlistify(extract_as_list(kwargs, 'modules'))
+        self.check_components(modules)
+        opt_modules = stringlistify(extract_as_list(kwargs, 'optional_modules'))
+        self.check_components(opt_modules, required=False)
+
+        cargs = mesonlib.OrderedSet(self.get_config_value(['--cppflags'], 'compile_args'))
+        self.compile_args = list(cargs.difference(self.__cpp_blacklist))
+
+        if version_compare(self.version, '>= 3.9'):
+            self._set_new_link_args(environment)
+        else:
+            self._set_old_link_args()
+        self.link_args = strip_system_libdirs(environment, self.for_machine, self.link_args)
+        self.link_args = self.__fix_bogus_link_args(self.link_args)
+        if not self._add_sub_dependency(threads_factory(environment, self.for_machine, {})):
+            self.is_found = False
+            return
+
+    def __fix_bogus_link_args(self, args: T.List[str]) -> T.List[str]:
+        """This function attempts to fix bogus link arguments that llvm-config
+        generates.
+
+        Currently it works around the following:
+            - FreeBSD: when statically linking -l/usr/lib/libexecinfo.so will
+              be generated, strip the -l in cases like this.
+            - Windows: We may get -LIBPATH:... which is later interpreted as
+              "-L IBPATH:...". If we're using an MSVC-like compiler, convert
+              that to "/LIBPATH:...", otherwise to "-L ...".
+        """
+
+        new_args = []
+        for arg in args:
+            if arg.startswith('-l') and arg.endswith('.so'):
+                # Slice off the exact prefix; lstrip('-l') strips a character
+                # set and could eat the start of the path itself.
+                new_args.append(arg[len('-l'):])
+            elif arg.startswith('-LIBPATH:'):
+                cpp = self.env.coredata.compilers[self.for_machine]['cpp']
+                new_args.extend(cpp.get_linker_search_args(arg[len('-LIBPATH:'):]))
+            else:
+                new_args.append(arg)
+        return new_args
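+
+    # Illustrative before/after of the fixup above (arguments are examples):
+    #   '-l/usr/lib/libexecinfo.so'  ->  '/usr/lib/libexecinfo.so'
+    #   '-LIBPATH:C:/LLVM/lib'       ->  whatever the C++ compiler's
+    #       get_linker_search_args() emits, e.g. '/LIBPATH:C:/LLVM/lib' for
+    #       MSVC-like linkers or '-LC:/LLVM/lib' otherwise.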
+
+    def __check_libfiles(self, shared: bool) -> None:
+        """Use llvm-config's --libfiles to check if libraries exist."""
+        mode = '--link-shared' if shared else '--link-static'
+
+        # Set self.required to true to force an exception in get_config_value
+        # if the returncode != 0
+        restore = self.required
+        self.required = True
+
+        try:
+            # It doesn't matter what the stage is, the caller needs to catch
+            # the exception anyway.
+            self.link_args = self.get_config_value(['--libfiles', mode], '')
+        finally:
+            self.required = restore
+
+    def _set_new_link_args(self, environment: 'Environment') -> None:
+        """How to set linker args for LLVM versions >= 3.9"""
+        try:
+            mode = self.get_config_value(['--shared-mode'], 'link_args')[0]
+        except IndexError:
+            mlog.debug('llvm-config --shared-mode returned an error')
+            self.is_found = False
+            return
+
+        if not self.static and mode == 'static':
+            # If llvm is configured with LLVM_BUILD_LLVM_DYLIB but not with
+            # LLVM_LINK_LLVM_DYLIB and not LLVM_BUILD_SHARED_LIBS (which
+            # upstream doesn't recommend using), then llvm-config will lie to
+            # you about how to do shared linking. It wants to link to a bunch
+            # of individual shared libs (which don't exist because llvm wasn't
+            # built with LLVM_BUILD_SHARED_LIBS).
+            #
+            # Therefore, we'll try to get the libfiles; if that fails, we'll
+            # try to build a working configuration by hand.
+            try:
+                self.__check_libfiles(True)
+            except DependencyException:
+                lib_ext = get_shared_library_suffix(environment, self.for_machine)
+                libdir = self.get_config_value(['--libdir'], 'link_args')[0]
+                # Sort for reproducibility
+                matches = sorted(glob.iglob(os.path.join(libdir, f'libLLVM*{lib_ext}')))
+                if not matches:
+                    if self.required:
+                        raise
+                    self.is_found = False
+                    return
+
+                self.link_args = self.get_config_value(['--ldflags'], 'link_args')
+                # Slice off the exact 'lib' prefix and suffix; rstrip/lstrip
+                # operate on character sets, not substrings.
+                libname = os.path.basename(matches[0])[len('lib'):-len(lib_ext)]
+                self.link_args.append(f'-l{libname}')
+                return
+        elif self.static and mode == 'shared':
+            # If, however, LLVM_BUILD_SHARED_LIBS is true (*cough* gentoo *cough*),
+            # then this is correct. Building with LLVM_BUILD_SHARED_LIBS has a side
+            # effect: it stops the generation of static archives. Therefore we need
+            # to check for that and error out on static linking if this is the case
+            try:
+                self.__check_libfiles(False)
+            except DependencyException:
+                if self.required:
+                    raise
+                self.is_found = False
+                return
+
+        link_args = ['--link-static', '--system-libs'] if self.static else ['--link-shared']
+        self.link_args = self.get_config_value(
+            ['--libs', '--ldflags'] + link_args + list(self.required_modules),
+            'link_args')
+
+    def _set_old_link_args(self) -> None:
+        """Setting linker args for older versions of llvm.
+
+        Old versions of LLVM bring an extra level of insanity with them.
+        llvm-config will provide the correct arguments for static linking, but
+        not for shared linking; we have to figure those out ourselves, because
+        of course we do.
+        """
+        if self.static:
+            self.link_args = self.get_config_value(
+                ['--libs', '--ldflags', '--system-libs'] + list(self.required_modules),
+                'link_args')
+        else:
+            # llvm-config will provide arguments for static linking, so we get
+            # to figure out for ourselves what to link with. We'll do that by
+            # checking in the directory provided by --libdir for a library
+            # called libLLVM-<version>.(so|dylib|dll)
+            libdir = self.get_config_value(['--libdir'], 'link_args')[0]
+
+            expected_name = f'libLLVM-{self.version}'
+            re_name = re.compile(fr'{expected_name}\.(so|dll|dylib)$')
+
+            for file_ in os.listdir(libdir):
+                if re_name.match(file_):
+                    self.link_args = [f'-L{libdir}',
+                                      '-l{}'.format(os.path.splitext(file_.lstrip('lib'))[0])]
+                    break
+            else:
+                raise DependencyException(
+                    'Could not find a dynamically linkable library for LLVM.')
+
+    def check_components(self, modules: T.List[str], required: bool = True) -> None:
+        """Check for llvm components (modules in meson terms).
+
+        The required option is whether the module is required, not whether LLVM
+        is required.
+        """
+        for mod in sorted(set(modules)):
+            status = ''
+
+            if mod not in self.provided_modules:
+                if required:
+                    self.is_found = False
+                    if self.required:
+                        raise DependencyException(
+                            f'Could not find required LLVM Component: {mod}')
+                    status = '(missing)'
+                else:
+                    status = '(missing but optional)'
+            else:
+                self.required_modules.add(mod)
+
+            self.module_details.append(mod + status)
+
+    def log_details(self) -> str:
+        if self.module_details:
+            return 'modules: ' + ', '.join(self.module_details)
+        return ''
+
+class LLVMDependencyCMake(CMakeDependency):
+    def __init__(self, name: str, env: 'Environment', kwargs: T.Dict[str, T.Any]) -> None:
+        self.llvm_modules = stringlistify(extract_as_list(kwargs, 'modules'))
+        self.llvm_opt_modules = stringlistify(extract_as_list(kwargs, 'optional_modules'))
+
+        compilers = None
+        if kwargs.get('native', False):
+            compilers = env.coredata.compilers.build
+        else:
+            compilers = env.coredata.compilers.host
+        if not compilers or not all(x in compilers for x in ('c', 'cpp')):
+            # Initialize basic variables
+            ExternalDependency.__init__(self, DependencyTypeName('cmake'), env, kwargs)
+
+            # Initialize CMake specific variables
+            self.found_modules: T.List[str] = []
+            self.name = name
+
+            # Warn and return
+            mlog.warning('The LLVM dependency was not found via CMake since both a C and C++ compiler are required.')
+            return
+
+        super().__init__(name, env, kwargs, language='cpp', force_use_global_compilers=True)
+
+        if self.traceparser is None:
+            return
+
+        if not self.is_found:
+            return
+
+        # The CMake script reports 'not found' when LLVM_DYLIB_COMPONENTS is not defined
+        if not self.static and version_compare(self.version, '< 7.0') and self.llvm_modules:
+            mlog.warning('Before version 7.0 cmake does not export modules for dynamic linking, cannot check required modules')
+            return
+
+        # Extract extra include directories and definitions
+        inc_dirs = self.traceparser.get_cmake_var('PACKAGE_INCLUDE_DIRS')
+        defs = self.traceparser.get_cmake_var('PACKAGE_DEFINITIONS')
+        # LLVM explicitly uses space-separated variables rather than semicolon lists
+        if len(defs) == 1:
+            defs = defs[0].split(' ')
+        temp = ['-I' + x for x in inc_dirs] + defs
+        self.compile_args += [x for x in temp if x not in self.compile_args]
+        self.compile_args = strip_system_includedirs(env, self.for_machine, self.compile_args)
+        if not self._add_sub_dependency(threads_factory(env, self.for_machine, {})):
+            self.is_found = False
+            return
+
+    def _main_cmake_file(self) -> str:
+        # Use a custom CMakeLists.txt for LLVM
+        return 'CMakeListsLLVM.txt'
+
+    # Check versions in CMake so we can return the exact version, like the
+    # config tool does (latest allowed).
+    # It is safe to append .0 to the argument; it will be discarded when we
+    # use search_version.
+    def llvm_cmake_versions(self) -> T.List[str]:
+
+        def ver_from_suf(req: str) -> str:
+            return search_version(req.strip('-')+'.0')
+
+        def version_sorter(a: str, b: str) -> int:
+            if version_compare(a, "="+b):
+                return 0
+            if version_compare(a, "<"+b):
+                return 1
+            return -1
+
+        llvm_requested_versions = [ver_from_suf(x) for x in get_llvm_tool_names('') if version_compare(ver_from_suf(x), '>=0')]
+        if self.version_reqs:
+            llvm_requested_versions = [ver_from_suf(x) for x in get_llvm_tool_names('') if version_compare_many(ver_from_suf(x), self.version_reqs)]
+        # CMake sorting before 3.18 is incorrect, sort it here instead
+        return sorted(llvm_requested_versions, key=functools.cmp_to_key(version_sorter))
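+
+    # Illustrative example (version numbers are hypothetical): if
+    # get_llvm_tool_names('') yields suffixes such as '-16' and '-15',
+    # ver_from_suf turns them into '16.0' and '15.0', and the comparator
+    # above sorts the result newest-first, e.g. ['16.0', '15.0'].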
+
+    # Split required and optional modules to distinguish them in CMake
+    def _extra_cmake_opts(self) -> T.List[str]:
+        return ['-DLLVM_MESON_REQUIRED_MODULES={}'.format(';'.join(self.llvm_modules)),
+                '-DLLVM_MESON_OPTIONAL_MODULES={}'.format(';'.join(self.llvm_opt_modules)),
+                '-DLLVM_MESON_PACKAGE_NAMES={}'.format(';'.join(get_llvm_tool_names(self.name))),
+                '-DLLVM_MESON_VERSIONS={}'.format(';'.join(self.llvm_cmake_versions())),
+                '-DLLVM_MESON_DYLIB={}'.format('OFF' if self.static else 'ON')]
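+
+    # For illustration (module names are hypothetical): with modules=['core']
+    # and optional_modules=['amdgpu'], a shared build would pass
+    #   -DLLVM_MESON_REQUIRED_MODULES=core
+    #   -DLLVM_MESON_OPTIONAL_MODULES=amdgpu
+    #   -DLLVM_MESON_DYLIB=ON
+    # to the CMakeListsLLVM.txt file shown earlier in this diff.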
+
+    def _map_module_list(self, modules: T.List[T.Tuple[str, bool]], components: T.List[T.Tuple[str, bool]]) -> T.List[T.Tuple[str, bool]]:
+        res = []
+        for mod, required in modules:
+            cm_targets = self.traceparser.get_cmake_var(f'MESON_LLVM_TARGETS_{mod}')
+            if not cm_targets:
+                if required:
+                    raise self._gen_exception(f'LLVM module {mod} was not found')
+                else:
+                    mlog.warning('Optional LLVM module', mlog.bold(mod), 'was not found', fatal=False)
+                    continue
+            for i in cm_targets:
+                res += [(i, required)]
+        return res
+
+    def _original_module_name(self, module: str) -> str:
+        orig_name = self.traceparser.get_cmake_var(f'MESON_TARGET_TO_LLVM_{module}')
+        if orig_name:
+            return orig_name[0]
+        return module
+
+
+class ValgrindDependency(PkgConfigDependency):
+    '''
+    Consumers of Valgrind usually only need the compile args and do not want to
+    link to its (static) libraries.
+    '''
+    def __init__(self, env: 'Environment', kwargs: T.Dict[str, T.Any]):
+        super().__init__('valgrind', env, kwargs)
+
+    def get_link_args(self, language: T.Optional[str] = None, raw: bool = False) -> T.List[str]:
+        return []
+
+packages['valgrind'] = ValgrindDependency
+
+
+class ZlibSystemDependency(SystemDependency):
+
+    def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]):
+        super().__init__(name, environment, kwargs)
+        from ..compilers.c import AppleClangCCompiler
+        from ..compilers.cpp import AppleClangCPPCompiler
+
+        m = self.env.machines[self.for_machine]
+
+        # I'm not sure this is entirely correct. What if we're cross compiling
+        # from something to macOS?
+        if ((m.is_darwin() and isinstance(self.clib_compiler, (AppleClangCCompiler, AppleClangCPPCompiler))) or
+                m.is_freebsd() or m.is_dragonflybsd() or m.is_android()):
+            # No need to set includes:
+            # on macOS, Xcode/clang will do that for us;
+            # on FreeBSD, zlib.h is in /usr/include.
+
+            self.is_found = True
+            self.link_args = ['-lz']
+        else:
+            if self.clib_compiler.get_argument_syntax() == 'msvc':
+                libs = ['zlib1', 'zlib']
+            else:
+                libs = ['z']
+            for lib in libs:
+                l = self.clib_compiler.find_library(lib, environment, [])
+                h = self.clib_compiler.has_header('zlib.h', '', environment, dependencies=[self])
+                if l and h[0]:
+                    self.is_found = True
+                    self.link_args = l
+                    break
+            else:
+                return
+
+        v, _ = self.clib_compiler.get_define('ZLIB_VERSION', '#include <zlib.h>', self.env, [], [self])
+        self.version = v.strip('"')
+
+
+class JNISystemDependency(SystemDependency):
+    def __init__(self, environment: 'Environment', kwargs: JNISystemDependencyKW):
+        super().__init__('jni', environment, T.cast('T.Dict[str, T.Any]', kwargs))
+
+        self.feature_since = ('0.62.0', '')
+
+        m = self.env.machines[self.for_machine]
+
+        if 'java' not in environment.coredata.compilers[self.for_machine]:
+            detect_compiler(self.name, environment, self.for_machine, 'java')
+        self.javac = environment.coredata.compilers[self.for_machine]['java']
+        self.version = self.javac.version
+
+        modules: T.List[str] = mesonlib.listify(kwargs.get('modules', []))
+        for module in modules:
+            if module not in {'jvm', 'awt'}:
+                msg = f'Unknown JNI module ({module})'
+                if self.required:
+                    mlog.error(msg)
+                else:
+                    mlog.debug(msg)
+                self.is_found = False
+                return
+
+        if 'version' in kwargs and not version_compare(self.version, kwargs['version']):
+            mlog.error(f'Incorrect JDK version found ({self.version}), wanted {kwargs["version"]}')
+            self.is_found = False
+            return
+
+        self.java_home = environment.properties[self.for_machine].get_java_home()
+        if not self.java_home:
+            self.java_home = pathlib.Path(shutil.which(self.javac.exelist[0])).resolve().parents[1]
+            if m.is_darwin():
+                problem_java_prefix = pathlib.Path('/System/Library/Frameworks/JavaVM.framework/Versions')
+                if problem_java_prefix in self.java_home.parents:
+                    res = subprocess.run(['/usr/libexec/java_home', '--failfast', '--arch', m.cpu_family],
+                                         stdout=subprocess.PIPE)
+                    if res.returncode != 0:
+                        msg = 'JAVA_HOME could not be discovered on the system. Please set it explicitly.'
+                        if self.required:
+                            mlog.error(msg)
+                        else:
+                            mlog.debug(msg)
+                        self.is_found = False
+                        return
+                    self.java_home = pathlib.Path(res.stdout.decode().strip())
+
+        platform_include_dir = self.__machine_info_to_platform_include_dir(m)
+        if platform_include_dir is None:
+            mlog.error("Could not find a JDK platform include directory for your OS, please open an issue or provide a pull request.")
+            self.is_found = False
+            return
+
+        java_home_include = self.java_home / 'include'
+        self.compile_args.append(f'-I{java_home_include}')
+        self.compile_args.append(f'-I{java_home_include / platform_include_dir}')
+
+        # Consider the dependency found from this point on; a missing jvm or
+        # jawt module below resets it to False.
+        self.is_found = True
+
+        if modules:
+            if m.is_windows():
+                java_home_lib = self.java_home / 'lib'
+                java_home_lib_server = java_home_lib
+            else:
+                if version_compare(self.version, '<= 1.8.0'):
+                    java_home_lib = self.java_home / 'jre' / 'lib' / self.__cpu_translate(m.cpu_family)
+                else:
+                    java_home_lib = self.java_home / 'lib'
+
+                java_home_lib_server = java_home_lib / 'server'
+
+            if 'jvm' in modules:
+                jvm = self.clib_compiler.find_library('jvm', environment, extra_dirs=[str(java_home_lib_server)])
+                if jvm is None:
+                    mlog.debug('jvm library not found.')
+                    self.is_found = False
+                else:
+                    self.link_args.extend(jvm)
+            if 'awt' in modules:
+                jawt = self.clib_compiler.find_library('jawt', environment, extra_dirs=[str(java_home_lib)])
+                if jawt is None:
+                    mlog.debug('jawt library not found.')
+                    self.is_found = False
+                else:
+                    self.link_args.extend(jawt)
+
+    @staticmethod
+    def __cpu_translate(cpu: str) -> str:
+        '''
+        The JDK and Meson have a disagreement here, so translate it over. In the event more
+        translation needs to be done, add to following dict.
+        '''
+        java_cpus = {
+            'x86_64': 'amd64',
+        }
+
+        return java_cpus.get(cpu, cpu)
+
+    @staticmethod
+    def __machine_info_to_platform_include_dir(m: 'MachineInfo') -> T.Optional[str]:
+        '''Translates the machine information to the platform-dependent include directory
+
+        When inspecting a JDK release tarball or $JAVA_HOME, inside the `include/` directory is a
+        platform-dependent directory that must be on the target's include path in addition to the
+        parent `include/` directory.
+        '''
+        if m.is_linux():
+            return 'linux'
+        elif m.is_windows():
+            return 'win32'
+        elif m.is_darwin():
+            return 'darwin'
+        elif m.is_sunos():
+            return 'solaris'
+        elif m.is_freebsd():
+            return 'freebsd'
+        elif m.is_netbsd():
+            return 'netbsd'
+        elif m.is_openbsd():
+            return 'openbsd'
+        elif m.is_dragonflybsd():
+            return 'dragonfly'
+
+        return None
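+
+        # Illustrative example (paths depend on the JDK layout): on Linux
+        # this returns 'linux', so the compile args assembled in __init__
+        # come out roughly as
+        #   -I$JAVA_HOME/include -I$JAVA_HOME/include/linux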
+
+packages['jni'] = JNISystemDependency
+
+
+class JDKSystemDependency(JNISystemDependency):
+    def __init__(self, environment: 'Environment', kwargs: JNISystemDependencyKW):
+        super().__init__(environment, kwargs)
+
+        self.feature_since = ('0.59.0', '')
+        self.featurechecks.append(FeatureDeprecated(
+            'jdk system dependency',
+            '0.62.0',
+            'Use the jni system dependency instead'
+        ))
+
+packages['jdk'] = JDKSystemDependency
+
+
+packages['llvm'] = llvm_factory = DependencyFactory(
+    'LLVM',
+    [DependencyMethods.CMAKE, DependencyMethods.CONFIG_TOOL],
+    cmake_class=LLVMDependencyCMake,
+    configtool_class=LLVMDependencyConfigTool,
+)
+
+packages['gtest'] = gtest_factory = DependencyFactory(
+    'gtest',
+    [DependencyMethods.PKGCONFIG, DependencyMethods.SYSTEM],
+    pkgconfig_class=GTestDependencyPC,
+    system_class=GTestDependencySystem,
+)
+
+packages['gmock'] = gmock_factory = DependencyFactory(
+    'gmock',
+    [DependencyMethods.PKGCONFIG, DependencyMethods.SYSTEM],
+    pkgconfig_class=GMockDependencyPC,
+    system_class=GMockDependencySystem,
+)
+
+packages['zlib'] = zlib_factory = DependencyFactory(
+    'zlib',
+    [DependencyMethods.PKGCONFIG, DependencyMethods.CMAKE, DependencyMethods.SYSTEM],
+    cmake_name='ZLIB',
+    system_class=ZlibSystemDependency,
+)
diff --git a/vendored-meson/meson/mesonbuild/dependencies/dub.py b/vendored-meson/meson/mesonbuild/dependencies/dub.py
new file mode 100644
index 000000000000..37a9ea11e5eb
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/dependencies/dub.py
@@ -0,0 +1,434 @@
+# Copyright 2013-2021 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+from .base import ExternalDependency, DependencyException, DependencyTypeName
+from .pkgconfig import PkgConfigDependency
+from ..mesonlib import (Popen_safe, OptionKey, join_args, version_compare)
+from ..programs import ExternalProgram
+from .. import mlog
+import re
+import os
+import json
+import typing as T
+
+if T.TYPE_CHECKING:
+    from ..environment import Environment
+
+
+class DubDependency(ExternalDependency):
+    # dub program and version
+    class_dubbin: T.Optional[T.Tuple[ExternalProgram, str]] = None
+    class_dubbin_searched = False
+
+    def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]):
+        super().__init__(DependencyTypeName('dub'), environment, kwargs, language='d')
+        self.name = name
+        from ..compilers.d import DCompiler, d_feature_args
+
+        _temp_comp = super().get_compiler()
+        assert isinstance(_temp_comp, DCompiler)
+        self.compiler = _temp_comp
+
+        if 'required' in kwargs:
+            self.required = kwargs.get('required')
+
+        if DubDependency.class_dubbin is None and not DubDependency.class_dubbin_searched:
+            DubDependency.class_dubbin = self._check_dub()
+            DubDependency.class_dubbin_searched = True
+        if DubDependency.class_dubbin is None:
+            if self.required:
+                raise DependencyException('DUB not found.')
+            self.is_found = False
+            return
+
+        (self.dubbin, dubver) = DubDependency.class_dubbin
+
+        assert isinstance(self.dubbin, ExternalProgram)
+
+        # Check if Dub version is compatible with Meson
+        if version_compare(dubver, '>1.31.1'):
+            if self.required:
+                raise DependencyException(
+                    f"DUB version {dubver} is not compatible with Meson (can't locate artifacts in Dub cache)")
+            self.is_found = False
+            return
+
+        mlog.debug('Determining dependency {!r} with DUB executable '
+                   '{!r}'.format(name, self.dubbin.get_path()))
+
+        # if an explicit version spec was stated, use this when querying Dub
+        main_pack_spec = name
+        if 'version' in kwargs:
+            version_spec = kwargs['version']
+            if isinstance(version_spec, list):
+                version_spec = " ".join(version_spec)
+            main_pack_spec = f'{name}@{version_spec}'
+
+        # we need to know the target architecture
+        dub_arch = self.compiler.arch
+
+        # we need to know the build type as well
+        dub_buildtype = str(environment.coredata.get_option(OptionKey('buildtype')))
+        # Meson buildtypes: plain, debug, debugoptimized, release, minsize, custom.
+        # DUB buildtypes: debug (default), plain, release, release-debug, release-nobounds,
+        # unittest, profile, profile-gc, docs, ddox, cov, unittest-cov, syntax and custom.
+        if dub_buildtype == 'debugoptimized':
+            dub_buildtype = 'release-debug'
+        elif dub_buildtype == 'minsize':
+            dub_buildtype = 'release'
+
+        # Ask dub for the package
+        describe_cmd = [
+            'describe', main_pack_spec, '--arch=' + dub_arch,
+            '--build=' + dub_buildtype, '--compiler=' + self.compiler.get_exelist()[-1]
+        ]
+        ret, res, err = self._call_dubbin(describe_cmd)
+
+        if ret != 0:
+            mlog.debug('DUB describe failed: ' + err)
+            if 'locally' in err:
+                fetch_cmd = ['dub', 'fetch', main_pack_spec]
+                mlog.error(mlog.bold(main_pack_spec), 'is not present locally. You may try the following command:')
+                mlog.log(mlog.bold(join_args(fetch_cmd)))
+            self.is_found = False
+            return
+
+        # A command that might be useful in case of a missing DUB package
+        def dub_build_deep_command() -> str:
+            cmd = [
+                'dub', 'run', 'dub-build-deep', '--yes', '--', main_pack_spec,
+                '--arch=' + dub_arch, '--compiler=' + self.compiler.get_exelist()[-1],
+                '--build=' + dub_buildtype
+            ]
+            return join_args(cmd)
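+
+        # Illustrative output of the helper above (package, arch and
+        # compiler are hypothetical):
+        #   dub run dub-build-deep --yes -- vibe-d@0.9.5 --arch=x86_64 --compiler=ldc2 --build=release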
+
+        dub_comp_id = self.compiler.get_id().replace('llvm', 'ldc').replace('gcc', 'gdc')
+        description = json.loads(res)
+
+        self.compile_args = []
+        self.link_args = self.raw_link_args = []
+
+        show_buildtype_warning = False
+
+        def find_package_target(pkg: T.Dict[str, str]) -> bool:
+            nonlocal show_buildtype_warning
+            # try to find a static library in a DUB folder corresponding to
+            # version, configuration, compiler, arch and build-type
+            # if one is found, add it to link_args.
+            # The order of link_args is meaningful, so this function MUST be
+            # called in the right order
+            pack_id = f'{pkg["name"]}@{pkg["version"]}'
+            (tgt_file, compatibilities) = self._find_compatible_package_target(description, pkg, dub_comp_id)
+            if tgt_file is None:
+                if not compatibilities:
+                    mlog.error(mlog.bold(pack_id), 'not found')
+                elif 'compiler' not in compatibilities:
+                    mlog.error(mlog.bold(pack_id), 'found but not compiled with ', mlog.bold(dub_comp_id))
+                elif dub_comp_id != 'gdc' and 'compiler_version' not in compatibilities:
+                    mlog.error(mlog.bold(pack_id), 'found but not compiled with',
+                               mlog.bold(f'{dub_comp_id}-{self.compiler.version}'))
+                elif 'arch' not in compatibilities:
+                    mlog.error(mlog.bold(pack_id), 'found but not compiled for', mlog.bold(dub_arch))
+                elif 'platform' not in compatibilities:
+                    mlog.error(mlog.bold(pack_id), 'found but not compiled for',
+                               mlog.bold('.'.join(description['platform'])))
+                elif 'configuration' not in compatibilities:
+                    mlog.error(mlog.bold(pack_id), 'found but not compiled for the',
+                               mlog.bold(pkg['configuration']), 'configuration')
+                else:
+                    mlog.error(mlog.bold(pack_id), 'not found')
+
+                mlog.log('You may try the following command to install the necessary DUB libraries:')
+                mlog.log(mlog.bold(dub_build_deep_command()))
+
+                return False
+
+            if 'build_type' not in compatibilities:
+                mlog.warning(mlog.bold(pack_id), 'found but not compiled as', mlog.bold(dub_buildtype))
+                show_buildtype_warning = True
+
+            self.link_args.append(tgt_file)
+            return True
+
+        # Main algorithm:
+        # 1. Ensure that the target is a compatible library type (not dynamic)
+        # 2. Find a compatible built library for the main dependency
+        # 3. Do the same for each sub-dependency.
+        #    link_args MUST be in the same order as the "linkDependencies" of the main target
+        # 4. Add other build settings (imports, versions etc.)
+
+        # 1
+        self.is_found = False
+        packages = {}
+        for pkg in description['packages']:
+            packages[pkg['name']] = pkg
+
+            if not pkg['active']:
+                continue
+
+            if pkg['targetType'] == 'dynamicLibrary':
+                mlog.error('DUB dynamic library dependencies are not supported.')
+                self.is_found = False
+                return
+
+            # check that the main dependency is indeed a library
+            if pkg['name'] == name:
+                self.is_found = True
+
+                if pkg['targetType'] not in ['library', 'sourceLibrary', 'staticLibrary']:
+                    mlog.error(mlog.bold(name), "found but it isn't a library")
+                    self.is_found = False
+                    return
+
+                self.version = pkg['version']
+                self.pkg = pkg
+
+        # collect all targets
+        targets = {}
+        for tgt in description['targets']:
+            targets[tgt['rootPackage']] = tgt
+
+        if name not in targets:
+            self.is_found = False
+            if self.pkg['targetType'] == 'sourceLibrary':
+                # source libraries have no associated targets,
+                # but some build settings like import folders must be found from the package object.
+                # The current algorithm only gets these from "buildSettings" in the target object.
+                # Let's save this for a future PR.
+                # (See openssl DUB package for example of sourceLibrary)
+                mlog.error('DUB targets of type', mlog.bold('sourceLibrary'), 'are not supported.')
+            else:
+                mlog.error('Could not find target description for', mlog.bold(main_pack_spec))
+
+        if not self.is_found:
+            mlog.error(f'Could not find {name} in DUB description')
+            return
+
+        # Current impl only supports static libraries
+        self.static = True
+
+        # 2
+        if not find_package_target(self.pkg):
+            self.is_found = False
+            return
+
+        # 3
+        for link_dep in targets[name]['linkDependencies']:
+            pkg = packages[link_dep]
+            if not find_package_target(pkg):
+                self.is_found = False
+                return
+
+        if show_buildtype_warning:
+            mlog.log('If it is not suitable, try the following command and reconfigure Meson with', mlog.bold('--clearcache'))
+            mlog.log(mlog.bold(dub_build_deep_command()))
+
+        # 4
+        bs = targets[name]['buildSettings']
+
+        for flag in bs['dflags']:
+            self.compile_args.append(flag)
+
+        for path in bs['importPaths']:
+            self.compile_args.append('-I' + path)
+
+        for path in bs['stringImportPaths']:
+            if 'import_dir' not in d_feature_args[self.compiler.id]:
+                break
+            flag = d_feature_args[self.compiler.id]['import_dir']
+            self.compile_args.append(f'{flag}={path}')
+
+        for ver in bs['versions']:
+            if 'version' not in d_feature_args[self.compiler.id]:
+                break
+            flag = d_feature_args[self.compiler.id]['version']
+            self.compile_args.append(f'{flag}={ver}')
+
+        if bs['mainSourceFile']:
+            self.compile_args.append(bs['mainSourceFile'])
+
+        # pass static libraries
+        # linkerFiles are added during step 3
+        # for file in bs['linkerFiles']:
+        #     self.link_args.append(file)
+
+        for file in bs['sourceFiles']:
+            # sourceFiles may contain static libraries
+            if file.endswith('.lib') or file.endswith('.a'):
+                self.link_args.append(file)
+
+        for flag in bs['lflags']:
+            self.link_args.append(flag)
+
+        is_windows = self.env.machines.host.is_windows()
+        if is_windows:
+            winlibs = ['kernel32', 'user32', 'gdi32', 'winspool', 'shell32', 'ole32',
+                       'oleaut32', 'uuid', 'comdlg32', 'advapi32', 'ws2_32']
+
+        for lib in bs['libs']:
+            if os.name != 'nt':
+                # try to add system libraries via pkg-config
+                pkgdep = PkgConfigDependency(lib, environment, {'required': 'true', 'silent': 'true'})
+                if pkgdep.is_found:
+                    for arg in pkgdep.get_compile_args():
+                        self.compile_args.append(arg)
+                    for arg in pkgdep.get_link_args():
+                        self.link_args.append(arg)
+                    for arg in pkgdep.get_link_args(raw=True):
+                        self.raw_link_args.append(arg)
+                    continue
+
+            if is_windows and lib in winlibs:
+                self.link_args.append(lib + '.lib')
+                continue
+
+            # fallback
+            self.link_args.append('-l'+lib)
+
+    # This function finds the target of the provided JSON package, built for the right
+    # compiler, architecture, configuration, etc.
+    # It returns (target | None, compatibilities).
+    # If None is returned for the target, compatibilities lists what was found in
+    # other targets that lacked full compatibility.
+    def _find_compatible_package_target(self, jdesc: T.Dict[str, str], jpack: T.Dict[str, str], dub_comp_id: str) -> T.Tuple[T.Optional[str], T.Optional[T.Set[str]]]:
+        dub_build_path = os.path.join(jpack['path'], '.dub', 'build')
+
+        if not os.path.exists(dub_build_path):
+            return (None, None)
+
+        # try to find a dir like library-debug-linux.posix-x86_64-ldc_2081-EF934983A3319F8F8FF2F0E107A363BA
+
+        # fields are:
+        #  - configuration
+        #  - build type
+        #  - platform
+        #  - architecture
+        #  - compiler id (dmd, ldc, gdc)
+        #  - compiler version or frontend id or frontend version?
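+        #
+        # Reading the example above against this field list (an assumed
+        # decomposition): configuration 'library', build type 'debug',
+        # platform 'linux.posix', arch 'x86_64', compiler 'ldc' with
+        # frontend id '2081', plus a content-hash suffix.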
+
+        conf = jpack['configuration']
+        build_type = jdesc['buildType']
+        platforms = jdesc['platform']
+        archs = jdesc['architecture']
+
+        # Get the D frontend version implemented in the compiler, or the compiler version itself;
+        # gdc doesn't support this
+        comp_versions = []
+
+        if dub_comp_id != 'gdc':
+            comp_versions.append(self.compiler.version)
+
+            ret, res = self._call_compbin(['--version'])[0:2]
+            if ret != 0:
+                mlog.error('Failed to run', mlog.bold(dub_comp_id))
+                return (None, None)
+            d_ver_reg = re.search(r'v[0-9]\.[0-9][0-9][0-9]\.[0-9]', res)  # Ex.: v2.081.2
+
+            if d_ver_reg is not None:
+                frontend_version = d_ver_reg.group()
+                frontend_id = frontend_version.rsplit('.', 1)[0].replace(
+                    'v', '').replace('.', '')  # Fix structure. Ex.: 2081
+                comp_versions.extend([frontend_version, frontend_id])
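+                # comp_versions might now be, e.g.,
+                # ['1.28.0', 'v2.081.2', '2081'] (illustrative values:
+                # compiler version, frontend version, frontend id)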
+
+        compatibilities: T.Set[str] = set()
+
+        # build_type is not in check_list because different build types might be compatible.
+        # We do show a WARNING that the build type is not the same.
+        # It might be critical in release builds, and acceptable otherwise
+        check_list = ('configuration', 'platform', 'arch', 'compiler', 'compiler_version')
+
+        for entry in os.listdir(dub_build_path):
+
+            target = os.path.join(dub_build_path, entry, jpack['targetFileName'])
+            if not os.path.exists(target):
+                # unless Dub and Meson are racing, the target file should be present
+                # when the directory is present
+                mlog.debug("WARNING: Could not find a Dub target: " + target)
+                continue
+
+            # we build a new set for each entry, because if this target is returned
+            # we want to return only the compatibilities associated with this target;
+            # otherwise we could miss the WARNING about build_type
+            comps = set()
+
+            if conf in entry:
+                comps.add('configuration')
+
+            if build_type in entry:
+                comps.add('build_type')
+
+            if all(platform in entry for platform in platforms):
+                comps.add('platform')
+
+            if all(arch in entry for arch in archs):
+                comps.add('arch')
+
+            if dub_comp_id in entry:
+                comps.add('compiler')
+
+            if dub_comp_id == 'gdc' or any(cv in entry for cv in comp_versions):
+                comps.add('compiler_version')
+
+            if all(key in comps for key in check_list):
+                return (target, comps)
+            else:
+                compatibilities = set.union(compatibilities, comps)
+
+        return (None, compatibilities)
+
+    def _call_dubbin(self, args: T.List[str], env: T.Optional[T.Dict[str, str]] = None) -> T.Tuple[int, str, str]:
+        assert isinstance(self.dubbin, ExternalProgram)
+        p, out, err = Popen_safe(self.dubbin.get_command() + args, env=env)
+        return p.returncode, out.strip(), err.strip()
+
+    def _call_compbin(self, args: T.List[str], env: T.Optional[T.Dict[str, str]] = None) -> T.Tuple[int, str, str]:
+        p, out, err = Popen_safe(self.compiler.get_exelist() + args, env=env)
+        return p.returncode, out.strip(), err.strip()
+
+    def _check_dub(self) -> T.Optional[T.Tuple[ExternalProgram, str]]:
+
+        def find() -> T.Optional[T.Tuple[ExternalProgram, str]]:
+            dubbin = ExternalProgram('dub', silent=True)
+
+            if not dubbin.found():
+                return None
+
+            try:
+                p, out = Popen_safe(dubbin.get_command() + ['--version'])[0:2]
+                if p.returncode != 0:
+                    mlog.warning('Found dub {!r} but couldn\'t run it'
+                                 ''.format(' '.join(dubbin.get_command())))
+                    return None
+
+            except (FileNotFoundError, PermissionError):
+                return None
+
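+            # e.g. "DUB version 1.31.1, built on ..." -> "1.31.1"
+            # (output shape assumed from the regex below)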
+            vermatch = re.search(r'DUB version (\d+\.\d+\.\d+.*), ', out.strip())
+            if vermatch:
+                dubver = vermatch.group(1)
+            else:
+                mlog.warning(f"Found dub {' '.join(dubbin.get_command())} but couldn't parse version in {out.strip()}")
+                return None
+
+            return (dubbin, dubver)
+
+        found = find()
+
+        if found is None:
+            mlog.log('Found DUB:', mlog.red('NO'))
+        else:
+            (dubbin, dubver) = found
+            mlog.log('Found DUB:', mlog.bold(dubbin.get_path()),
+                     '(version %s)' % dubver)
+
+        return found
diff --git a/vendored-meson/meson/mesonbuild/dependencies/factory.py b/vendored-meson/meson/mesonbuild/dependencies/factory.py
new file mode 100644
index 000000000000..d50ce0f6b253
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/dependencies/factory.py
@@ -0,0 +1,156 @@
+# Copyright 2013-2021 The Meson development team
+# Copyright © 2021 Intel Corporation
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+import functools
+import typing as T
+
+from .base import DependencyException, DependencyMethods
+from .base import process_method_kw
+from .base import BuiltinDependency, SystemDependency
+from .cmake import CMakeDependency
+from .framework import ExtraFrameworkDependency
+from .pkgconfig import PkgConfigDependency
+
+if T.TYPE_CHECKING:
+    from .base import ExternalDependency
+    from .configtool import ConfigToolDependency
+    from ..environment import Environment
+    from ..mesonlib import MachineChoice
+
+    DependencyGenerator = T.Callable[[], ExternalDependency]
+    FactoryFunc = T.Callable[
+        [
+            'Environment',
+            MachineChoice,
+            T.Dict[str, T.Any],
+            T.List[DependencyMethods]
+        ],
+        T.List[DependencyGenerator]
+    ]
+
+    WrappedFactoryFunc = T.Callable[
+        [
+            'Environment',
+            MachineChoice,
+            T.Dict[str, T.Any]
+        ],
+        T.List[DependencyGenerator]
+    ]
+
+    # This should be str, Environment, T.Dict[str, T.Any], T.Optional[str]
+    # But if you try that, you get error: Cannot infer type of lambda
+    CmakeDependencyFunc = T.Callable[..., CMakeDependency]
+
+class DependencyFactory:
+
+    """Factory to get dependencies from multiple sources.
+
+    This class provides an initializer that takes a set of names and classes
+    for various kinds of dependencies. When the initialized object is called
+    it returns a list of callables that return Dependency objects, to try in order.
+
+    :name: The name of the dependency. This will be passed as the name
+        parameter of each dependency unless it is overridden on a per
+        type basis.
+    :methods: An ordered list of DependencyMethods. This is the order
+        dependencies will be returned in unless they are removed by the
+        _process_method function.
+    :*_name: This will overwrite the name passed to the corresponding class.
+        For example, if the name is 'zlib', but cmake calls the dependency
+        'Z', then using `cmake_name='Z'` will pass the name as 'Z' to cmake.
+    :*_class: A *type* or callable that creates a class, and has the
+        signature of an ExternalDependency
+    :system_class: If you pass DependencyMethods.SYSTEM in methods, you must
+        set this argument.
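+
+    A minimal illustrative sketch (the 'foo'/'Foo' names are hypothetical):
+
+        foo_factory = DependencyFactory(
+            'foo',
+            [DependencyMethods.PKGCONFIG, DependencyMethods.CMAKE],
+            cmake_name='Foo')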
+    """
+
+    def __init__(self, name: str, methods: T.List[DependencyMethods], *,
+                 extra_kwargs: T.Optional[T.Dict[str, T.Any]] = None,
+                 pkgconfig_name: T.Optional[str] = None,
+                 pkgconfig_class: 'T.Type[PkgConfigDependency]' = PkgConfigDependency,
+                 cmake_name: T.Optional[str] = None,
+                 cmake_class: 'T.Union[T.Type[CMakeDependency], CmakeDependencyFunc]' = CMakeDependency,
+                 configtool_class: 'T.Optional[T.Type[ConfigToolDependency]]' = None,
+                 framework_name: T.Optional[str] = None,
+                 framework_class: 'T.Type[ExtraFrameworkDependency]' = ExtraFrameworkDependency,
+                 builtin_class: 'T.Type[BuiltinDependency]' = BuiltinDependency,
+                 system_class: 'T.Type[SystemDependency]' = SystemDependency):
+
+        if DependencyMethods.CONFIG_TOOL in methods and not configtool_class:
+            raise DependencyException('A configtool must have a custom class')
+
+        self.extra_kwargs = extra_kwargs or {}
+        self.methods = methods
+        self.classes: T.Dict[
+            DependencyMethods,
+            T.Callable[['Environment', T.Dict[str, T.Any]], ExternalDependency]
+        ] = {
+            # Just attach the correct name right now, either the generic name
+            # or the method specific name.
+            DependencyMethods.EXTRAFRAMEWORK: functools.partial(framework_class, framework_name or name),
+            DependencyMethods.PKGCONFIG: functools.partial(pkgconfig_class, pkgconfig_name or name),
+            DependencyMethods.CMAKE: functools.partial(cmake_class, cmake_name or name),
+            DependencyMethods.SYSTEM: functools.partial(system_class, name),
+            DependencyMethods.BUILTIN: functools.partial(builtin_class, name),
+            DependencyMethods.CONFIG_TOOL: None,
+        }
+        if configtool_class is not None:
+            self.classes[DependencyMethods.CONFIG_TOOL] = functools.partial(configtool_class, name)
+
+    @staticmethod
+    def _process_method(method: DependencyMethods, env: 'Environment', for_machine: MachineChoice) -> bool:
+        """Report whether a method is valid or not.
+
+        If the method is valid, return true, otherwise return false. This is
+        used in a list comprehension to filter methods that are not possible.
+
+        By default this only removes EXTRAFRAMEWORK dependencies for non-mac platforms.
+        """
+        # Extra frameworks are only valid for macOS and other apple products
+        if (method is DependencyMethods.EXTRAFRAMEWORK and
+                not env.machines[for_machine].is_darwin()):
+            return False
+        return True
+
+    def __call__(self, env: 'Environment', for_machine: MachineChoice,
+                 kwargs: T.Dict[str, T.Any]) -> T.List['DependencyGenerator']:
+        """Return a list of Dependencies with the arguments already attached."""
+        methods = process_method_kw(self.methods, kwargs)
+        nwargs = self.extra_kwargs.copy()
+        nwargs.update(kwargs)
+
+        return [functools.partial(self.classes[m], env, nwargs) for m in methods
+                if self._process_method(m, env, for_machine)]
+
+
+def factory_methods(methods: T.Set[DependencyMethods]) -> T.Callable[['FactoryFunc'], 'WrappedFactoryFunc']:
+    """Decorator for handling methods for dependency factory functions.
+
+    This helps to make factory functions self-documenting
+    >>> @factory_methods([DependencyMethods.PKGCONFIG, DependencyMethods.CMAKE])
+    ... def factory(env: Environment, for_machine: MachineChoice, kwargs: T.Dict[str, T.Any], methods: T.List[DependencyMethods]) -> T.List['DependencyGenerator']:
+    ...     pass
+    """
+
+    def inner(func: 'FactoryFunc') -> 'WrappedFactoryFunc':
+
+        @functools.wraps(func)
+        def wrapped(env: 'Environment', for_machine: MachineChoice, kwargs: T.Dict[str, T.Any]) -> T.List['DependencyGenerator']:
+            return func(env, for_machine, kwargs, process_method_kw(methods, kwargs))
+
+        return wrapped
+
+    return inner
diff --git a/vendored-meson/meson/mesonbuild/dependencies/framework.py b/vendored-meson/meson/mesonbuild/dependencies/framework.py
new file mode 100644
index 000000000000..b02b3ceb7d5b
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/dependencies/framework.py
@@ -0,0 +1,121 @@
+# Copyright 2013-2021 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+from .base import DependencyTypeName, ExternalDependency, DependencyException
+from ..mesonlib import MesonException, Version, stringlistify
+from .. import mlog
+from pathlib import Path
+import typing as T
+
+if T.TYPE_CHECKING:
+    from ..environment import Environment
+
+class ExtraFrameworkDependency(ExternalDependency):
+    system_framework_paths: T.Optional[T.List[str]] = None
+
+    def __init__(self, name: str, env: 'Environment', kwargs: T.Dict[str, T.Any], language: T.Optional[str] = None) -> None:
+        paths = stringlistify(kwargs.get('paths', []))
+        super().__init__(DependencyTypeName('extraframeworks'), env, kwargs, language=language)
+        self.name = name
+        # Full path to framework directory
+        self.framework_path: T.Optional[str] = None
+        if not self.clib_compiler:
+            raise DependencyException('No C-like compilers are available')
+        if self.system_framework_paths is None:
+            try:
+                self.system_framework_paths = self.clib_compiler.find_framework_paths(self.env)
+            except MesonException as e:
+                if 'non-clang' in str(e):
+                    # Apple frameworks can only be found (and used) with the
+                    # system compiler. It is not available so bail immediately.
+                    self.is_found = False
+                    return
+                raise
+        self.detect(name, paths)
+
+    def detect(self, name: str, paths: T.List[str]) -> None:
+        if not paths:
+            paths = self.system_framework_paths
+        for p in paths:
+            mlog.debug(f'Looking for framework {name} in {p}')
+            # We need to know the exact framework path because it's used by the
+            # Qt5 dependency class, and for setting the include path. We also
+            # want to avoid searching in an invalid framework path which wastes
+            # time and can cause a false positive.
+            framework_path = self._get_framework_path(p, name)
+            if framework_path is None:
+                continue
+            # We want to prefer the specified paths (in order) over the system
+            # paths since these are "extra" frameworks.
+            # For example, Python2's framework is in /System/Library/Frameworks and
+            # Python3's framework is in /Library/Frameworks, but both are called
+            # Python.framework. We need to know for sure that the framework was
+            # found in the path we expect.
+            allow_system = p in self.system_framework_paths
+            args = self.clib_compiler.find_framework(name, self.env, [p], allow_system)
+            if args is None:
+                continue
+            self.link_args = args
+            self.framework_path = framework_path.as_posix()
+            self.compile_args = ['-F' + self.framework_path]
+            # We need to also add -I includes to the framework because all
+            # cross-platform projects such as OpenGL, Python, Qt, GStreamer,
+            # etc do not use "framework includes":
+            # https://developer.apple.com/library/archive/documentation/MacOSX/Conceptual/BPFrameworks/Tasks/IncludingFrameworks.html
+            incdir = self._get_framework_include_path(framework_path)
+            if incdir:
+                self.compile_args += ['-I' + incdir]
+            self.is_found = True
+            return
+
+    def _get_framework_path(self, path: str, name: str) -> T.Optional[Path]:
+        p = Path(path)
+        lname = name.lower()
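+        # e.g. a name like 'opengl' should match a directory named
+        # 'OpenGL.framework', hence the case-insensitive comparison below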
+        for d in p.glob('*.framework/'):
+            if lname == d.name.rsplit('.', 1)[0].lower():
+                return d
+        return None
+
+    def _get_framework_latest_version(self, path: Path) -> str:
+        versions = []
+        for each in path.glob('Versions/*'):
+            # macOS filesystems are usually case-insensitive
+            if each.name.lower() == 'current':
+                continue
+            versions.append(Version(each.name))
+        if len(versions) == 0:
+            # most system frameworks do not have a 'Versions' directory
+            return 'Headers'
+        return 'Versions/{}/Headers'.format(sorted(versions)[-1]._s)
+
+    def _get_framework_include_path(self, path: Path) -> T.Optional[str]:
+        # According to the spec, 'Headers' must always be a symlink to the
+        # Headers directory inside the currently-selected version of the
+        # framework, but sometimes frameworks are broken. Look in 'Versions'
+        # for the currently-selected version or pick the latest one.
+        trials = ('Headers', 'Versions/Current/Headers',
+                  self._get_framework_latest_version(path))
+        for each in trials:
+            trial = path / each
+            if trial.is_dir():
+                return trial.as_posix()
+        return None
+
+    def log_info(self) -> str:
+        return self.framework_path or ''
+
+    @staticmethod
+    def log_tried() -> str:
+        return 'framework'
diff --git a/vendored-meson/meson/mesonbuild/dependencies/hdf5.py b/vendored-meson/meson/mesonbuild/dependencies/hdf5.py
new file mode 100644
index 000000000000..392bb0964466
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/dependencies/hdf5.py
@@ -0,0 +1,183 @@
+# Copyright 2013-2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This file contains the detection logic for miscellaneous external dependencies.
+from __future__ import annotations
+
+import functools
+import os
+import re
+import subprocess
+from pathlib import Path
+
+from ..mesonlib import Popen_safe, OrderedSet, join_args
+from ..programs import ExternalProgram
+from .base import DependencyException, DependencyMethods
+from .configtool import ConfigToolDependency
+from .detect import packages
+from .pkgconfig import PkgConfigDependency
+from .factory import factory_methods
+import typing as T
+
+if T.TYPE_CHECKING:
+    from .factory import DependencyGenerator
+    from ..environment import Environment
+    from ..mesonlib import MachineChoice
+
+
+class HDF5PkgConfigDependency(PkgConfigDependency):
+
+    """Handle brokenness in the HDF5 pkg-config files."""
+
+    def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any], language: T.Optional[str] = None) -> None:
+        language = language or 'c'
+        if language not in {'c', 'cpp', 'fortran'}:
+            raise DependencyException(f'Language {language} is not supported with HDF5.')
+
+        super().__init__(name, environment, kwargs, language)
+        if not self.is_found:
+            return
+
+        # some broken pkg-config files don't actually list the full path to the needed includes
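+        # e.g. for a shared build, '-I/usr/include/hdf5' would also gain
+        # '-I/usr/include/hdf5/shared' when that directory exists
+        # (paths illustrative)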
+        newinc = []  # type: T.List[str]
+        for arg in self.compile_args:
+            if arg.startswith('-I'):
+                stem = 'static' if self.static else 'shared'
+                if (Path(arg[2:]) / stem).is_dir():
+                    newinc.append('-I' + str(Path(arg[2:]) / stem))
+        self.compile_args += newinc
+
+        link_args = []  # type: T.List[str]
+        for larg in self.get_link_args():
+            lpath = Path(larg)
+            # some pkg-config hdf5.pc files (e.g. Ubuntu's) don't include the commonly-used
+            # HL HDF5 libraries, so let's add them if they exist.
+            # Additionally, some pkg-config HDF5 HL files are malformed, so let's be sure to find HL anyway.
+            if lpath.is_file():
+                hl = []
+                if language == 'cpp':
+                    hl += ['_hl_cpp', '_cpp']
+                elif language == 'fortran':
+                    hl += ['_hl_fortran', 'hl_fortran', '_fortran']
+                hl += ['_hl']  # C HL library, always needed
+
+                suffix = '.' + lpath.name.split('.', 1)[1]  # in case of .dll.a
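+                # e.g. 'libhdf5.dll.a' -> suffix '.dll.a', so an HL candidate
+                # becomes 'libhdf5_hl.dll.a' (names illustrative)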
+                for h in hl:
+                    hlfn = lpath.parent / (lpath.name.split('.', 1)[0] + h + suffix)
+                    if hlfn.is_file():
+                        link_args.append(str(hlfn))
+                # HDF5 C libs are required by other HDF5 languages
+                link_args.append(larg)
+            else:
+                link_args.append(larg)
+
+        self.link_args = link_args
+
+
+class HDF5ConfigToolDependency(ConfigToolDependency):
+
+    """Wrapper around hdf5 binary config tools."""
+
+    version_arg = '-showconfig'
+
+    def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any], language: T.Optional[str] = None) -> None:
+        language = language or 'c'
+        if language not in {'c', 'cpp', 'fortran'}:
+            raise DependencyException(f'Language {language} is not supported with HDF5.')
+
+        if language == 'c':
+            cenv = 'CC'
+            tools = ['h5cc', 'h5pcc']
+        elif language == 'cpp':
+            cenv = 'CXX'
+            tools = ['h5c++', 'h5pc++']
+        elif language == 'fortran':
+            cenv = 'FC'
+            tools = ['h5fc', 'h5pfc']
+        else:
+            raise DependencyException('How did you get here?')
+
+        # We need this before we call super()
+        for_machine = self.get_for_machine_from_kwargs(kwargs)
+
+        nkwargs = kwargs.copy()
+        nkwargs['tools'] = tools
+
+        # Override the compiler that the config tools are going to use by
+        # setting the environment variables that they use for the compiler and
+        # linkers.
+        compiler = environment.coredata.compilers[for_machine][language]
+        try:
+            os.environ[f'HDF5_{cenv}'] = join_args(compiler.get_exelist())
+            os.environ[f'HDF5_{cenv}LINKER'] = join_args(compiler.get_linker_exelist())
+            super().__init__(name, environment, nkwargs, language)
+        finally:
+            del os.environ[f'HDF5_{cenv}']
+            del os.environ[f'HDF5_{cenv}LINKER']
+        if not self.is_found:
+            return
+
+        # We first need to call the tool with -c to get the compile arguments
+        # and then without -c to get the link arguments.
+        args = self.get_config_value(['-show', '-c'], 'args')[1:]
+        args += self.get_config_value(['-show', '-noshlib' if self.static else '-shlib'], 'args')[1:]
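+        # The loop below sorts the tool's output: -I/-D/-f flags and -pthread
+        # are compile args; -L/-l/-Wl flags and bare library files are link args.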
+        for arg in args:
+            if arg.startswith(('-I', '-f', '-D')) or arg == '-pthread':
+                self.compile_args.append(arg)
+            elif arg.startswith(('-L', '-l', '-Wl')):
+                self.link_args.append(arg)
+            elif Path(arg).is_file():
+                self.link_args.append(arg)
+
+        # If the language is not C we need to add C as a subdependency
+        if language != 'c':
+            nkwargs = kwargs.copy()
+            nkwargs['language'] = 'c'
+            # I'm being too clever for mypy and pylint
+            self.is_found = self._add_sub_dependency(hdf5_factory(environment, for_machine, nkwargs))  # pylint: disable=no-value-for-parameter
+
+    def _sanitize_version(self, ver: str) -> str:
+        v = re.search(r'\s*HDF5 Version: (\d+\.\d+\.\d+)', ver)
+        return v.group(1)
+
+
+@factory_methods({DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL})
+def hdf5_factory(env: 'Environment', for_machine: 'MachineChoice',
+                 kwargs: T.Dict[str, T.Any], methods: T.List[DependencyMethods]) -> T.List['DependencyGenerator']:
+    language = kwargs.get('language')
+    candidates: T.List['DependencyGenerator'] = []
+
+    if DependencyMethods.PKGCONFIG in methods:
+        # Use an ordered set so that these remain the first tried pkg-config files
+        pkgconfig_files = OrderedSet(['hdf5', 'hdf5-serial'])
+        PCEXE = PkgConfigDependency._detect_pkgbin(False, env, for_machine)
+        pcenv = PkgConfigDependency.setup_env(os.environ, env, for_machine)
+        if PCEXE:
+            assert isinstance(PCEXE, ExternalProgram)
+            # some distros ship e.g. hdf5-1.2.3.pc, i.e. with the version number in the .pc filename.
+            ret, stdout, _ = Popen_safe(PCEXE.get_command() + ['--list-all'], stderr=subprocess.DEVNULL, env=pcenv)
+            if ret.returncode == 0:
+                for pkg in stdout.split('\n'):
+                    if pkg.startswith('hdf5'):
+                        pkgconfig_files.add(pkg.split(' ', 1)[0])
+
+        for pkg in pkgconfig_files:
+            candidates.append(functools.partial(HDF5PkgConfigDependency, pkg, env, kwargs, language))
+
+    if DependencyMethods.CONFIG_TOOL in methods:
+        candidates.append(functools.partial(HDF5ConfigToolDependency, 'hdf5', env, kwargs, language))
+
+    return candidates
+
+packages['hdf5'] = hdf5_factory
diff --git a/vendored-meson/meson/mesonbuild/dependencies/misc.py b/vendored-meson/meson/mesonbuild/dependencies/misc.py
new file mode 100644
index 000000000000..d77566961a3c
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/dependencies/misc.py
@@ -0,0 +1,617 @@
+# Copyright 2013-2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This file contains the detection logic for miscellaneous external dependencies.
+from __future__ import annotations
+
+import functools
+import re
+import typing as T
+
+from .. import mesonlib
+from .. import mlog
+from .base import DependencyException, DependencyMethods
+from .base import BuiltinDependency, SystemDependency
+from .cmake import CMakeDependency
+from .configtool import ConfigToolDependency
+from .detect import packages
+from .factory import DependencyFactory, factory_methods
+from .pkgconfig import PkgConfigDependency
+
+if T.TYPE_CHECKING:
+    from ..environment import Environment, MachineChoice
+    from .factory import DependencyGenerator
+
+
+@factory_methods({DependencyMethods.PKGCONFIG, DependencyMethods.CMAKE})
+def netcdf_factory(env: 'Environment',
+                   for_machine: 'MachineChoice',
+                   kwargs: T.Dict[str, T.Any],
+                   methods: T.List[DependencyMethods]) -> T.List['DependencyGenerator']:
+    language = kwargs.get('language', 'c')
+    if language not in ('c', 'cpp', 'fortran'):
+        raise DependencyException(f'Language {language} is not supported with NetCDF.')
+
+    candidates: T.List['DependencyGenerator'] = []
+
+    if DependencyMethods.PKGCONFIG in methods:
+        if language == 'fortran':
+            pkg = 'netcdf-fortran'
+        else:
+            pkg = 'netcdf'
+
+        candidates.append(functools.partial(PkgConfigDependency, pkg, env, kwargs, language=language))
+
+    if DependencyMethods.CMAKE in methods:
+        candidates.append(functools.partial(CMakeDependency, 'NetCDF', env, kwargs, language=language))
+
+    return candidates
+
+packages['netcdf'] = netcdf_factory
+
+
+class DlBuiltinDependency(BuiltinDependency):
+    def __init__(self, name: str, env: 'Environment', kwargs: T.Dict[str, T.Any]):
+        super().__init__(name, env, kwargs)
+        self.feature_since = ('0.62.0', "consider checking for `dlopen` with and without `find_library('dl')`")
+
+        if self.clib_compiler.has_function('dlopen', '#include <dlfcn.h>', env)[0]:
+            self.is_found = True
+
+
+class DlSystemDependency(SystemDependency):
+    def __init__(self, name: str, env: 'Environment', kwargs: T.Dict[str, T.Any]):
+        super().__init__(name, env, kwargs)
+        self.feature_since = ('0.62.0', "consider checking for `dlopen` with and without `find_library('dl')`")
+
+        h = self.clib_compiler.has_header('dlfcn.h', '', env)
+        self.link_args = self.clib_compiler.find_library('dl', env, [], self.libtype)
+
+        if h[0] and self.link_args:
+            self.is_found = True
+
+
+class OpenMPDependency(SystemDependency):
+    # Map date of specification release (which is the macro value) to a version.
+    VERSIONS = {
+        '201811': '5.0',
+        '201611': '5.0-revision1',  # This is supported by ICC 19.x
+        '201511': '4.5',
+        '201307': '4.0',
+        '201107': '3.1',
+        '200805': '3.0',
+        '200505': '2.5',
+        '200203': '2.0',
+        '199810': '1.0',
+    }
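+    # e.g. a compiler that defines _OPENMP as 201511 is reported as OpenMP '4.5'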
+
+    def __init__(self, environment: 'Environment', kwargs: T.Dict[str, T.Any]) -> None:
+        language = kwargs.get('language')
+        super().__init__('openmp', environment, kwargs, language=language)
+        self.is_found = False
+        if self.clib_compiler.get_id() == 'nagfor':
+            # No macro defined for OpenMP, but OpenMP 3.1 is supported.
+            self.version = '3.1'
+            self.is_found = True
+            self.compile_args = self.link_args = self.clib_compiler.openmp_flags()
+            return
+        if self.clib_compiler.get_id() == 'pgi':
+            # through at least PGI 19.4, there is no macro defined for OpenMP, but OpenMP 3.1 is supported.
+            self.version = '3.1'
+            self.is_found = True
+            self.compile_args = self.link_args = self.clib_compiler.openmp_flags()
+            return
+        try:
+            openmp_date = self.clib_compiler.get_define(
+                '_OPENMP', '', self.env, self.clib_compiler.openmp_flags(), [self], disable_cache=True)[0]
+        except mesonlib.EnvironmentException as e:
+            mlog.debug('OpenMP support not available in the compiler')
+            mlog.debug(e)
+            openmp_date = None
+
+        if openmp_date:
+            try:
+                self.version = self.VERSIONS[openmp_date]
+            except KeyError:
+                mlog.debug(f'Could not find an OpenMP version matching {openmp_date}')
+                if openmp_date == '_OPENMP':
+                    mlog.debug('This can be caused by flags such as gcc\'s `-fdirectives-only`, which affect preprocessor behavior.')
+                return
+            # Flang has omp_lib.h
+            header_names = ('omp.h', 'omp_lib.h')
+            for name in header_names:
+                if self.clib_compiler.has_header(name, '', self.env, dependencies=[self], disable_cache=True)[0]:
+                    self.is_found = True
+                    self.compile_args = self.clib_compiler.openmp_flags()
+                    self.link_args = self.clib_compiler.openmp_link_flags()
+                    break
+            if not self.is_found:
+                mlog.log(mlog.yellow('WARNING:'), 'OpenMP found but omp.h missing.')
+
+packages['openmp'] = OpenMPDependency
+
+
+class ThreadDependency(SystemDependency):
+    def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]) -> None:
+        super().__init__(name, environment, kwargs)
+        self.is_found = True
+        # Happens if you are using a language with a threads
+        # concept but without C, such as plain Cuda.
+        if not self.clib_compiler:
+            self.compile_args = []
+            self.link_args = []
+        else:
+            self.compile_args = self.clib_compiler.thread_flags(environment)
+            self.link_args = self.clib_compiler.thread_link_flags(environment)
+
+
+class BlocksDependency(SystemDependency):
+    def __init__(self, environment: 'Environment', kwargs: T.Dict[str, T.Any]) -> None:
+        super().__init__('blocks', environment, kwargs)
+        self.name = 'blocks'
+        self.is_found = False
+
+        if self.env.machines[self.for_machine].is_darwin():
+            self.compile_args = []
+            self.link_args = []
+        else:
+            self.compile_args = ['-fblocks']
+            self.link_args = ['-lBlocksRuntime']
+
+            if not self.clib_compiler.has_header('Block.h', '', environment, disable_cache=True)[0] or \
+               not self.clib_compiler.find_library('BlocksRuntime', environment, []):
+                mlog.log(mlog.red('ERROR:'), 'BlocksRuntime not found.')
+                return
+
+        source = '''
+            int main(int argc, char **argv)
+            {
+                int (^callback)(void) = ^ int (void) { return 0; };
+                return callback();
+            }'''
+
+        with self.clib_compiler.compile(source, extra_args=self.compile_args + self.link_args) as p:
+            if p.returncode != 0:
+                mlog.log(mlog.red('ERROR:'), 'Compiler does not support blocks extension.')
+                return
+
+            self.is_found = True
+
+packages['blocks'] = BlocksDependency
+
+
+class PcapDependencyConfigTool(ConfigToolDependency):
+
+    tools = ['pcap-config']
+    tool_name = 'pcap-config'
+
+    # version 1.10.2 added error checking for invalid arguments
+    # version 1.10.3 will hopefully add actual support for --version
+    skip_version = '--help'
+
+    def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]):
+        super().__init__(name, environment, kwargs)
+        if not self.is_found:
+            return
+        self.compile_args = self.get_config_value(['--cflags'], 'compile_args')
+        self.link_args = self.get_config_value(['--libs'], 'link_args')
+        if self.version is None:
+            # older pcap-config versions don't support this
+            self.version = self.get_pcap_lib_version()
+
+    def get_pcap_lib_version(self) -> T.Optional[str]:
+        # Since we seem to need to run a program to discover the pcap version,
+        # we can't do that when cross-compiling
+        # FIXME: this should be handled if we have an exe_wrapper
+        if not self.env.machines.matches_build_machine(self.for_machine):
+            return None
+
+        v = self.clib_compiler.get_return_value('pcap_lib_version', 'string',
+                                                '#include <pcap.h>', self.env, [], [self])
+        v = re.sub(r'libpcap version ', '', str(v))
+        v = re.sub(r' -- Apple version.*$', '', v)
+        return v
+
+
+class CupsDependencyConfigTool(ConfigToolDependency):
+
+    tools = ['cups-config']
+    tool_name = 'cups-config'
+
+    def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]):
+        super().__init__(name, environment, kwargs)
+        if not self.is_found:
+            return
+        self.compile_args = self.get_config_value(['--cflags'], 'compile_args')
+        self.link_args = self.get_config_value(['--ldflags', '--libs'], 'link_args')
+
+
+class LibWmfDependencyConfigTool(ConfigToolDependency):
+
+    tools = ['libwmf-config']
+    tool_name = 'libwmf-config'
+
+    def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]):
+        super().__init__(name, environment, kwargs)
+        if not self.is_found:
+            return
+        self.compile_args = self.get_config_value(['--cflags'], 'compile_args')
+        self.link_args = self.get_config_value(['--libs'], 'link_args')
+
+
+class LibGCryptDependencyConfigTool(ConfigToolDependency):
+
+    tools = ['libgcrypt-config']
+    tool_name = 'libgcrypt-config'
+
+    def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]):
+        super().__init__(name, environment, kwargs)
+        if not self.is_found:
+            return
+        self.compile_args = self.get_config_value(['--cflags'], 'compile_args')
+        self.link_args = self.get_config_value(['--libs'], 'link_args')
+        self.version = self.get_config_value(['--version'], 'version')[0]
+
+
+class GpgmeDependencyConfigTool(ConfigToolDependency):
+
+    tools = ['gpgme-config']
+    tool_name = 'gpgme-config'
+
+    def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]):
+        super().__init__(name, environment, kwargs)
+        if not self.is_found:
+            return
+        self.compile_args = self.get_config_value(['--cflags'], 'compile_args')
+        self.link_args = self.get_config_value(['--libs'], 'link_args')
+        self.version = self.get_config_value(['--version'], 'version')[0]
+
+
+class ShadercDependency(SystemDependency):
+
+    def __init__(self, environment: 'Environment', kwargs: T.Dict[str, T.Any]):
+        super().__init__('shaderc', environment, kwargs)
+
+        static_lib = 'shaderc_combined'
+        shared_lib = 'shaderc_shared'
+
+        libs = [shared_lib, static_lib]
+        if self.static:
+            libs.reverse()
+
+        cc = self.get_compiler()
+
+        for lib in libs:
+            self.link_args = cc.find_library(lib, environment, [])
+            if self.link_args is not None:
+                self.is_found = True
+
+                if self.static and lib != static_lib:
+                    mlog.warning(f'Static library {static_lib!r} not found for dependency '
+                                 f'{self.name!r}, may not be statically linked')
+
+                break
+
+
+class CursesConfigToolDependency(ConfigToolDependency):
+
+    """Use the curses config tools."""
+
+    tool = 'curses-config'
+    # ncurses5.4-config is for macOS Catalina
+    tools = ['ncursesw6-config', 'ncursesw5-config', 'ncurses6-config', 'ncurses5-config', 'ncurses5.4-config']
+
+    def __init__(self, name: str, env: 'Environment', kwargs: T.Dict[str, T.Any], language: T.Optional[str] = None):
+        super().__init__(name, env, kwargs, language)
+        if not self.is_found:
+            return
+        self.compile_args = self.get_config_value(['--cflags'], 'compile_args')
+        self.link_args = self.get_config_value(['--libs'], 'link_args')
+
+
+class CursesSystemDependency(SystemDependency):
+
+    """Curses dependency the hard way.
+
+    This replaces hand-rolled find_library() and has_header() calls. We
+    provide this for portability reasons: there are a large number of curses
+    implementations, and the differences between them can be very annoying.
+    """
+
+    def __init__(self, name: str, env: 'Environment', kwargs: T.Dict[str, T.Any]):
+        super().__init__(name, env, kwargs)
+
+        candidates = [
+            ('pdcurses', ['pdcurses/curses.h']),
+            ('ncursesw',  ['ncursesw/ncurses.h', 'ncurses.h']),
+            ('ncurses',  ['ncurses/ncurses.h', 'ncurses/curses.h', 'ncurses.h']),
+            ('curses',  ['curses.h']),
+        ]
+
+        # Not sure how else to elegantly break out of both loops
+        for lib, headers in candidates:
+            l = self.clib_compiler.find_library(lib, env, [])
+            if l:
+                for header in headers:
+                    h = self.clib_compiler.has_header(header, '', env)
+                    if h[0]:
+                        self.is_found = True
+                        self.link_args = l
+                        # Not sure how to find version for non-ncurses curses
+                        # implementations. The one in illumos/OpenIndiana
+                        # doesn't seem to have a version defined in the header.
+                        if lib.startswith('ncurses'):
+                            v, _ = self.clib_compiler.get_define('NCURSES_VERSION', f'#include <{header}>', env, [], [self])
+                            self.version = v.strip('"')
+                        if lib.startswith('pdcurses'):
+                            v_major, _ = self.clib_compiler.get_define('PDC_VER_MAJOR', f'#include <{header}>', env, [], [self])
+                            v_minor, _ = self.clib_compiler.get_define('PDC_VER_MINOR', f'#include <{header}>', env, [], [self])
+                            self.version = f'{v_major}.{v_minor}'
+
+                        # Check the version if possible, emit a warning if we can't
+                        req = kwargs.get('version')
+                        if req:
+                            if self.version:
+                                self.is_found = mesonlib.version_compare(self.version, req)
+                            else:
+                                mlog.warning('Cannot determine version of curses to compare against.')
+
+                        if self.is_found:
+                            mlog.debug('Curses library:', l)
+                            mlog.debug('Curses header:', header)
+                            break
+            if self.is_found:
+                break
+
+
+class IconvBuiltinDependency(BuiltinDependency):
+    def __init__(self, name: str, env: 'Environment', kwargs: T.Dict[str, T.Any]):
+        super().__init__(name, env, kwargs)
+        self.feature_since = ('0.60.0', "consider checking for `iconv_open` with and without `find_library('iconv')`")
+        code = '''#include <iconv.h>\n\nint main() {\n    iconv_open("","");\n}''' # [ignore encoding] this is C, not python, Mr. Lint
+
+        if self.clib_compiler.links(code, env)[0]:
+            self.is_found = True
+
+
+class IconvSystemDependency(SystemDependency):
+    def __init__(self, name: str, env: 'Environment', kwargs: T.Dict[str, T.Any]):
+        super().__init__(name, env, kwargs)
+        self.feature_since = ('0.60.0', "consider checking for `iconv_open` with and without find_library('iconv')")
+
+        h = self.clib_compiler.has_header('iconv.h', '', env)
+        self.link_args = self.clib_compiler.find_library('iconv', env, [], self.libtype)
+
+        if h[0] and self.link_args:
+            self.is_found = True
+
+
+class IntlBuiltinDependency(BuiltinDependency):
+    def __init__(self, name: str, env: 'Environment', kwargs: T.Dict[str, T.Any]):
+        super().__init__(name, env, kwargs)
+        self.feature_since = ('0.59.0', "consider checking for `ngettext` with and without `find_library('intl')`")
+        code = '''#include <libintl.h>\n\nint main() {\n    gettext("Hello world");\n}'''
+
+        if self.clib_compiler.links(code, env)[0]:
+            self.is_found = True
+
+
+class IntlSystemDependency(SystemDependency):
+    def __init__(self, name: str, env: 'Environment', kwargs: T.Dict[str, T.Any]):
+        super().__init__(name, env, kwargs)
+        self.feature_since = ('0.59.0', "consider checking for `ngettext` with and without `find_library('intl')`")
+
+        h = self.clib_compiler.has_header('libintl.h', '', env)
+        self.link_args = self.clib_compiler.find_library('intl', env, [], self.libtype)
+
+        if h[0] and self.link_args:
+            self.is_found = True
+
+            if self.static:
+                if not self._add_sub_dependency(iconv_factory(env, self.for_machine, {'static': True})):
+                    self.is_found = False
+                    return
+
+
+class OpensslSystemDependency(SystemDependency):
+    def __init__(self, name: str, env: 'Environment', kwargs: T.Dict[str, T.Any]):
+        super().__init__(name, env, kwargs)
+
+        dependency_kwargs = {
+            'method': 'system',
+            'static': self.static,
+        }
+        if not self.clib_compiler.has_header('openssl/ssl.h', '', env)[0]:
+            return
+
+        # openssl >= 3 only
+        self.version = self.clib_compiler.get_define('OPENSSL_VERSION_STR', '#include <openssl/opensslv.h>', env, [], [self])[0]
+        # openssl < 3 only
+        if not self.version:
+            version_hex = self.clib_compiler.get_define('OPENSSL_VERSION_NUMBER', '#include <openssl/opensslv.h>', env, [], [self])[0]
+            if not version_hex:
+                return
+            version_hex = version_hex.rstrip('L')
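+            # OPENSSL_VERSION_NUMBER packs the version as 0xMNNFFPPS (nibbles:
+            # major, minor, fix, patch, status), so the shifts below are
+            # 28/20/12/4 bits; note that `>> 4 + i` parses as `>> (4 + i)`.
+            # Worked example: 0x1010107f -> [1, 1, 1, 7] -> '1.1.1g'.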
+            version_ints = [((int(version_hex, 16) >> 4 + i) & 0xFF) for i in (24, 16, 8, 0)]
+            # since this is openssl, the format is 1.2.3a in four parts
+            self.version = '.'.join(str(i) for i in version_ints[:3]) + chr(ord('a') + version_ints[3] - 1)
+
+        if name == 'openssl':
+            if self._add_sub_dependency(libssl_factory(env, self.for_machine, dependency_kwargs)) and \
+                    self._add_sub_dependency(libcrypto_factory(env, self.for_machine, dependency_kwargs)):
+                self.is_found = True
+            return
+        else:
+            self.link_args = self.clib_compiler.find_library(name.lstrip('lib'), env, [], self.libtype)
+            if not self.link_args:
+                return
+
+        if not self.static:
+            self.is_found = True
+        else:
+            if name == 'libssl':
+                if self._add_sub_dependency(libcrypto_factory(env, self.for_machine, dependency_kwargs)):
+                    self.is_found = True
+            elif name == 'libcrypto':
+                use_threads = self.clib_compiler.has_header_symbol('openssl/opensslconf.h', 'OPENSSL_THREADS', '', env, dependencies=[self])[0]
+                if not use_threads or self._add_sub_dependency(threads_factory(env, self.for_machine, {})):
+                    self.is_found = True
+                # only relevant on platforms where it is distributed with the libc, in which case it always succeeds
+                sublib = self.clib_compiler.find_library('dl', env, [], self.libtype)
+                if sublib:
+                    self.link_args.extend(sublib)
+
+
+@factory_methods({DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL, DependencyMethods.SYSTEM})
+def curses_factory(env: 'Environment',
+                   for_machine: 'MachineChoice',
+                   kwargs: T.Dict[str, T.Any],
+                   methods: T.List[DependencyMethods]) -> T.List['DependencyGenerator']:
+    candidates: T.List['DependencyGenerator'] = []
+
+    if DependencyMethods.PKGCONFIG in methods:
+        pkgconfig_files = ['pdcurses', 'ncursesw', 'ncurses', 'curses']
+        for pkg in pkgconfig_files:
+            candidates.append(functools.partial(PkgConfigDependency, pkg, env, kwargs))
+
+    # There are path handling problems with these methods on msys, and they
+    # don't apply to windows otherwise (cygwin is handled separately from
+    # windows)
+    if not env.machines[for_machine].is_windows():
+        if DependencyMethods.CONFIG_TOOL in methods:
+            candidates.append(functools.partial(CursesConfigToolDependency, 'curses', env, kwargs))
+
+        if DependencyMethods.SYSTEM in methods:
+            candidates.append(functools.partial(CursesSystemDependency, 'curses', env, kwargs))
+
+    return candidates
+packages['curses'] = curses_factory
+
+
+@factory_methods({DependencyMethods.PKGCONFIG, DependencyMethods.SYSTEM})
+def shaderc_factory(env: 'Environment',
+                    for_machine: 'MachineChoice',
+                    kwargs: T.Dict[str, T.Any],
+                    methods: T.List[DependencyMethods]) -> T.List['DependencyGenerator']:
+    """Custom DependencyFactory for ShaderC.
+
+    ShaderC's oddity of yielding three different libraries from the same
+    build is easier to represent as a separate function than by twisting
+    DependencyFactory even more.
+    """
+    candidates: T.List['DependencyGenerator'] = []
+
+    if DependencyMethods.PKGCONFIG in methods:
+        # ShaderC packages their shared and static libs together
+        # and provides different pkg-config files for each one. We
+        # smooth over this difference by handling the static
+        # keyword before handing off to the pkg-config handler.
+        shared_libs = ['shaderc']
+        static_libs = ['shaderc_combined', 'shaderc_static']
+
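+        # e.g. with static=true the probe order is shaderc_combined,
+        # shaderc_static, then shaderc; otherwise shaderc is tried first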
+        if kwargs.get('static', env.coredata.get_option(mesonlib.OptionKey('prefer_static'))):
+            c = [functools.partial(PkgConfigDependency, name, env, kwargs)
+                 for name in static_libs + shared_libs]
+        else:
+            c = [functools.partial(PkgConfigDependency, name, env, kwargs)
+                 for name in shared_libs + static_libs]
+        candidates.extend(c)
+
+    if DependencyMethods.SYSTEM in methods:
+        candidates.append(functools.partial(ShadercDependency, env, kwargs))
+
+    return candidates
+packages['shaderc'] = shaderc_factory
+
+
+packages['cups'] = cups_factory = DependencyFactory(
+    'cups',
+    [DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL, DependencyMethods.EXTRAFRAMEWORK, DependencyMethods.CMAKE],
+    configtool_class=CupsDependencyConfigTool,
+    cmake_name='Cups',
+)
+
+packages['dl'] = dl_factory = DependencyFactory(
+    'dl',
+    [DependencyMethods.BUILTIN, DependencyMethods.SYSTEM],
+    builtin_class=DlBuiltinDependency,
+    system_class=DlSystemDependency,
+)
+
+packages['gpgme'] = gpgme_factory = DependencyFactory(
+    'gpgme',
+    [DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL],
+    configtool_class=GpgmeDependencyConfigTool,
+)
+
+packages['libgcrypt'] = libgcrypt_factory = DependencyFactory(
+    'libgcrypt',
+    [DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL],
+    configtool_class=LibGCryptDependencyConfigTool,
+)
+
+packages['libwmf'] = libwmf_factory = DependencyFactory(
+    'libwmf',
+    [DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL],
+    configtool_class=LibWmfDependencyConfigTool,
+)
+
+packages['pcap'] = pcap_factory = DependencyFactory(
+    'pcap',
+    [DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL],
+    configtool_class=PcapDependencyConfigTool,
+    pkgconfig_name='libpcap',
+)
+
+packages['threads'] = threads_factory = DependencyFactory(
+    'threads',
+    [DependencyMethods.SYSTEM, DependencyMethods.CMAKE],
+    cmake_name='Threads',
+    system_class=ThreadDependency,
+)
+
+packages['iconv'] = iconv_factory = DependencyFactory(
+    'iconv',
+    [DependencyMethods.BUILTIN, DependencyMethods.SYSTEM],
+    builtin_class=IconvBuiltinDependency,
+    system_class=IconvSystemDependency,
+)
+
+packages['intl'] = intl_factory = DependencyFactory(
+    'intl',
+    [DependencyMethods.BUILTIN, DependencyMethods.SYSTEM],
+    builtin_class=IntlBuiltinDependency,
+    system_class=IntlSystemDependency,
+)
+
+packages['openssl'] = openssl_factory = DependencyFactory(
+    'openssl',
+    [DependencyMethods.PKGCONFIG, DependencyMethods.SYSTEM, DependencyMethods.CMAKE],
+    system_class=OpensslSystemDependency,
+    cmake_class=lambda name, env, kwargs: CMakeDependency('OpenSSL', env, dict(kwargs, modules=['OpenSSL::Crypto', 'OpenSSL::SSL'])),
+)
+
+packages['libcrypto'] = libcrypto_factory = DependencyFactory(
+    'libcrypto',
+    [DependencyMethods.PKGCONFIG, DependencyMethods.SYSTEM, DependencyMethods.CMAKE],
+    system_class=OpensslSystemDependency,
+    cmake_class=lambda name, env, kwargs: CMakeDependency('OpenSSL', env, dict(kwargs, modules=['OpenSSL::Crypto'])),
+)
+
+packages['libssl'] = libssl_factory = DependencyFactory(
+    'libssl',
+    [DependencyMethods.PKGCONFIG, DependencyMethods.SYSTEM, DependencyMethods.CMAKE],
+    system_class=OpensslSystemDependency,
+    cmake_class=lambda name, env, kwargs: CMakeDependency('OpenSSL', env, dict(kwargs, modules=['OpenSSL::SSL'])),
+)
diff --git a/vendored-meson/meson/mesonbuild/dependencies/mpi.py b/vendored-meson/meson/mesonbuild/dependencies/mpi.py
new file mode 100644
index 000000000000..910068143312
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/dependencies/mpi.py
@@ -0,0 +1,240 @@
+# Copyright 2013-2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+import functools
+import typing as T
+import os
+import re
+
+from ..environment import detect_cpu_family
+from .base import DependencyMethods, detect_compiler, SystemDependency
+from .configtool import ConfigToolDependency
+from .detect import packages
+from .factory import factory_methods
+from .pkgconfig import PkgConfigDependency
+
+if T.TYPE_CHECKING:
+    from .factory import DependencyGenerator
+    from ..environment import Environment, MachineChoice
+
+
+@factory_methods({DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL, DependencyMethods.SYSTEM})
+def mpi_factory(env: 'Environment',
+                for_machine: 'MachineChoice',
+                kwargs: T.Dict[str, T.Any],
+                methods: T.List[DependencyMethods]) -> T.List['DependencyGenerator']:
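+    # Illustrative meson.build usage that ends up in this factory (hypothetical
+    # snippet, not part of this file):
+    #   mpi = dependency('mpi', language: 'fortran', required: false)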
+    language = kwargs.get('language', 'c')
+    if language not in {'c', 'cpp', 'fortran'}:
+        # MPI bindings only exist for C, C++ and Fortran
+        return []
+
+    candidates: T.List['DependencyGenerator'] = []
+    compiler = detect_compiler('mpi', env, for_machine, language)
+    if not compiler:
+        return []
+    compiler_is_intel = compiler.get_id() in {'intel', 'intel-cl'}
+
+    # Only OpenMPI has pkg-config files, and they don't work with the Intel compilers
+    if DependencyMethods.PKGCONFIG in methods and not compiler_is_intel:
+        pkg_name = None
+        if language == 'c':
+            pkg_name = 'ompi-c'
+        elif language == 'cpp':
+            pkg_name = 'ompi-cxx'
+        elif language == 'fortran':
+            pkg_name = 'ompi-fort'
+        candidates.append(functools.partial(
+            PkgConfigDependency, pkg_name, env, kwargs, language=language))
+
+    if DependencyMethods.CONFIG_TOOL in methods:
+        nwargs = kwargs.copy()
+
+        if compiler_is_intel:
+            if env.machines[for_machine].is_windows():
+                nwargs['version_arg'] = '-v'
+                nwargs['returncode_value'] = 3
+
+            if language == 'c':
+                tool_names = [os.environ.get('I_MPI_CC'), 'mpiicc']
+            elif language == 'cpp':
+                tool_names = [os.environ.get('I_MPI_CXX'), 'mpiicpc']
+            elif language == 'fortran':
+                tool_names = [os.environ.get('I_MPI_F90'), 'mpiifort']
+
+            cls = IntelMPIConfigToolDependency  # type: T.Type[ConfigToolDependency]
+        else:
+            # OpenMPI, which doesn't work with the Intel compilers.
+            # We try the environment variables for the tools first, but then
+            # fall back to the hardcoded names.
+            if language == 'c':
+                tool_names = [os.environ.get('MPICC'), 'mpicc']
+            elif language == 'cpp':
+                tool_names = [os.environ.get('MPICXX'), 'mpic++', 'mpicxx', 'mpiCC']
+            elif language == 'fortran':
+                tool_names = [os.environ.get(e) for e in ['MPIFC', 'MPIF90', 'MPIF77']]
+                tool_names.extend(['mpifort', 'mpif90', 'mpif77'])
+
+            cls = OpenMPIConfigToolDependency
+
+        tool_names = [t for t in tool_names if t]  # remove empty environment variables
+        assert tool_names
+
+        nwargs['tools'] = tool_names
+        candidates.append(functools.partial(
+            cls, tool_names[0], env, nwargs, language=language))
+
+    if DependencyMethods.SYSTEM in methods:
+        candidates.append(functools.partial(
+            MSMPIDependency, 'msmpi', env, kwargs, language=language))
+
+    return candidates
+
+packages['mpi'] = mpi_factory
+
+
+class _MPIConfigToolDependency(ConfigToolDependency):
+
+    def _filter_compile_args(self, args: T.List[str]) -> T.List[str]:
+        """
+        MPI wrappers return a bunch of garbage args.
+        Drop -O2 and everything that is not needed.
+        """
+        result = []
+        multi_args: T.Tuple[str, ...] = ('-I', )
+        if self.language == 'fortran':
+            fc = self.env.coredata.compilers[self.for_machine]['fortran']
+            multi_args += fc.get_module_incdir_args()
+
+        include_next = False
+        for f in args:
+            if f.startswith(('-D', '-f') + multi_args) or f == '-pthread' \
+                    or (f.startswith('-W') and f != '-Wall' and not f.startswith('-Werror')):
+                result.append(f)
+                if f in multi_args:
+                    # Path is a separate argument.
+                    include_next = True
+            elif include_next:
+                include_next = False
+                result.append(f)
+        return result
+
+    def _filter_link_args(self, args: T.List[str]) -> T.List[str]:
+        """
+        MPI wrappers return a bunch of garbage args.
+        Drop -O2 and everything that is not needed.
+        """
+        result = []
+        include_next = False
+        for f in args:
+            if self._is_link_arg(f):
+                result.append(f)
+                if f in {'-L', '-Xlinker'}:
+                    include_next = True
+            elif include_next:
+                include_next = False
+                result.append(f)
+        return result
+
+    def _is_link_arg(self, f: str) -> bool:
+        if self.clib_compiler.id == 'intel-cl':
+            return f == '/link' or f.startswith('/LIBPATH') or f.endswith('.lib')   # always .lib whether static or dynamic
+        else:
+            return (f.startswith(('-L', '-l', '-Xlinker')) or
+                    f == '-pthread' or
+                    (f.startswith('-W') and f != '-Wall' and not f.startswith('-Werror')))
+
+
+class IntelMPIConfigToolDependency(_MPIConfigToolDependency):
+
+    """Wrapper around Intel's mpiicc and friends."""
+
+    version_arg = '-v'  # --version is not the same as -v
+
+    def __init__(self, name: str, env: 'Environment', kwargs: T.Dict[str, T.Any],
+                 language: T.Optional[str] = None):
+        super().__init__(name, env, kwargs, language=language)
+        if not self.is_found:
+            return
+
+        args = self.get_config_value(['-show'], 'link and compile args')
+        self.compile_args = self._filter_compile_args(args)
+        self.link_args = self._filter_link_args(args)
+
+    def _sanitize_version(self, out: str) -> str:
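+        # e.g. a banner containing '... 2019 Update 5 ...' becomes '2019.5'.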
+        v = re.search(r'(\d{4}) Update (\d)', out)
+        if v:
+            return '{}.{}'.format(v.group(1), v.group(2))
+        return out
+
+
+class OpenMPIConfigToolDependency(_MPIConfigToolDependency):
+
+    """Wrapper around OpenMPI mpicc and friends."""
+
+    version_arg = '--showme:version'
+
+    def __init__(self, name: str, env: 'Environment', kwargs: T.Dict[str, T.Any],
+                 language: T.Optional[str] = None):
+        super().__init__(name, env, kwargs, language=language)
+        if not self.is_found:
+            return
+
+        c_args = self.get_config_value(['--showme:compile'], 'compile_args')
+        self.compile_args = self._filter_compile_args(c_args)
+
+        l_args = self.get_config_value(['--showme:link'], 'link_args')
+        self.link_args = self._filter_link_args(l_args)
+
+    def _sanitize_version(self, out: str) -> str:
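+        # e.g. output containing 'Open MPI 4.1.2' becomes '4.1.2'.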
+        v = re.search(r'\d+\.\d+\.\d+', out)
+        if v:
+            return v.group(0)
+        return out
+
+
+class MSMPIDependency(SystemDependency):
+
+    """The Microsoft MPI."""
+
+    def __init__(self, name: str, env: 'Environment', kwargs: T.Dict[str, T.Any],
+                 language: T.Optional[str] = None):
+        super().__init__(name, env, kwargs, language=language)
+        # MSMPI only provides C and Fortran interfaces
+        if language not in {'c', 'fortran', None}:
+            self.is_found = False
+            return
+        # MSMPI is only for windows, obviously
+        if not self.env.machines[self.for_machine].is_windows():
+            return
+
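+        # MSMPI_INC and MSMPI_LIB32/MSMPI_LIB64 are normally set by the
+        # Microsoft MPI SDK installer.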
+        incdir = os.environ.get('MSMPI_INC')
+        arch = detect_cpu_family(self.env.coredata.compilers.host)
+        libdir = None
+        if arch == 'x86':
+            libdir = os.environ.get('MSMPI_LIB32')
+            post = 'x86'
+        elif arch == 'x86_64':
+            libdir = os.environ.get('MSMPI_LIB64')
+            post = 'x64'
+
+        if libdir is None or incdir is None:
+            self.is_found = False
+            return
+
+        self.is_found = True
+        self.link_args = ['-l' + os.path.join(libdir, 'msmpi')]
+        self.compile_args = ['-I' + incdir, '-I' + os.path.join(incdir, post)]
+        if self.language == 'fortran':
+            self.link_args.append('-l' + os.path.join(libdir, 'msmpifec'))
diff --git a/vendored-meson/meson/mesonbuild/dependencies/pkgconfig.py b/vendored-meson/meson/mesonbuild/dependencies/pkgconfig.py
new file mode 100644
index 000000000000..0c40847c9992
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/dependencies/pkgconfig.py
@@ -0,0 +1,500 @@
+# Copyright 2013-2021 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+from pathlib import Path, PurePath
+
+from .base import ExternalDependency, DependencyException, sort_libpaths, DependencyTypeName
+from ..mesonlib import OptionKey, OrderedSet, PerMachine, Popen_safe, Popen_safe_logged
+from ..programs import find_external_program, ExternalProgram
+from .. import mlog
+import re
+import os
+import shlex
+import typing as T
+
+if T.TYPE_CHECKING:
+    from ..environment import Environment
+    from ..mesonlib import MachineChoice
+    from ..utils.core import EnvironOrDict
+    from .._typing import ImmutableListProtocol
+    from ..build import EnvironmentVariables
+
+class PkgConfigDependency(ExternalDependency):
+    # The class's copy of the pkg-config path. Avoids having to search for it
+    # multiple times in the same Meson invocation.
+    class_pkgbin: PerMachine[T.Union[None, bool, ExternalProgram]] = PerMachine(None, None)
+    # We cache all pkg-config subprocess invocations to avoid redundant calls
+    pkgbin_cache: T.Dict[
+        T.Tuple[ExternalProgram, T.Tuple[str, ...], T.FrozenSet[T.Tuple[str, str]]],
+        T.Tuple[int, str, str]
+    ] = {}
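+    # The cache key is (program, args, frozenset(env)), so calls made with
+    # different PKG_CONFIG_* settings are cached separately.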
+
+    def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any], language: T.Optional[str] = None) -> None:
+        super().__init__(DependencyTypeName('pkgconfig'), environment, kwargs, language=language)
+        self.name = name
+        self.is_libtool = False
+        # Store a copy of the pkg-config path on the object itself so it is
+        # stored in the pickled coredata and recovered.
+        self.pkgbin = self._detect_pkgbin(self.silent, self.env, self.for_machine)
+        if self.pkgbin is False:
+            self.pkgbin = None
+            msg = f'Pkg-config binary for machine {self.for_machine} not found. Giving up.'
+            if self.required:
+                raise DependencyException(msg)
+            else:
+                mlog.debug(msg)
+                return
+
+        assert isinstance(self.pkgbin, ExternalProgram)
+        mlog.debug('Determining dependency {!r} with pkg-config executable '
+                   '{!r}'.format(name, self.pkgbin.get_path()))
+        ret, self.version, _ = self._call_pkgbin(['--modversion', name])
+        if ret != 0:
+            return
+
+        self.is_found = True
+
+        try:
+            # Fetch cargs to be used while using this dependency
+            self._set_cargs()
+            # Fetch the libraries and library paths needed for using this
+            self._set_libs()
+        except DependencyException as e:
+            mlog.debug(f"pkg-config error with '{name}': {e}")
+            if self.required:
+                raise
+            else:
+                self.compile_args = []
+                self.link_args = []
+                self.is_found = False
+                self.reason = e
+
+    def __repr__(self) -> str:
+        s = '<{0} {1}: {2} {3}>'
+        return s.format(self.__class__.__name__, self.name, self.is_found,
+                        self.version_reqs)
+
+    @classmethod
+    def _detect_pkgbin(cls, silent: bool, env: Environment,
+                       for_machine: MachineChoice) -> T.Union[None, bool, ExternalProgram]:
+        # Only search for pkg-config for each machine the first time and store
+        # the result in the class definition
+        if cls.class_pkgbin[for_machine] is False:
+            mlog.debug(f'Pkg-config binary for {for_machine} is cached as not found.')
+        elif cls.class_pkgbin[for_machine] is not None:
+            mlog.debug(f'Pkg-config binary for {for_machine} is cached.')
+        else:
+            assert cls.class_pkgbin[for_machine] is None, 'for mypy'
+            mlog.debug(f'Pkg-config binary for {for_machine} is not cached.')
+            for potential_pkgbin in find_external_program(
+                    env, for_machine, 'pkgconfig', 'Pkg-config',
+                    env.default_pkgconfig, allow_default_for_cross=False):
+                version_if_ok = cls.check_pkgconfig(env, potential_pkgbin)
+                if not version_if_ok:
+                    continue
+                if not silent:
+                    mlog.log('Found pkg-config:', mlog.bold(potential_pkgbin.get_path()),
+                             f'({version_if_ok})')
+                cls.class_pkgbin[for_machine] = potential_pkgbin
+                break
+            else:
+                if not silent:
+                    mlog.log('Found Pkg-config:', mlog.red('NO'))
+                # Set to False instead of None to signify that we've already
+                # searched for it and not found it
+                cls.class_pkgbin[for_machine] = False
+
+        return cls.class_pkgbin[for_machine]
+
+    def _call_pkgbin_real(self, args: T.List[str], env: T.Dict[str, str]) -> T.Tuple[int, str, str]:
+        assert isinstance(self.pkgbin, ExternalProgram)
+        cmd = self.pkgbin.get_command() + args
+        p, out, err = Popen_safe_logged(cmd, env=env)
+        return p.returncode, out.strip(), err.strip()
+
+    @staticmethod
+    def get_env(environment: 'Environment', for_machine: MachineChoice,
+                uninstalled: bool = False) -> 'EnvironmentVariables':
+        from ..build import EnvironmentVariables
+        env = EnvironmentVariables()
+        key = OptionKey('pkg_config_path', machine=for_machine)
+        extra_paths: T.List[str] = environment.coredata.options[key].value[:]
+        if uninstalled:
+            uninstalled_path = Path(environment.get_build_dir(), 'meson-uninstalled').as_posix()
+            if uninstalled_path not in extra_paths:
+                extra_paths.append(uninstalled_path)
+        env.set('PKG_CONFIG_PATH', extra_paths)
+        sysroot = environment.properties[for_machine].get_sys_root()
+        if sysroot:
+            env.set('PKG_CONFIG_SYSROOT_DIR', [sysroot])
+        pkg_config_libdir_prop = environment.properties[for_machine].get_pkg_config_libdir()
+        if pkg_config_libdir_prop:
+            env.set('PKG_CONFIG_LIBDIR', pkg_config_libdir_prop)
+        return env
+
+    @staticmethod
+    def setup_env(env: EnvironOrDict, environment: 'Environment', for_machine: MachineChoice,
+                  uninstalled: bool = False) -> T.Dict[str, str]:
+        envvars = PkgConfigDependency.get_env(environment, for_machine, uninstalled)
+        env = envvars.get_env(env)
+        # Dump all PKG_CONFIG environment variables
+        for key, value in env.items():
+            if key.startswith('PKG_'):
+                mlog.debug(f'env[{key}]: {value}')
+        return env
+
+    def _call_pkgbin(self, args: T.List[str], env: T.Optional[EnvironOrDict] = None) -> T.Tuple[int, str, str]:
+        assert isinstance(self.pkgbin, ExternalProgram)
+        env = env or os.environ
+        env = PkgConfigDependency.setup_env(env, self.env, self.for_machine)
+
+        fenv = frozenset(env.items())
+        targs = tuple(args)
+        cache = PkgConfigDependency.pkgbin_cache
+        if (self.pkgbin, targs, fenv) not in cache:
+            cache[(self.pkgbin, targs, fenv)] = self._call_pkgbin_real(args, env)
+        return cache[(self.pkgbin, targs, fenv)]
+
+    def _convert_mingw_paths(self, args: T.List[str]) -> T.List[str]:
+        '''
+        Both MSVC and native Python on Windows cannot handle MinGW-esque /c/foo
+        paths so convert them to C:/foo. We cannot resolve other paths starting
+        with / like /home/foo so leave them as-is so that the user gets an
+        error/warning from the compiler/linker.
+        '''
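+        # e.g. '-L/c/foo/bar' is rewritten to '-Lc:/foo/bar'.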
+        if not self.env.machines.build.is_windows():
+            return args
+        converted = []
+        for arg in args:
+            pargs: T.Tuple[str, ...] = tuple()
+            # Library search path
+            if arg.startswith('-L/'):
+                pargs = PurePath(arg[2:]).parts
+                tmpl = '-L{}:/{}'
+            elif arg.startswith('-I/'):
+                pargs = PurePath(arg[2:]).parts
+                tmpl = '-I{}:/{}'
+            # Full path to library or .la file
+            elif arg.startswith('/'):
+                pargs = PurePath(arg).parts
+                tmpl = '{}:/{}'
+            elif arg.startswith(('-L', '-I')) or (len(arg) > 2 and arg[1] == ':'):
+                # clean out improper '\\ ' as comes from some Windows pkg-config files
+                arg = arg.replace('\\ ', ' ')
+            if len(pargs) > 1 and len(pargs[1]) == 1:
+                arg = tmpl.format(pargs[1], '/'.join(pargs[2:]))
+            converted.append(arg)
+        return converted
+
+    def _split_args(self, cmd: str) -> T.List[str]:
+        # pkg-config paths follow Unix conventions, even on Windows; split the
+        # output using shlex.split rather than mesonlib.split_args
+        return shlex.split(cmd)
+
+    def _set_cargs(self) -> None:
+        env = None
+        if self.language == 'fortran':
+            # gfortran doesn't appear to look in system paths for INCLUDE files,
+            # so don't allow pkg-config to suppress -I flags for system paths
+            env = os.environ.copy()
+            env['PKG_CONFIG_ALLOW_SYSTEM_CFLAGS'] = '1'
+        ret, out, err = self._call_pkgbin(['--cflags', self.name], env=env)
+        if ret != 0:
+            raise DependencyException(f'Could not generate cargs for {self.name}:\n{err}\n')
+        self.compile_args = self._convert_mingw_paths(self._split_args(out))
+
+    def _search_libs(self, out: str, out_raw: str) -> T.Tuple[T.List[str], T.List[str]]:
+        '''
+        @out: PKG_CONFIG_ALLOW_SYSTEM_LIBS=1 pkg-config --libs
+        @out_raw: pkg-config --libs
+
+        We always look for the file ourselves instead of depending on the
+        compiler to find it with -lfoo or foo.lib (if possible) because:
+        1. We want to be able to select static or shared
+        2. We need the full path of the library to calculate RPATH values
+        3. De-dup of libraries is easier when we have absolute paths
+
+        Libraries that are provided by the toolchain or are not found by
+        find_library() will be added with -L -l pairs.
+        '''
+        # Library paths should be safe to de-dup
+        #
+        # First, figure out what library paths to use. Originally, we were
+        # doing this as part of the loop, but due to differences in the order
+        # of -L values between pkg-config and pkgconf, we need to do that as
+        # a separate step. See:
+        # https://github.com/mesonbuild/meson/issues/3951
+        # https://github.com/mesonbuild/meson/issues/4023
+        #
+        # Separate system and prefix paths, and ensure that prefix paths are
+        # always searched first.
+        prefix_libpaths: OrderedSet[str] = OrderedSet()
+        # We also store this raw_link_args on the object later
+        raw_link_args = self._convert_mingw_paths(self._split_args(out_raw))
+        for arg in raw_link_args:
+            if arg.startswith('-L') and not arg.startswith(('-L-l', '-L-L')):
+                path = arg[2:]
+                if not os.path.isabs(path):
+                    # Resolve the path as a compiler in the build directory would
+                    path = os.path.join(self.env.get_build_dir(), path)
+                prefix_libpaths.add(path)
+        # Library paths are not always ordered in a meaningful way
+        #
+        # Instead of relying on pkg-config or pkgconf to provide -L flags in a
+        # specific order, we reorder library paths ourselves, according to the
+        # order specified in PKG_CONFIG_PATH. See:
+        # https://github.com/mesonbuild/meson/issues/4271
+        #
+        # Only prefix_libpaths are reordered here because there should not be
+        # too many system_libpaths to cause library version issues.
+        pkg_config_path: T.List[str] = self.env.coredata.options[OptionKey('pkg_config_path', machine=self.for_machine)].value
+        pkg_config_path = self._convert_mingw_paths(pkg_config_path)
+        prefix_libpaths = OrderedSet(sort_libpaths(list(prefix_libpaths), pkg_config_path))
+        system_libpaths: OrderedSet[str] = OrderedSet()
+        full_args = self._convert_mingw_paths(self._split_args(out))
+        for arg in full_args:
+            if arg.startswith(('-L-l', '-L-L')):
+                # These are D language arguments, not library paths
+                continue
+            if arg.startswith('-L') and arg[2:] not in prefix_libpaths:
+                system_libpaths.add(arg[2:])
+        # Use this re-ordered path list for library resolution
+        libpaths = list(prefix_libpaths) + list(system_libpaths)
+        # Track -lfoo libraries to avoid duplicate work
+        libs_found: OrderedSet[str] = OrderedSet()
+        # Track not-found libraries to know whether to add library paths
+        libs_notfound = []
+        # Generate link arguments for this library
+        link_args = []
+        for lib in full_args:
+            if lib.startswith(('-L-l', '-L-L')):
+                # These are D language arguments, add them as-is
+                pass
+            elif lib.startswith('-L'):
+                # We already handled library paths above
+                continue
+            elif lib.startswith('-l:'):
+                # see: https://stackoverflow.com/questions/48532868/gcc-library-option-with-a-colon-llibevent-a
+                # also : See the documentation of -lnamespec | --library=namespec in the linker manual
+                #                     https://sourceware.org/binutils/docs-2.18/ld/Options.html
+
+                # Don't resolve the same -l:libfoo.a argument again
+                if lib in libs_found:
+                    continue
+                libfilename = lib[3:]
+                foundname = None
+                for libdir in libpaths:
+                    target = os.path.join(libdir, libfilename)
+                    if os.path.exists(target):
+                        foundname = target
+                        break
+                if foundname is None:
+                    if lib in libs_notfound:
+                        continue
+                    else:
+                        mlog.warning('Library {!r} not found for dependency {!r}, may '
+                                     'not be successfully linked'.format(libfilename, self.name))
+                    libs_notfound.append(lib)
+                else:
+                    lib = foundname
+            elif lib.startswith('-l'):
+                # Don't resolve the same -lfoo argument again
+                if lib in libs_found:
+                    continue
+                if self.clib_compiler:
+                    args = self.clib_compiler.find_library(lib[2:], self.env,
+                                                           libpaths, self.libtype,
+                                                           lib_prefix_warning=False)
+                # If the project only uses a non-clib language such as D, Rust,
+                # C#, Python, etc, all we can do is limp along by adding the
+                # arguments as-is and then adding the libpaths at the end.
+                else:
+                    args = None
+                if args is not None:
+                    libs_found.add(lib)
+                    # Replace -l arg with full path to library if available
+                    # else, library is either to be ignored, or is provided by
+                    # the compiler, can't be resolved, and should be used as-is
+                    if args:
+                        if not args[0].startswith('-l'):
+                            lib = args[0]
+                    else:
+                        continue
+                else:
+                    # Library wasn't found, maybe we're looking in the wrong
+                    # places or the library will be provided with LDFLAGS or
+                    # LIBRARY_PATH from the environment (on macOS), and many
+                    # other edge cases that we can't account for.
+                    #
+                    # Add all -L paths and use it as -lfoo
+                    if lib in libs_notfound:
+                        continue
+                    if self.static:
+                        mlog.warning('Static library {!r} not found for dependency {!r}, may '
+                                     'not be statically linked'.format(lib[2:], self.name))
+                    libs_notfound.append(lib)
+            elif lib.endswith(".la"):
+                shared_libname = self.extract_libtool_shlib(lib)
+                shared_lib = os.path.join(os.path.dirname(lib), shared_libname)
+                if not os.path.exists(shared_lib):
+                    shared_lib = os.path.join(os.path.dirname(lib), ".libs", shared_libname)
+
+                if not os.path.exists(shared_lib):
+                    raise DependencyException(f'Got a libtool-specific "{lib}" dependency, '
+                                              'but we could not compute the actual shared '
+                                              'library path')
+                self.is_libtool = True
+                lib = shared_lib
+                if lib in link_args:
+                    continue
+            link_args.append(lib)
+        # Add all -Lbar args if we have -lfoo args in link_args
+        if libs_notfound:
+            # Order of -L flags doesn't matter with ld, but it might with other
+            # linkers such as MSVC, so prepend them.
+            link_args = ['-L' + lp for lp in prefix_libpaths] + link_args
+        return link_args, raw_link_args
+
+    def _set_libs(self) -> None:
+        env = None
+        libcmd = ['--libs']
+
+        if self.static:
+            libcmd.append('--static')
+
+        libcmd.append(self.name)
+
+        # Force pkg-config to output -L fields even if they are system
+        # paths so we can do manual searching with cc.find_library() later.
+        env = os.environ.copy()
+        env['PKG_CONFIG_ALLOW_SYSTEM_LIBS'] = '1'
+        ret, out, err = self._call_pkgbin(libcmd, env=env)
+        if ret != 0:
+            raise DependencyException(f'Could not generate libs for {self.name}:\n{err}\n')
+        # Also get the 'raw' output without -Lfoo system paths for adding -L
+        # args with -lfoo when a library can't be found, and also in
+        # gnome.generate_gir + gnome.gtkdoc which need -L -l arguments.
+        ret, out_raw, err_raw = self._call_pkgbin(libcmd)
+        if ret != 0:
+            raise DependencyException(f'Could not generate libs for {self.name}:\n\n{out_raw}')
+        self.link_args, self.raw_link_args = self._search_libs(out, out_raw)
+
+    def get_pkgconfig_variable(self, variable_name: str,
+                               define_variable: 'ImmutableListProtocol[str]',
+                               default: T.Optional[str]) -> str:
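+        # e.g. get_pkgconfig_variable('prefix', [], None) effectively runs
+        #   pkg-config --variable=prefix <name>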
+        options = ['--variable=' + variable_name, self.name]
+
+        if define_variable:
+            options = ['--define-variable=' + '='.join(define_variable)] + options
+
+        ret, out, err = self._call_pkgbin(options)
+        variable = ''
+        if ret != 0:
+            if self.required:
+                raise DependencyException(f'dependency {self.name} not found:\n{err}\n')
+        else:
+            variable = out.strip()
+
+            # pkg-config doesn't distinguish between empty and nonexistent variables
+            # use the variable list to check for variable existence
+            if not variable:
+                ret, out, _ = self._call_pkgbin(['--print-variables', self.name])
+                if not re.search(r'^' + re.escape(variable_name) + r'$', out, re.MULTILINE):
+                    if default is not None:
+                        variable = default
+                    else:
+                        mlog.warning(f"pkgconfig variable '{variable_name}' not defined for dependency {self.name}.")
+
+        mlog.debug(f'Got pkgconfig variable {variable_name} : {variable}')
+        return variable
+
+    @staticmethod
+    def check_pkgconfig(env: Environment, pkgbin: ExternalProgram) -> T.Optional[str]:
+        if not pkgbin.found():
+            mlog.log(f'Did not find pkg-config by name {pkgbin.name!r}')
+            return None
+        command_as_string = ' '.join(pkgbin.get_command())
+        try:
+            helptext = Popen_safe(pkgbin.get_command() + ['--help'])[1]
+            if 'Pure-Perl' in helptext:
+                mlog.log(f'found pkg-config {command_as_string!r} but it is Strawberry Perl and thus broken. Ignoring...')
+                return None
+            p, out = Popen_safe(pkgbin.get_command() + ['--version'])[0:2]
+            if p.returncode != 0:
+                mlog.warning(f'Found pkg-config {command_as_string!r} but it failed when run')
+                return None
+        except FileNotFoundError:
+            mlog.warning(f'We thought we found pkg-config {command_as_string!r} but now it\'s not there. How odd!')
+            return None
+        except PermissionError:
+            msg = f'Found pkg-config {command_as_string!r} but didn\'t have permissions to run it.'
+            if not env.machines.build.is_windows():
+                msg += '\n\nOn Unix-like systems this is often caused by scripts that are not executable.'
+            mlog.warning(msg)
+            return None
+        return out.strip()
+
+    def extract_field(self, la_file: str, fieldname: str) -> T.Optional[str]:
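+        # Reads a libtool .la file; e.g. with fieldname 'dlname', a line
+        # dlname='libfoo.so.1' yields 'libfoo.so.1'.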
+        with open(la_file, encoding='utf-8') as f:
+            for line in f:
+                arr = line.strip().split('=')
+                if arr[0] == fieldname:
+                    return arr[1][1:-1]
+        return None
+
+    def extract_dlname_field(self, la_file: str) -> T.Optional[str]:
+        return self.extract_field(la_file, 'dlname')
+
+    def extract_libdir_field(self, la_file: str) -> T.Optional[str]:
+        return self.extract_field(la_file, 'libdir')
+
+    def extract_libtool_shlib(self, la_file: str) -> T.Optional[str]:
+        '''
+        Returns the path to the shared library
+        corresponding to this .la file
+        '''
+        dlname = self.extract_dlname_field(la_file)
+        if dlname is None:
+            return None
+
+        # Darwin uses absolute paths where possible; since the libtool files never
+        # contain absolute paths, use the libdir field
+        if self.env.machines[self.for_machine].is_darwin():
+            dlbasename = os.path.basename(dlname)
+            libdir = self.extract_libdir_field(la_file)
+            if libdir is None:
+                return dlbasename
+            return os.path.join(libdir, dlbasename)
+        # From the comments in extract_libtool(), older libtools had
+        # a path rather than the raw dlname
+        return os.path.basename(dlname)
+
+    @staticmethod
+    def log_tried() -> str:
+        return 'pkgconfig'
+
+    def get_variable(self, *, cmake: T.Optional[str] = None, pkgconfig: T.Optional[str] = None,
+                     configtool: T.Optional[str] = None, internal: T.Optional[str] = None,
+                     default_value: T.Optional[str] = None,
+                     pkgconfig_define: T.Optional[T.List[str]] = None) -> str:
+        if pkgconfig:
+            try:
+                return self.get_pkgconfig_variable(pkgconfig, pkgconfig_define or [], default_value)
+            except DependencyException:
+                pass
+        if default_value is not None:
+            return default_value
+        raise DependencyException(f'Could not get pkg-config variable and no default provided for {self!r}')
diff --git a/vendored-meson/meson/mesonbuild/dependencies/platform.py b/vendored-meson/meson/mesonbuild/dependencies/platform.py
new file mode 100644
index 000000000000..87726b57949f
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/dependencies/platform.py
@@ -0,0 +1,63 @@
+# Copyright 2013-2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This file contains the detection logic for external dependencies that are
+# platform-specific (generally speaking).
+from __future__ import annotations
+
+from .base import DependencyTypeName, ExternalDependency, DependencyException
+from .detect import packages
+from ..mesonlib import MesonException
+import typing as T
+
+if T.TYPE_CHECKING:
+    from ..environment import Environment
+
+class AppleFrameworks(ExternalDependency):
+    def __init__(self, env: 'Environment', kwargs: T.Dict[str, T.Any]) -> None:
+        super().__init__(DependencyTypeName('appleframeworks'), env, kwargs)
+        modules = kwargs.get('modules', [])
+        if isinstance(modules, str):
+            modules = [modules]
+        if not modules:
+            raise DependencyException("AppleFrameworks dependency requires at least one module.")
+        self.frameworks = modules
+        if not self.clib_compiler:
+            raise DependencyException('No C-like compilers are available, cannot find the framework')
+        self.is_found = True
+        for f in self.frameworks:
+            try:
+                args = self.clib_compiler.find_framework(f, env, [])
+            except MesonException as e:
+                if 'non-clang' in str(e):
+                    self.is_found = False
+                    self.link_args = []
+                    self.compile_args = []
+                    return
+                raise
+
+            if args is not None:
+                # No compile args are needed for system frameworks
+                self.link_args += args
+            else:
+                self.is_found = False
+
+    def log_info(self) -> str:
+        return ', '.join(self.frameworks)
+
+    @staticmethod
+    def log_tried() -> str:
+        return 'framework'
+
+packages['appleframeworks'] = AppleFrameworks
diff --git a/vendored-meson/meson/mesonbuild/dependencies/python.py b/vendored-meson/meson/mesonbuild/dependencies/python.py
new file mode 100644
index 000000000000..1607728883df
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/dependencies/python.py
@@ -0,0 +1,417 @@
+# Copyright 2022 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+import functools, json, os, textwrap
+from pathlib import Path
+import typing as T
+
+from .. import mesonlib, mlog
+from .base import process_method_kw, DependencyMethods, DependencyTypeName, ExternalDependency, SystemDependency
+from .configtool import ConfigToolDependency
+from .detect import packages
+from .factory import DependencyFactory
+from .framework import ExtraFrameworkDependency
+from .pkgconfig import PkgConfigDependency
+from ..environment import detect_cpu_family
+from ..programs import ExternalProgram
+
+if T.TYPE_CHECKING:
+    from typing_extensions import TypedDict
+
+    from .factory import DependencyGenerator
+    from ..environment import Environment
+    from ..mesonlib import MachineChoice
+
+    class PythonIntrospectionDict(TypedDict):
+
+        install_paths: T.Dict[str, str]
+        is_pypy: bool
+        is_venv: bool
+        link_libpython: bool
+        sysconfig_paths: T.Dict[str, str]
+        paths: T.Dict[str, str]
+        platform: str
+        suffix: str
+        variables: T.Dict[str, str]
+        version: str
+
+    _Base = ExternalDependency
+else:
+    _Base = object
+
+
+class Pybind11ConfigToolDependency(ConfigToolDependency):
+
+    tools = ['pybind11-config']
+
+    # any version of the tool is valid, since this is header-only
+    allow_default_for_cross = True
+
+    # pybind11 only added --version in 2.10.4, so in the meantime we
+    # sanity-check with another flag that is unique to it
+    skip_version = '--pkgconfigdir'
+
+    def __init__(self, name: str, environment: Environment, kwargs: T.Dict[str, T.Any]):
+        super().__init__(name, environment, kwargs)
+        if not self.is_found:
+            return
+        self.compile_args = self.get_config_value(['--includes'], 'compile_args')
+
+
+class BasicPythonExternalProgram(ExternalProgram):
+    def __init__(self, name: str, command: T.Optional[T.List[str]] = None,
+                 ext_prog: T.Optional[ExternalProgram] = None):
+        if ext_prog is None:
+            super().__init__(name, command=command, silent=True)
+        else:
+            self.name = name
+            self.command = ext_prog.command
+            self.path = ext_prog.path
+            self.cached_version = None
+
+        # We want all the keys to always be present, so we populate this with
+        # bogus data. Otherwise, to keep the type checkers happy, we'd have to
+        # use .get() for every access, even though we know the introspection
+        # data will be complete.
+        self.info: 'PythonIntrospectionDict' = {
+            'install_paths': {},
+            'is_pypy': False,
+            'is_venv': False,
+            'link_libpython': False,
+            'sysconfig_paths': {},
+            'paths': {},
+            'platform': 'sentinel',
+            'suffix': 'sentinel',
+            'variables': {},
+            'version': '0.0',
+        }
+        self.pure: bool = True
+
+    def _check_version(self, version: str) -> bool:
+        if self.name == 'python2':
+            return mesonlib.version_compare(version, '< 3.0')
+        elif self.name == 'python3':
+            return mesonlib.version_compare(version, '>= 3.0')
+        return True
+
+    def sanity(self) -> bool:
+        # Sanity check: we expect to have something that at least quacks in tune
+
+        import importlib.resources
+
+        with importlib.resources.path('mesonbuild.scripts', 'python_info.py') as f:
+            cmd = self.get_command() + [str(f)]
+            p, stdout, stderr = mesonlib.Popen_safe(cmd)
+
+        try:
+            info = json.loads(stdout)
+        except json.JSONDecodeError:
+            info = None
+            mlog.debug('Could not introspect Python (%s): exit code %d' % (str(p.args), p.returncode))
+            mlog.debug('Program stdout:\n')
+            mlog.debug(stdout)
+            mlog.debug('Program stderr:\n')
+            mlog.debug(stderr)
+
+        if info is not None and self._check_version(info['version']):
+            self.info = T.cast('PythonIntrospectionDict', info)
+            return True
+        else:
+            return False
+
+
+class _PythonDependencyBase(_Base):
+
+    def __init__(self, python_holder: 'BasicPythonExternalProgram', embed: bool):
+        self.embed = embed
+        self.version: str = python_holder.info['version']
+        self.platform = python_holder.info['platform']
+        self.variables = python_holder.info['variables']
+        self.paths = python_holder.info['paths']
+        self.is_pypy = python_holder.info['is_pypy']
+        # The "-embed" version of python.pc / python-config was introduced in 3.8,
+        # and distutils extension linking was changed to be considered a non-embed
+        # usage. Before then, this dependency always used the embed=True handling
+        # because that was the only one that existed.
+        #
+        # On macOS and some Linux distros (Debian) distutils doesn't link extensions
+        # against libpython, even on 3.7 and below. We call into distutils and
+        # mirror its behavior. See https://github.com/mesonbuild/meson/issues/4117
+        self.link_libpython = python_holder.info['link_libpython'] or embed
+        self.info: T.Optional[T.Dict[str, str]] = None
+        if mesonlib.version_compare(self.version, '>= 3.0'):
+            self.major_version = 3
+        else:
+            self.major_version = 2
+
+
+class PythonPkgConfigDependency(PkgConfigDependency, _PythonDependencyBase):
+
+    def __init__(self, name: str, environment: 'Environment',
+                 kwargs: T.Dict[str, T.Any], installation: 'BasicPythonExternalProgram',
+                 libpc: bool = False):
+        if libpc:
+            mlog.debug(f'Searching for {name!r} via pkgconfig lookup in LIBPC')
+        else:
+            mlog.debug(f'Searching for {name!r} via fallback pkgconfig lookup in default paths')
+
+        PkgConfigDependency.__init__(self, name, environment, kwargs)
+        _PythonDependencyBase.__init__(self, installation, kwargs.get('embed', False))
+
+        if libpc and not self.is_found:
+            mlog.debug(f'"python-{self.version}" could not be found in LIBPC, this is likely due to a relocated python installation')
+
+        # pkg-config files are usually accurate starting with python 3.8
+        if not self.link_libpython and mesonlib.version_compare(self.version, '< 3.8'):
+            self.link_args = []
+
+
+class PythonFrameworkDependency(ExtraFrameworkDependency, _PythonDependencyBase):
+
+    def __init__(self, name: str, environment: 'Environment',
+                 kwargs: T.Dict[str, T.Any], installation: 'BasicPythonExternalProgram'):
+        ExtraFrameworkDependency.__init__(self, name, environment, kwargs)
+        _PythonDependencyBase.__init__(self, installation, kwargs.get('embed', False))
+
+
+class PythonSystemDependency(SystemDependency, _PythonDependencyBase):
+
+    def __init__(self, name: str, environment: 'Environment',
+                 kwargs: T.Dict[str, T.Any], installation: 'BasicPythonExternalProgram'):
+        SystemDependency.__init__(self, name, environment, kwargs)
+        _PythonDependencyBase.__init__(self, installation, kwargs.get('embed', False))
+
+        # match pkg-config behavior
+        if self.link_libpython:
+            # link args
+            if mesonlib.is_windows():
+                self.find_libpy_windows(environment)
+            else:
+                self.find_libpy(environment)
+        else:
+            self.is_found = True
+
+        # compile args
+        inc_paths = mesonlib.OrderedSet([
+            self.variables.get('INCLUDEPY'),
+            self.paths.get('include'),
+            self.paths.get('platinclude')])
+
+        self.compile_args += ['-I' + path for path in inc_paths if path]
+
+        # https://sourceforge.net/p/mingw-w64/mailman/message/30504611/
+        # https://github.com/python/cpython/pull/100137
+        if mesonlib.is_windows() and (self.get_windows_python_arch() or '').endswith('64') and mesonlib.version_compare(self.version, '<3.12'):
+            self.compile_args += ['-DMS_WIN64=']
+
+        if not self.clib_compiler.has_header('Python.h', '', environment, extra_args=self.compile_args):
+            self.is_found = False
+
+    def find_libpy(self, environment: 'Environment') -> None:
+        if self.is_pypy:
+            if self.major_version == 3:
+                libname = 'pypy3-c'
+            else:
+                libname = 'pypy-c'
+            libdir = os.path.join(self.variables.get('base'), 'bin')
+            libdirs = [libdir]
+        else:
+            libname = f'python{self.version}'
+            if 'DEBUG_EXT' in self.variables:
+                libname += self.variables['DEBUG_EXT']
+            if 'ABIFLAGS' in self.variables:
+                libname += self.variables['ABIFLAGS']
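+            # e.g. for CPython 3.11 this typically searches for 'python3.11',
+            # plus any ABI flags (such as 'd' for a debug build).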
+            libdirs = []
+
+        largs = self.clib_compiler.find_library(libname, environment, libdirs)
+        if largs is not None:
+            self.link_args = largs
+            self.is_found = True
+
+    def get_windows_python_arch(self) -> T.Optional[str]:
+        if self.platform == 'mingw':
+            pycc = self.variables.get('CC')
+            if pycc.startswith('x86_64'):
+                return 'x86_64'
+            elif pycc.startswith(('i686', 'i386')):
+                return 'x86'
+            else:
+                mlog.log(f'MinGW Python built with unknown CC {pycc!r}, please file a bug')
+                return None
+        elif self.platform == 'win32':
+            return 'x86'
+        elif self.platform in {'win64', 'win-amd64'}:
+            return 'x86_64'
+        elif self.platform in {'win-arm64'}:
+            return 'aarch64'
+        mlog.log(f'Unknown Windows Python platform {self.platform!r}')
+        return None
+
+    def get_windows_link_args(self) -> T.Optional[T.List[str]]:
+        if self.platform.startswith('win'):
+            vernum = self.variables.get('py_version_nodot')
+            verdot = self.variables.get('py_version_short')
+            imp_lower = self.variables.get('implementation_lower', 'python')
+            if self.static:
+                libpath = Path('libs') / f'libpython{vernum}.a'
+            else:
+                comp = self.get_compiler()
+                if comp.id == "gcc":
+                    if imp_lower == 'pypy' and verdot == '3.8':
+                        # The naming changed between 3.8 and 3.9
+                        libpath = Path('libpypy3-c.dll')
+                    elif imp_lower == 'pypy':
+                        libpath = Path(f'libpypy{verdot}-c.dll')
+                    else:
+                        libpath = Path(f'python{vernum}.dll')
+                else:
+                    libpath = Path('libs') / f'python{vernum}.lib'
+                    # For a debug build, pyconfig.h may force linking with
+                    # pythonX_d.lib (see meson#10776). This cannot be avoided
+                    # and won't work unless we also have a debug build of
+                    # Python itself (except with pybind11, which has an ugly
+                    # hack to work around this) - so emit a warning to explain
+                    # the cause of the expected link error.
+                    buildtype = self.env.coredata.get_option(mesonlib.OptionKey('buildtype'))
+                    assert isinstance(buildtype, str)
+                    debug = self.env.coredata.get_option(mesonlib.OptionKey('debug'))
+                    # `debugoptimized` buildtype may not set debug=True currently, see gh-11645
+                    is_debug_build = debug or buildtype == 'debug'
+                    vscrt_debug = False
+                    if mesonlib.OptionKey('b_vscrt') in self.env.coredata.options:
+                        vscrt = self.env.coredata.options[mesonlib.OptionKey('b_vscrt')].value
+                        if vscrt in {'mdd', 'mtd', 'from_buildtype', 'static_from_buildtype'}:
+                            vscrt_debug = True
+                    if is_debug_build and vscrt_debug and not self.variables.get('Py_DEBUG'):
+                        mlog.warning(textwrap.dedent('''\
+                            Using a debug build type with MSVC or an MSVC-compatible compiler
+                            when the Python interpreter is not also a debug build will almost
+                            certainly result in a failed build. Prefer using a release build
+                            type or a debug Python interpreter.
+                            '''))
+            # base_prefix to allow for virtualenvs.
+            lib = Path(self.variables.get('base_prefix')) / libpath
+        elif self.platform == 'mingw':
+            if self.static:
+                libname = self.variables.get('LIBRARY')
+            else:
+                libname = self.variables.get('LDLIBRARY')
+            lib = Path(self.variables.get('LIBDIR')) / libname
+        else:
+            raise mesonlib.MesonBugException(
+                'On a Windows path, but the OS doesn\'t appear to be Windows or MinGW.')
+        if not lib.exists():
+            mlog.log('Could not find Python3 library {!r}'.format(str(lib)))
+            return None
+        return [str(lib)]
+
+    def find_libpy_windows(self, env: 'Environment') -> None:
+        '''
+        Find python3 libraries on Windows and also verify that the arch matches
+        what we are building for.
+        '''
+        pyarch = self.get_windows_python_arch()
+        if pyarch is None:
+            self.is_found = False
+            return
+        arch = detect_cpu_family(env.coredata.compilers.host)
+        if arch != pyarch:
+            mlog.log('Need', mlog.bold(self.name), f'for {arch}, but found {pyarch}')
+            self.is_found = False
+            return
+        # This can fail if the library is not found
+        largs = self.get_windows_link_args()
+        if largs is None:
+            self.is_found = False
+            return
+        self.link_args = largs
+        self.is_found = True
+
+    @staticmethod
+    def log_tried() -> str:
+        return 'sysconfig'
+
+def python_factory(env: 'Environment', for_machine: 'MachineChoice',
+                   kwargs: T.Dict[str, T.Any],
+                   installation: T.Optional['BasicPythonExternalProgram'] = None) -> T.List['DependencyGenerator']:
+    # We can't use the factory_methods decorator here, as we need to pass the
+    # extra installation argument
+    methods = process_method_kw({DependencyMethods.PKGCONFIG, DependencyMethods.SYSTEM}, kwargs)
+    embed = kwargs.get('embed', False)
+    candidates: T.List['DependencyGenerator'] = []
+    from_installation = installation is not None
+    # When not invoked through the python module, fall back to the default installation.
+    if installation is None:
+        installation = BasicPythonExternalProgram('python3', mesonlib.python_command)
+        installation.sanity()
+    pkg_version = installation.info['variables'].get('LDVERSION') or installation.info['version']
+
+    if DependencyMethods.PKGCONFIG in methods:
+        if from_installation:
+            pkg_libdir = installation.info['variables'].get('LIBPC')
+            pkg_embed = '-embed' if embed and mesonlib.version_compare(installation.info['version'], '>=3.8') else ''
+            pkg_name = f'python-{pkg_version}{pkg_embed}'
+
+            # If python-X.Y.pc exists in LIBPC, we will try to use it
+            def wrap_in_pythons_pc_dir(name: str, env: 'Environment', kwargs: T.Dict[str, T.Any],
+                                       installation: 'BasicPythonExternalProgram') -> 'ExternalDependency':
+                if not pkg_libdir:
+                    # there is no LIBPC, so we can't search in it
+                    empty = ExternalDependency(DependencyTypeName('pkgconfig'), env, {})
+                    empty.name = 'python'
+                    return empty
+
+                old_pkg_libdir = os.environ.pop('PKG_CONFIG_LIBDIR', None)
+                old_pkg_path = os.environ.pop('PKG_CONFIG_PATH', None)
+                os.environ['PKG_CONFIG_LIBDIR'] = pkg_libdir
+                try:
+                    return PythonPkgConfigDependency(name, env, kwargs, installation, True)
+                finally:
+                    def set_env(name: str, value: str) -> None:
+                        if value is not None:
+                            os.environ[name] = value
+                        elif name in os.environ:
+                            del os.environ[name]
+                    set_env('PKG_CONFIG_LIBDIR', old_pkg_libdir)
+                    set_env('PKG_CONFIG_PATH', old_pkg_path)
+
+            candidates.append(functools.partial(wrap_in_pythons_pc_dir, pkg_name, env, kwargs, installation))
+            # We only need to check both if a python install has a LIBPC. It might point to the wrong location,
+            # e.g. relocated / cross compilation, but the presence of LIBPC indicates we should definitely look for something.
+            if pkg_libdir is not None:
+                candidates.append(functools.partial(PythonPkgConfigDependency, pkg_name, env, kwargs, installation))
+        else:
+            candidates.append(functools.partial(PkgConfigDependency, 'python3', env, kwargs))
+
+    if DependencyMethods.SYSTEM in methods:
+        candidates.append(functools.partial(PythonSystemDependency, 'python', env, kwargs, installation))
+
+    if DependencyMethods.EXTRAFRAMEWORK in methods:
+        nkwargs = kwargs.copy()
+        if mesonlib.version_compare(pkg_version, '>= 3'):
+            # There is a python in /System/Library/Frameworks, but that's python 2.x,
+            # Python 3 will always be in /Library
+            nkwargs['paths'] = ['/Library/Frameworks']
+        candidates.append(functools.partial(PythonFrameworkDependency, 'Python', env, nkwargs, installation))
+
+    return candidates
+
+packages['python3'] = python_factory
+
+packages['pybind11'] = pybind11_factory = DependencyFactory(
+    'pybind11',
+    [DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL, DependencyMethods.CMAKE],
+    configtool_class=Pybind11ConfigToolDependency,
+)
diff --git a/vendored-meson/meson/mesonbuild/dependencies/qt.py b/vendored-meson/meson/mesonbuild/dependencies/qt.py
new file mode 100644
index 000000000000..1a86bd28a8ea
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/dependencies/qt.py
@@ -0,0 +1,490 @@
+# Copyright 2013-2017 The Meson development team
+# Copyright © 2021 Intel Corporation
+# SPDX-License-Identifier: Apache-2.0
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+"""Dependency finders for the Qt framework."""
+
+import abc
+import re
+import os
+import typing as T
+
+from .base import DependencyException, DependencyMethods
+from .configtool import ConfigToolDependency
+from .detect import packages
+from .framework import ExtraFrameworkDependency
+from .pkgconfig import PkgConfigDependency
+from .factory import DependencyFactory
+from .. import mlog
+from .. import mesonlib
+
+if T.TYPE_CHECKING:
+    from ..compilers import Compiler
+    from ..envconfig import MachineInfo
+    from ..environment import Environment
+    from ..dependencies import MissingCompiler
+
+
+def _qt_get_private_includes(mod_inc_dir: str, module: str, mod_version: str) -> T.List[str]:
+    # Usually Qt5 puts private headers in /QT_INSTALL_HEADERS/module/VERSION/module/private,
+    # except for at least QtWebkit and Enginio, where the module version doesn't match the
+    # Qt version. As an example, with Qt 5.10.1 on Linux you would get:
+    # /usr/include/qt5/QtCore/5.10.1/QtCore/private/
+    # /usr/include/qt5/QtWidgets/5.10.1/QtWidgets/private/
+    # /usr/include/qt5/QtWebKit/5.212.0/QtWebKit/private/
+
+    # On Qt4, when available, the private folder is directly in the module folder,
+    # e.g. /usr/include/QtCore/private/
+    if int(mod_version.split('.')[0]) < 5:
+        return []
+
+    private_dir = os.path.join(mod_inc_dir, mod_version)
+    # Fallback: try to find a directory with the latest version
+    if not os.path.exists(private_dir):
+        dirs = [filename for filename in os.listdir(mod_inc_dir)
+                if os.path.isdir(os.path.join(mod_inc_dir, filename))]
+
+        for dirname in sorted(dirs, reverse=True):
+            if len(dirname.split('.')) == 3:
+                private_dir = dirname
+                break
+    return [private_dir, os.path.join(private_dir, 'Qt' + module)]
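+
+# Illustrative example (assuming the Qt 5.10.1 layout shown above exists):
+#   _qt_get_private_includes('/usr/include/qt5/QtCore', 'Core', '5.10.1')
+#   -> ['/usr/include/qt5/QtCore/5.10.1',
+#       '/usr/include/qt5/QtCore/5.10.1/QtCore']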
+
+
+def get_qmake_host_bins(qvars: T.Dict[str, str]) -> str:
+    # Prefer QT_HOST_BINS (qt5, correct for cross and native compiling)
+    # but fall back to QT_INSTALL_BINS (qt4)
+    if 'QT_HOST_BINS' in qvars:
+        return qvars['QT_HOST_BINS']
+    return qvars['QT_INSTALL_BINS']
+
+
+def get_qmake_host_libexecs(qvars: T.Dict[str, str]) -> T.Optional[str]:
+    if 'QT_HOST_LIBEXECS' in qvars:
+        return qvars['QT_HOST_LIBEXECS']
+    return qvars.get('QT_INSTALL_LIBEXECS')
+
+
+def _get_modules_lib_suffix(version: str, info: 'MachineInfo', is_debug: bool) -> str:
+    """Get the module suffix based on platform and debug type."""
+    suffix = ''
+    if info.is_windows():
+        if is_debug:
+            suffix += 'd'
+        if version.startswith('4'):
+            suffix += '4'
+    if info.is_darwin():
+        if is_debug:
+            suffix += '_debug'
+    if mesonlib.version_compare(version, '>= 5.14.0'):
+        if info.is_android():
+            if info.cpu_family == 'x86':
+                suffix += '_x86'
+            elif info.cpu_family == 'x86_64':
+                suffix += '_x86_64'
+            elif info.cpu_family == 'arm':
+                suffix += '_armeabi-v7a'
+            elif info.cpu_family == 'aarch64':
+                suffix += '_arm64-v8a'
+            else:
+                mlog.warning(f'Android target arch "{info.cpu_family}" for Qt5 is unknown, '
+                             'module detection may not work')
+    return suffix
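+
+# For example (illustrative): a debug build targeting Windows with Qt 4 yields
+# the suffix 'd4', so a module library is looked up as e.g. 'QtCored4'.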
+
+
+class QtExtraFrameworkDependency(ExtraFrameworkDependency):
+    def __init__(self, name: str, env: 'Environment', kwargs: T.Dict[str, T.Any], qvars: T.Dict[str, str], language: T.Optional[str] = None):
+        super().__init__(name, env, kwargs, language=language)
+        self.mod_name = name[2:]
+        self.qt_extra_include_directory = qvars['QT_INSTALL_HEADERS']
+
+    def get_compile_args(self, with_private_headers: bool = False, qt_version: str = "0") -> T.List[str]:
+        if self.found():
+            mod_inc_dir = os.path.join(self.framework_path, 'Headers')
+            args = ['-I' + mod_inc_dir]
+            if with_private_headers:
+                args += ['-I' + dirname for dirname in _qt_get_private_includes(mod_inc_dir, self.mod_name, qt_version)]
+            if self.qt_extra_include_directory:
+                args += ['-I' + self.qt_extra_include_directory]
+            return args
+        return []
+
+
+class _QtBase:
+
+    """Mixin class for shared components between PkgConfig and Qmake."""
+
+    link_args: T.List[str]
+    clib_compiler: T.Union['MissingCompiler', 'Compiler']
+    env: 'Environment'
+    libexecdir: T.Optional[str] = None
+
+    def __init__(self, name: str, kwargs: T.Dict[str, T.Any]):
+        self.name = name
+        self.qtname = name.capitalize()
+        self.qtver = name[-1]
+        if self.qtver == "4":
+            self.qtpkgname = 'Qt'
+        else:
+            self.qtpkgname = self.qtname
+
+        self.private_headers = T.cast('bool', kwargs.get('private_headers', False))
+
+        self.requested_modules = mesonlib.stringlistify(mesonlib.extract_as_list(kwargs, 'modules'))
+        if not self.requested_modules:
+            raise DependencyException('No ' + self.qtname + ' modules specified.')
+
+        self.qtmain = T.cast('bool', kwargs.get('main', False))
+        if not isinstance(self.qtmain, bool):
+            raise DependencyException('"main" argument must be a boolean')
+
+    def _link_with_qt_winmain(self, is_debug: bool, libdir: T.Union[str, T.List[str]]) -> bool:
+        libdir = mesonlib.listify(libdir)  # TODO: shouldn't be necessary
+        base_name = self.get_qt_winmain_base_name(is_debug)
+        qt_winmain = self.clib_compiler.find_library(base_name, self.env, libdir)
+        if qt_winmain:
+            self.link_args.append(qt_winmain[0])
+            return True
+        return False
+
+    def get_qt_winmain_base_name(self, is_debug: bool) -> str:
+        return 'qtmaind' if is_debug else 'qtmain'
+
+    def get_exe_args(self, compiler: 'Compiler') -> T.List[str]:
+        # Originally this was -fPIE but nowadays the default
+        # for upstream and distros seems to be -reduce-relocations
+        # which requires -fPIC. This may cause a performance
+        # penalty when using self-built Qt or on platforms
+        # where -fPIC is not required. If this is an issue
+        # for you, patches are welcome.
+        return compiler.get_pic_args()
+
+    def log_details(self) -> str:
+        return f'modules: {", ".join(sorted(self.requested_modules))}'
+
+
+class QtPkgConfigDependency(_QtBase, PkgConfigDependency, metaclass=abc.ABCMeta):
+
+    """Specialization of the PkgConfigDependency for Qt."""
+
+    def __init__(self, name: str, env: 'Environment', kwargs: T.Dict[str, T.Any]):
+        _QtBase.__init__(self, name, kwargs)
+
+        # Always use QtCore as the "main" dependency, since it has the extra
+        # pkg-config variables that a user would expect to get. If "Core" is
+        # not a requested module, delete the compile and link arguments to
+        # avoid linking with something they didn't ask for
+        PkgConfigDependency.__init__(self, self.qtpkgname + 'Core', env, kwargs)
+        if 'Core' not in self.requested_modules:
+            self.compile_args = []
+            self.link_args = []
+
+        for m in self.requested_modules:
+            mod = PkgConfigDependency(self.qtpkgname + m, self.env, kwargs, language=self.language)
+            if not mod.found():
+                self.is_found = False
+                return
+            if self.private_headers:
+                qt_inc_dir = mod.get_pkgconfig_variable('includedir', [], None)
+                mod_private_dir = os.path.join(qt_inc_dir, 'Qt' + m)
+                if not os.path.isdir(mod_private_dir):
+                    # At least some versions of homebrew don't seem to set this
+                    # up correctly. /usr/local/opt/qt/include/Qt + m_name is a
+                    # symlink to /usr/local/opt/qt/include, but the pkg-config
+                    # file points to /usr/local/Cellar/qt/x.y.z/Headers/, and
+                    # the Qt + m_name there is not a symlink, it's a file
+                    mod_private_dir = qt_inc_dir
+                mod_private_inc = _qt_get_private_includes(mod_private_dir, m, mod.version)
+                for directory in mod_private_inc:
+                    mod.compile_args.append('-I' + directory)
+            self._add_sub_dependency([lambda: mod])
+
+        if self.env.machines[self.for_machine].is_windows() and self.qtmain:
+            # Check if we link with debug binaries
+            debug_lib_name = self.qtpkgname + 'Core' + _get_modules_lib_suffix(self.version, self.env.machines[self.for_machine], True)
+            is_debug = False
+            for arg in self.get_link_args():
+                if arg == f'-l{debug_lib_name}' or arg.endswith(f'{debug_lib_name}.lib') or arg.endswith(f'{debug_lib_name}.a'):
+                    is_debug = True
+                    break
+            libdir = self.get_pkgconfig_variable('libdir', [], None)
+            if not self._link_with_qt_winmain(is_debug, libdir):
+                self.is_found = False
+                return
+
+        self.bindir = self.get_pkgconfig_host_bins(self)
+        if not self.bindir:
+            # If exec_prefix is not defined, the pkg-config file is broken
+            prefix = self.get_pkgconfig_variable('exec_prefix', [], None)
+            if prefix:
+                self.bindir = os.path.join(prefix, 'bin')
+
+        self.libexecdir = self.get_pkgconfig_host_libexecs(self)
+
+    @staticmethod
+    @abc.abstractmethod
+    def get_pkgconfig_host_bins(core: PkgConfigDependency) -> T.Optional[str]:
+        pass
+
+    @staticmethod
+    @abc.abstractmethod
+    def get_pkgconfig_host_libexecs(core: PkgConfigDependency) -> T.Optional[str]:
+        pass
+
+    @abc.abstractmethod
+    def get_private_includes(self, mod_inc_dir: str, module: str) -> T.List[str]:
+        pass
+
+    def log_info(self) -> str:
+        return 'pkg-config'
+
+
+class QmakeQtDependency(_QtBase, ConfigToolDependency, metaclass=abc.ABCMeta):
+
+    """Find Qt using Qmake as a config-tool."""
+
+    version_arg = '-v'
+
+    def __init__(self, name: str, env: 'Environment', kwargs: T.Dict[str, T.Any]):
+        _QtBase.__init__(self, name, kwargs)
+        self.tool_name = f'qmake{self.qtver}'
+        self.tools = [f'qmake{self.qtver}', f'qmake-{self.name}', 'qmake']
+
+        # Add additional constraints that the Qt version is met, but preserve
+        # any version requirements the user has set as well. For example, if Qt5
+        # is requested, add "">= 5, < 6", but if the user has ">= 5.6", don't
+        # lose that.
+        kwargs = kwargs.copy()
+        _vers = mesonlib.listify(kwargs.get('version', []))
+        _vers.extend([f'>= {self.qtver}', f'< {int(self.qtver) + 1}'])
+        kwargs['version'] = _vers
+
+        ConfigToolDependency.__init__(self, name, env, kwargs)
+        if not self.found():
+            return
+
+        # Query library path, header path, and binary path
+        stdo = self.get_config_value(['-query'], 'args')
+        qvars: T.Dict[str, str] = {}
+        for line in stdo:
+            line = line.strip()
+            if line == '':
+                continue
+            k, v = line.split(':', 1)
+            qvars[k] = v
+        # Qt on macOS uses a framework, but Qt for iOS/tvOS does not
+        xspec = qvars.get('QMAKE_XSPEC', '')
+        if self.env.machines.host.is_darwin() and not any(s in xspec for s in ['ios', 'tvos']):
+            mlog.debug("Building for macOS, looking for framework")
+            self._framework_detect(qvars, self.requested_modules, kwargs)
+            # Sometimes Qt is not built as a framework (for instance, when using the conan
+            # package manager); in that case skip it and fall back to the normal procedure.
+            if self.is_found:
+                return
+            else:
+                mlog.debug("Building for macOS, couldn't find framework, falling back to library search")
+        incdir = qvars['QT_INSTALL_HEADERS']
+        self.compile_args.append('-I' + incdir)
+        libdir = qvars['QT_INSTALL_LIBS']
+        # Used by qt.compilers_detect()
+        self.bindir = get_qmake_host_bins(qvars)
+        self.libexecdir = get_qmake_host_libexecs(qvars)
+
+        # Use the buildtype by default, but look at the b_vscrt option if the
+        # compiler supports it.
+        is_debug = self.env.coredata.get_option(mesonlib.OptionKey('buildtype')) == 'debug'
+        if mesonlib.OptionKey('b_vscrt') in self.env.coredata.options:
+            if self.env.coredata.options[mesonlib.OptionKey('b_vscrt')].value in {'mdd', 'mtd'}:
+                is_debug = True
+        modules_lib_suffix = _get_modules_lib_suffix(self.version, self.env.machines[self.for_machine], is_debug)
+
+        for module in self.requested_modules:
+            mincdir = os.path.join(incdir, 'Qt' + module)
+            self.compile_args.append('-I' + mincdir)
+
+            if module == 'QuickTest':
+                define_base = 'QMLTEST'
+            elif module == 'Test':
+                define_base = 'TESTLIB'
+            else:
+                define_base = module.upper()
+            self.compile_args.append(f'-DQT_{define_base}_LIB')
+
+            if self.private_headers:
+                priv_inc = self.get_private_includes(mincdir, module)
+                for directory in priv_inc:
+                    self.compile_args.append('-I' + directory)
+            libfiles = self.clib_compiler.find_library(
+                self.qtpkgname + module + modules_lib_suffix, self.env,
+                mesonlib.listify(libdir)) # TODO: shouldn't be necessary
+            if libfiles:
+                libfile = libfiles[0]
+            else:
+                mlog.log("Could not find:", module,
+                         self.qtpkgname + module + modules_lib_suffix,
+                         'in', libdir)
+                self.is_found = False
+                break
+            self.link_args.append(libfile)
+
+        if self.env.machines[self.for_machine].is_windows() and self.qtmain:
+            if not self._link_with_qt_winmain(is_debug, libdir):
+                self.is_found = False
+
+    def _sanitize_version(self, version: str) -> str:
+        m = re.search(rf'({self.qtver}(\.\d+)+)', version)
+        if m:
+            return m.group(0).rstrip('.')
+        return version
+
+    @abc.abstractmethod
+    def get_private_includes(self, mod_inc_dir: str, module: str) -> T.List[str]:
+        pass
+
+    def _framework_detect(self, qvars: T.Dict[str, str], modules: T.List[str], kwargs: T.Dict[str, T.Any]) -> None:
+        libdir = qvars['QT_INSTALL_LIBS']
+
+        # ExtraFrameworkDependency doesn't support the 'method' keyword, so drop it
+        fw_kwargs = kwargs.copy()
+        fw_kwargs.pop('method', None)
+        fw_kwargs['paths'] = [libdir]
+
+        for m in modules:
+            fname = 'Qt' + m
+            mlog.debug('Looking for qt framework ' + fname)
+            fwdep = QtExtraFrameworkDependency(fname, self.env, fw_kwargs, qvars, language=self.language)
+            if fwdep.found():
+                self.compile_args.append('-F' + libdir)
+                self.compile_args += fwdep.get_compile_args(with_private_headers=self.private_headers,
+                                                            qt_version=self.version)
+                self.link_args += fwdep.get_link_args()
+            else:
+                self.is_found = False
+                break
+        else:
+            self.is_found = True
+            # Used by self.compilers_detect()
+            self.bindir = get_qmake_host_bins(qvars)
+            self.libexecdir = get_qmake_host_libexecs(qvars)
+
+    def log_info(self) -> str:
+        return 'qmake'
+
+
+class Qt6WinMainMixin:
+
+    def get_qt_winmain_base_name(self, is_debug: bool) -> str:
+        return 'Qt6EntryPointd' if is_debug else 'Qt6EntryPoint'
+
+
+class Qt4ConfigToolDependency(QmakeQtDependency):
+
+    def get_private_includes(self, mod_inc_dir: str, module: str) -> T.List[str]:
+        return []
+
+
+class Qt5ConfigToolDependency(QmakeQtDependency):
+
+    def get_private_includes(self, mod_inc_dir: str, module: str) -> T.List[str]:
+        return _qt_get_private_includes(mod_inc_dir, module, self.version)
+
+
+class Qt6ConfigToolDependency(Qt6WinMainMixin, QmakeQtDependency):
+
+    def get_private_includes(self, mod_inc_dir: str, module: str) -> T.List[str]:
+        return _qt_get_private_includes(mod_inc_dir, module, self.version)
+
+
+class Qt4PkgConfigDependency(QtPkgConfigDependency):
+
+    @staticmethod
+    def get_pkgconfig_host_bins(core: PkgConfigDependency) -> T.Optional[str]:
+        # Only return one bins dir: for Qt4 the tools are generally all in one
+        # directory, and in Qt5 they must all be in one directory. Return
+        # the first one found among the bin variables, in case one tool is not
+        # configured to be built.
+        applications = ['moc', 'uic', 'rcc', 'lupdate', 'lrelease']
+        for application in applications:
+            try:
+                return os.path.dirname(core.get_pkgconfig_variable(f'{application}_location', [], None))
+            except mesonlib.MesonException:
+                pass
+        return None
+
+    def get_private_includes(self, mod_inc_dir: str, module: str) -> T.List[str]:
+        return []
+
+    @staticmethod
+    def get_pkgconfig_host_libexecs(core: PkgConfigDependency) -> T.Optional[str]:
+        return None
+
+
+class Qt5PkgConfigDependency(QtPkgConfigDependency):
+
+    @staticmethod
+    def get_pkgconfig_host_bins(core: PkgConfigDependency) -> str:
+        return core.get_pkgconfig_variable('host_bins', [], None)
+
+    @staticmethod
+    def get_pkgconfig_host_libexecs(core: PkgConfigDependency) -> T.Optional[str]:
+        return None
+
+    def get_private_includes(self, mod_inc_dir: str, module: str) -> T.List[str]:
+        return _qt_get_private_includes(mod_inc_dir, module, self.version)
+
+
+class Qt6PkgConfigDependency(Qt6WinMainMixin, QtPkgConfigDependency):
+
+    def __init__(self, name: str, env: 'Environment', kwargs: T.Dict[str, T.Any]):
+        super().__init__(name, env, kwargs)
+        if not self.libexecdir:
+            mlog.debug(f'detected Qt6 {self.version} pkg-config dependency does not '
+                       'have proper tools support, ignoring')
+            self.is_found = False
+
+    @staticmethod
+    def get_pkgconfig_host_bins(core: PkgConfigDependency) -> str:
+        return core.get_pkgconfig_variable('bindir', [], None)
+
+    @staticmethod
+    def get_pkgconfig_host_libexecs(core: PkgConfigDependency) -> str:
+        # Qt6 pkg-config files define libexecdir only from 6.3 onward
+        return core.get_pkgconfig_variable('libexecdir', [], None)
+
+    def get_private_includes(self, mod_inc_dir: str, module: str) -> T.List[str]:
+        return _qt_get_private_includes(mod_inc_dir, module, self.version)
+
+
+packages['qt4'] = qt4_factory = DependencyFactory(
+    'qt4',
+    [DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL],
+    pkgconfig_class=Qt4PkgConfigDependency,
+    configtool_class=Qt4ConfigToolDependency,
+)
+
+packages['qt5'] = qt5_factory = DependencyFactory(
+    'qt5',
+    [DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL],
+    pkgconfig_class=Qt5PkgConfigDependency,
+    configtool_class=Qt5ConfigToolDependency,
+)
+
+packages['qt6'] = qt6_factory = DependencyFactory(
+    'qt6',
+    [DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL],
+    pkgconfig_class=Qt6PkgConfigDependency,
+    configtool_class=Qt6ConfigToolDependency,
+)
diff --git a/vendored-meson/meson/mesonbuild/dependencies/scalapack.py b/vendored-meson/meson/mesonbuild/dependencies/scalapack.py
new file mode 100644
index 000000000000..257e4aaaa7b3
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/dependencies/scalapack.py
@@ -0,0 +1,159 @@
+# Copyright 2013-2020 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+from pathlib import Path
+import functools
+import os
+import typing as T
+
+from ..mesonlib import OptionKey
+from .base import DependencyMethods
+from .base import DependencyException
+from .cmake import CMakeDependency
+from .detect import packages
+from .pkgconfig import PkgConfigDependency
+from .factory import factory_methods
+
+if T.TYPE_CHECKING:
+    from ..environment import Environment, MachineChoice
+    from .factory import DependencyGenerator
+
+
+@factory_methods({DependencyMethods.PKGCONFIG, DependencyMethods.CMAKE})
+def scalapack_factory(env: 'Environment', for_machine: 'MachineChoice',
+                      kwargs: T.Dict[str, T.Any],
+                      methods: T.List[DependencyMethods]) -> T.List['DependencyGenerator']:
+    candidates: T.List['DependencyGenerator'] = []
+
+    if DependencyMethods.PKGCONFIG in methods:
+        static_opt = kwargs.get('static', env.coredata.get_option(OptionKey('prefer_static')))
+        mkl = 'mkl-static-lp64-iomp' if static_opt else 'mkl-dynamic-lp64-iomp'
+        candidates.append(functools.partial(
+            MKLPkgConfigDependency, mkl, env, kwargs))
+
+        for pkg in ['scalapack-openmpi', 'scalapack']:
+            candidates.append(functools.partial(
+                PkgConfigDependency, pkg, env, kwargs))
+
+    if DependencyMethods.CMAKE in methods:
+        candidates.append(functools.partial(
+            CMakeDependency, 'Scalapack', env, kwargs))
+
+    return candidates
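+
+# Illustrative candidate order with prefer_static=true: first the
+# 'mkl-static-lp64-iomp' pkg-config package, then plain pkg-config lookups of
+# 'scalapack-openmpi' and 'scalapack', and finally CMake's 'Scalapack' package.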
+
+packages['scalapack'] = scalapack_factory
+
+
+class MKLPkgConfigDependency(PkgConfigDependency):
+
+    """PkgConfigDependency for Intel MKL.
+
+    MKL's pkg-config is pretty much borked in every way. We need to apply a
+    bunch of fixups to make it work correctly.
+    """
+
+    def __init__(self, name: str, env: 'Environment', kwargs: T.Dict[str, T.Any],
+                 language: T.Optional[str] = None):
+        _m = os.environ.get('MKLROOT')
+        self.__mklroot = Path(_m).resolve() if _m else None
+
+        # We need to call down into the normal super() method even if we don't
+        # find mklroot, otherwise we won't have all of the instance variables
+        # initialized that meson expects.
+        super().__init__(name, env, kwargs, language=language)
+
+        # Doesn't work with gcc on windows, but does on Linux
+        if (not self.__mklroot or (env.machines[self.for_machine].is_windows()
+                                   and self.clib_compiler.id == 'gcc')):
+            self.is_found = False
+
+        # This can happen either because we're using GCC on Windows, because
+        # MKLROOT wasn't found, or because pkg-config couldn't find MKL.
+        if not self.is_found:
+            return
+
+        assert self.version != '', 'This should not happen if we didn\'t return above'
+
+        if self.version == 'unknown':
+            # At least by 2020 the version is in the pkg-config, just not with
+            # the correct name
+            v = self.get_variable(pkgconfig='Version', default_value='')
+
+            if not v and self.__mklroot:
+                try:
+                    v = (
+                        self.__mklroot.as_posix()
+                        .split('compilers_and_libraries_')[1]
+                        .split('/', 1)[0]
+                    )
+                except IndexError:
+                    pass
+
+            if v:
+                assert isinstance(v, str)
+                self.version = v
+
+    def _set_libs(self) -> None:
+        super()._set_libs()
+
+        if self.env.machines[self.for_machine].is_windows():
+            suffix = '.lib'
+        elif self.static:
+            suffix = '.a'
+        else:
+            suffix = ''
+        libdir = self.__mklroot / 'lib/intel64'
+
+        if self.clib_compiler.id == 'gcc':
+            for i, a in enumerate(self.link_args):
+                # only replace in filename, not in directory names
+                dirname, basename = os.path.split(a)
+                if 'mkl_intel_lp64' in basename:
+                    basename = basename.replace('intel', 'gf')
+                    self.link_args[i] = '/' + os.path.join(dirname, basename)
+        # MKL's pkg-config omits scalapack, so insert it ourselves, making sure
+        # it lands after any leading "-L" and "-Wl" arguments.
+        i = 0
+        for j, a in enumerate(self.link_args):
+            if a.startswith(('-L', '-Wl')):
+                i = j + 1
+            elif j > 3:
+                break
+        if self.env.machines[self.for_machine].is_windows() or self.static:
+            self.link_args.insert(
+                i, str(libdir / ('mkl_scalapack_lp64' + suffix))
+            )
+            self.link_args.insert(
+                i + 1, str(libdir / ('mkl_blacs_intelmpi_lp64' + suffix))
+            )
+        else:
+            self.link_args.insert(i, '-lmkl_scalapack_lp64')
+            self.link_args.insert(i + 1, '-lmkl_blacs_intelmpi_lp64')
+
+    def _set_cargs(self) -> None:
+        env = None
+        if self.language == 'fortran':
+            # gfortran doesn't appear to look in system paths for INCLUDE files,
+            # so don't allow pkg-config to suppress -I flags for system paths
+            env = os.environ.copy()
+            env['PKG_CONFIG_ALLOW_SYSTEM_CFLAGS'] = '1'
+        ret, out, err = self._call_pkgbin([
+            '--cflags', self.name,
+            '--define-variable=prefix=' + self.__mklroot.as_posix()],
+            env=env)
+        if ret != 0:
+            raise DependencyException('Could not generate cargs for %s:\n%s\n' %
+                                      (self.name, err))
+        self.compile_args = self._convert_mingw_paths(self._split_args(out))
diff --git a/vendored-meson/meson/mesonbuild/dependencies/ui.py b/vendored-meson/meson/mesonbuild/dependencies/ui.py
new file mode 100644
index 000000000000..1dffa1f6b9e0
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/dependencies/ui.py
@@ -0,0 +1,268 @@
+# Copyright 2013-2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This file contains the detection logic for external dependencies that
+# are UI-related.
+from __future__ import annotations
+
+import os
+import subprocess
+import typing as T
+
+from .. import mlog
+from .. import mesonlib
+from ..mesonlib import (
+    Popen_safe, extract_as_list, version_compare_many
+)
+from ..environment import detect_cpu_family
+
+from .base import DependencyException, DependencyMethods, DependencyTypeName, SystemDependency
+from .configtool import ConfigToolDependency
+from .detect import packages
+from .factory import DependencyFactory
+
+if T.TYPE_CHECKING:
+    from ..environment import Environment
+
+
+class GLDependencySystem(SystemDependency):
+    def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]) -> None:
+        super().__init__(name, environment, kwargs)
+
+        if self.env.machines[self.for_machine].is_darwin():
+            self.is_found = True
+            # FIXME: Use AppleFrameworks dependency
+            self.link_args = ['-framework', 'OpenGL']
+            # FIXME: Detect version using self.clib_compiler
+            return
+        elif self.env.machines[self.for_machine].is_windows():
+            self.is_found = True
+            # FIXME: Use self.clib_compiler.find_library()
+            self.link_args = ['-lopengl32']
+            # FIXME: Detect version using self.clib_compiler
+            return
+        else:
+            links = self.clib_compiler.find_library('GL', environment, [])
+            has_header = self.clib_compiler.has_header('GL/gl.h', '', environment)[0]
+            if links and has_header:
+                self.is_found = True
+                self.link_args = links
+            elif links:
+                raise DependencyException('Found GL runtime library but no development header files')
+
+class GnuStepDependency(ConfigToolDependency):
+
+    tools = ['gnustep-config']
+    tool_name = 'gnustep-config'
+
+    def __init__(self, environment: 'Environment', kwargs: T.Dict[str, T.Any]) -> None:
+        super().__init__('gnustep', environment, kwargs, language='objc')
+        if not self.is_found:
+            return
+        self.modules = kwargs.get('modules', [])
+        self.compile_args = self.filter_args(
+            self.get_config_value(['--objc-flags'], 'compile_args'))
+        self.link_args = self.weird_filter(self.get_config_value(
+            ['--gui-libs' if 'gui' in self.modules else '--base-libs'],
+            'link_args'))
+
+    def find_config(self, versions: T.Optional[T.List[str]] = None, returncode: int = 0) -> T.Tuple[T.Optional[T.List[str]], T.Optional[str]]:
+        tool = [self.tools[0]]
+        try:
+            p, out = Popen_safe(tool + ['--help'])[:2]
+        except (FileNotFoundError, PermissionError):
+            return (None, None)
+        if p.returncode != returncode:
+            return (None, None)
+        self.config = tool
+        found_version = self.detect_version()
+        if versions and not version_compare_many(found_version, versions)[0]:
+            return (None, found_version)
+
+        return (tool, found_version)
+
+    @staticmethod
+    def weird_filter(elems: T.List[str]) -> T.List[str]:
+        """When building packages, the output of the enclosing Make is
+        sometimes mixed among the subprocess output. I have no idea why. As a
+        hack, filter out everything that is not a flag.
+        """
+        return [e for e in elems if e.startswith('-')]
+
+    @staticmethod
+    def filter_args(args: T.List[str]) -> T.List[str]:
+        """gnustep-config returns a bunch of garbage args such as -O2 and so
+        on. Drop everything that is not needed.
+        """
+        result = []
+        for f in args:
+            if f.startswith('-D') \
+                    or f.startswith('-f') \
+                    or f.startswith('-I') \
+                    or f == '-pthread' \
+                    or (f.startswith('-W') and not f == '-Wall'):
+                result.append(f)
+        return result
+
+    def detect_version(self) -> str:
+        gmake = self.get_config_value(['--variable=GNUMAKE'], 'variable')[0]
+        makefile_dir = self.get_config_value(['--variable=GNUSTEP_MAKEFILES'], 'variable')[0]
+        # This Makefile has the GNUStep version set
+        base_make = os.path.join(makefile_dir, 'Additional', 'base.make')
+        # Print the Makefile variable passed as the argument. For instance, if
+        # you run the make target `print-SOME_VARIABLE`, this will print the
+        # value of the variable `SOME_VARIABLE`.
+        printver = "print-%:\n\t@echo '$($*)'"
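+        # Illustrative: with this rule, `make print-GNUSTEP_BASE_VERSION`
+        # echoes the value of GNUSTEP_BASE_VERSION; the Popen_safe call below
+        # feeds the rule via stdin together with base.make.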
+        env = os.environ.copy()
+        # See base.make to understand why this is set
+        env['FOUNDATION_LIB'] = 'gnu'
+        p, o, e = Popen_safe([gmake, '-f', '-', '-f', base_make,
+                              'print-GNUSTEP_BASE_VERSION'],
+                             env=env, write=printver, stdin=subprocess.PIPE)
+        version = o.strip()
+        if not version:
+            mlog.debug("Couldn't detect GNUStep version, falling back to '1'")
+            # Fallback to setting some 1.x version
+            version = '1'
+        return version
+
+packages['gnustep'] = GnuStepDependency
+
+
+class SDL2DependencyConfigTool(ConfigToolDependency):
+
+    tools = ['sdl2-config']
+    tool_name = 'sdl2-config'
+
+    def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any]):
+        super().__init__(name, environment, kwargs)
+        if not self.is_found:
+            return
+        self.compile_args = self.get_config_value(['--cflags'], 'compile_args')
+        self.link_args = self.get_config_value(['--libs'], 'link_args')
+
+
+class WxDependency(ConfigToolDependency):
+
+    tools = ['wx-config-3.0', 'wx-config-3.1', 'wx-config', 'wx-config-gtk3']
+    tool_name = 'wx-config'
+
+    def __init__(self, environment: 'Environment', kwargs: T.Dict[str, T.Any]):
+        super().__init__('WxWidgets', environment, kwargs, language='cpp')
+        if not self.is_found:
+            return
+        self.requested_modules = self.get_requested(kwargs)
+
+        extra_args = []
+        if self.static:
+            extra_args.append('--static=yes')
+
+            # Check to make sure static is going to work
+            err = Popen_safe(self.config + extra_args)[2]
+            if 'No config found to match' in err:
+                mlog.debug('WxWidgets is missing static libraries.')
+                self.is_found = False
+                return
+
+        # wx-config has a --cflags option as well, but since wxWidgets requires
+        # C++, using --cxxflags should be good, at least for now.
+        self.compile_args = self.get_config_value(['--cxxflags'] + extra_args + self.requested_modules, 'compile_args')
+        self.link_args = self.get_config_value(['--libs'] + extra_args + self.requested_modules, 'link_args')
+
+    @staticmethod
+    def get_requested(kwargs: T.Dict[str, T.Any]) -> T.List[str]:
+        if 'modules' not in kwargs:
+            return []
+        candidates = extract_as_list(kwargs, 'modules')
+        for c in candidates:
+            if not isinstance(c, str):
+                raise DependencyException('wxwidgets module argument is not a string')
+        return candidates
+
+packages['wxwidgets'] = WxDependency
+
+class VulkanDependencySystem(SystemDependency):
+
+    def __init__(self, name: str, environment: 'Environment', kwargs: T.Dict[str, T.Any], language: T.Optional[str] = None) -> None:
+        super().__init__(name, environment, kwargs, language=language)
+
+        try:
+            self.vulkan_sdk = os.environ['VULKAN_SDK']
+            if not os.path.isabs(self.vulkan_sdk):
+                raise DependencyException('VULKAN_SDK must be an absolute path.')
+        except KeyError:
+            self.vulkan_sdk = None
+
+        if self.vulkan_sdk:
+            # TODO: this config might not work on some platforms; fix bugs as reported.
+            # We should at least detect other 64-bit platforms (e.g. armv8).
+            lib_name = 'vulkan'
+            lib_dir = 'lib'
+            inc_dir = 'include'
+            if mesonlib.is_windows():
+                lib_name = 'vulkan-1'
+                lib_dir = 'Lib32'
+                inc_dir = 'Include'
+                if detect_cpu_family(self.env.coredata.compilers.host) == 'x86_64':
+                    lib_dir = 'Lib'
+
+            # make sure header and lib are valid
+            inc_path = os.path.join(self.vulkan_sdk, inc_dir)
+            header = os.path.join(inc_path, 'vulkan', 'vulkan.h')
+            lib_path = os.path.join(self.vulkan_sdk, lib_dir)
+            find_lib = self.clib_compiler.find_library(lib_name, environment, [lib_path])
+
+            if not find_lib:
+                raise DependencyException('VULKAN_SDK points to an invalid directory (no lib)')
+
+            if not os.path.isfile(header):
+                raise DependencyException('VULKAN_SDK points to an invalid directory (no include)')
+
+            # XXX: this is very odd, and may deserve being removed
+            self.type_name = DependencyTypeName('vulkan_sdk')
+            self.is_found = True
+            self.compile_args.append('-I' + inc_path)
+            self.link_args.append('-L' + lib_path)
+            self.link_args.append('-l' + lib_name)
+
+            # TODO: find a way to retrieve the version from the sdk?
+            # Usually it is a part of the path to it (but does not have to be)
+            return
+        else:
+            # simply try to guess it, usually works on linux
+            libs = self.clib_compiler.find_library('vulkan', environment, [])
+            if libs is not None and self.clib_compiler.has_header('vulkan/vulkan.h', '', environment, disable_cache=True)[0]:
+                self.is_found = True
+                for lib in libs:
+                    self.link_args.append(lib)
+                return
+
+packages['gl'] = gl_factory = DependencyFactory(
+    'gl',
+    [DependencyMethods.PKGCONFIG, DependencyMethods.SYSTEM],
+    system_class=GLDependencySystem,
+)
+
+packages['sdl2'] = sdl2_factory = DependencyFactory(
+    'sdl2',
+    [DependencyMethods.PKGCONFIG, DependencyMethods.CONFIG_TOOL, DependencyMethods.EXTRAFRAMEWORK, DependencyMethods.CMAKE],
+    configtool_class=SDL2DependencyConfigTool,
+    cmake_name='SDL2',
+)
+
+packages['vulkan'] = vulkan_factory = DependencyFactory(
+    'vulkan',
+    [DependencyMethods.PKGCONFIG, DependencyMethods.SYSTEM],
+    system_class=VulkanDependencySystem,
+)
diff --git a/vendored-meson/meson/mesonbuild/depfile.py b/vendored-meson/meson/mesonbuild/depfile.py
new file mode 100644
index 000000000000..d346136edc6f
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/depfile.py
@@ -0,0 +1,91 @@
+# Copyright 2019 Red Hat, Inc.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+import typing as T
+
+
+def parse(lines: T.Iterable[str]) -> T.List[T.Tuple[T.List[str], T.List[str]]]:
+    rules: T.List[T.Tuple[T.List[str], T.List[str]]] = []
+    targets: T.List[str] = []
+    deps: T.List[str] = []
+    in_deps = False
+    out = ''
+    for line in lines:
+        if not line.endswith('\n'):
+            line += '\n'
+        escape = None
+        for c in line:
+            if escape:
+                if escape == '$' and c != '$':
+                    out += '$'
+                if escape == '\\' and c == '\n':
+                    continue
+                out += c
+                escape = None
+                continue
+            if c in {'\\', '$'}:
+                escape = c
+                continue
+            elif c in {' ', '\n'}:
+                if out != '':
+                    if in_deps:
+                        deps.append(out)
+                    else:
+                        targets.append(out)
+                out = ''
+                if c == '\n':
+                    rules.append((targets, deps))
+                    targets = []
+                    deps = []
+                    in_deps = False
+                continue
+            elif c == ':':
+                targets.append(out)
+                out = ''
+                in_deps = True
+                continue
+            out += c
+    return rules
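+
+# Illustrative example: parse(['out.o: a.c b.h\n']) returns
+# [(['out.o'], ['a.c', 'b.h'])].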
+
+class Target(T.NamedTuple):
+
+    deps: T.Set[str]
+
+
+class DepFile:
+    def __init__(self, lines: T.Iterable[str]):
+        rules = parse(lines)
+        depfile: T.Dict[str, Target] = {}
+        for (targets, deps) in rules:
+            for target in targets:
+                t = depfile.setdefault(target, Target(deps=set()))
+                for dep in deps:
+                    t.deps.add(dep)
+        self.depfile = depfile
+
+    def get_all_dependencies(self, name: str, visited: T.Optional[T.Set[str]] = None) -> T.List[str]:
+        deps: T.Set[str] = set()
+        if not visited:
+            visited = set()
+        if name in visited:
+            return []
+        visited.add(name)
+
+        target = self.depfile.get(name)
+        if not target:
+            return []
+        deps.update(target.deps)
+        for dep in target.deps:
+            deps.update(self.get_all_dependencies(dep, visited))
+        return sorted(deps)
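+
+# Illustrative example:
+#   DepFile(['a: b\n', 'b: c\n']).get_all_dependencies('a')  # -> ['b', 'c']
+# (transitive dependencies, sorted; cycles are broken via the `visited` set)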
diff --git a/vendored-meson/meson/mesonbuild/envconfig.py b/vendored-meson/meson/mesonbuild/envconfig.py
new file mode 100644
index 000000000000..7e0c56703121
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/envconfig.py
@@ -0,0 +1,473 @@
+# Copyright 2012-2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+from dataclasses import dataclass
+import subprocess
+import typing as T
+from enum import Enum
+
+from . import mesonlib
+from .mesonlib import EnvironmentException, HoldableObject
+from . import mlog
+from pathlib import Path
+
+
+# These classes contain all the data pulled from configuration files (native
+# and cross files currently), and also assist with reading environment
+# variables.
+#
+# At this time there isn't an ironclad difference between this and other sources
+# of state like `coredata`. But one rough guide is that what is in `coredata` is
+# the *output* of the configuration process: the final decisions after tests.
+# This, on the other hand, has *inputs*. The config files are parsed, but
+# otherwise minimally transformed. When more complex fallbacks (environment
+# detection) exist, they are defined elsewhere as functions that construct
+# instances of these classes.
+
+
+known_cpu_families = (
+    'aarch64',
+    'alpha',
+    'arc',
+    'arm',
+    'avr',
+    'c2000',
+    'csky',
+    'dspic',
+    'e2k',
+    'ft32',
+    'ia64',
+    'loongarch64',
+    'm68k',
+    'microblaze',
+    'mips',
+    'mips64',
+    'msp430',
+    'parisc',
+    'pic24',
+    'ppc',
+    'ppc64',
+    'riscv32',
+    'riscv64',
+    'rl78',
+    'rx',
+    's390',
+    's390x',
+    'sh4',
+    'sparc',
+    'sparc64',
+    'wasm32',
+    'wasm64',
+    'x86',
+    'x86_64',
+)
+
+# It would feel more natural to call this "64_BIT_CPU_FAMILIES", but
+# python identifiers cannot start with numbers
+CPU_FAMILIES_64_BIT = [
+    'aarch64',
+    'alpha',
+    'ia64',
+    'loongarch64',
+    'mips64',
+    'ppc64',
+    'riscv64',
+    's390x',
+    'sparc64',
+    'wasm64',
+    'x86_64',
+]
+
+# Map from language identifiers to environment variables.
+ENV_VAR_COMPILER_MAP: T.Mapping[str, str] = {
+    # Compilers
+    'c': 'CC',
+    'cpp': 'CXX',
+    'cs': 'CSC',
+    'd': 'DC',
+    'fortran': 'FC',
+    'objc': 'OBJC',
+    'objcpp': 'OBJCXX',
+    'rust': 'RUSTC',
+    'vala': 'VALAC',
+    'nasm': 'NASM',
+
+    # Linkers
+    'c_ld': 'CC_LD',
+    'cpp_ld': 'CXX_LD',
+    'd_ld': 'DC_LD',
+    'fortran_ld': 'FC_LD',
+    'objc_ld': 'OBJC_LD',
+    'objcpp_ld': 'OBJCXX_LD',
+    'rust_ld': 'RUSTC_LD',
+}
+
+# Map from utility names to environment variables.
+ENV_VAR_TOOL_MAP: T.Mapping[str, str] = {
+    # Binutils
+    'ar': 'AR',
+    'as': 'AS',
+    'ld': 'LD',
+    'nm': 'NM',
+    'objcopy': 'OBJCOPY',
+    'objdump': 'OBJDUMP',
+    'ranlib': 'RANLIB',
+    'readelf': 'READELF',
+    'size': 'SIZE',
+    'strings': 'STRINGS',
+    'strip': 'STRIP',
+    'windres': 'WINDRES',
+
+    # Other tools
+    'cmake': 'CMAKE',
+    'qmake': 'QMAKE',
+    'pkgconfig': 'PKG_CONFIG',
+    'pkg-config': 'PKG_CONFIG',
+    'make': 'MAKE',
+    'vapigen': 'VAPIGEN',
+    'llvm-config': 'LLVM_CONFIG',
+}
+
+ENV_VAR_PROG_MAP = {**ENV_VAR_COMPILER_MAP, **ENV_VAR_TOOL_MAP}
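+
+# For example (illustrative): ENV_VAR_PROG_MAP['cpp'] == 'CXX' and
+# ENV_VAR_PROG_MAP['cpp_ld'] == 'CXX_LD', so the C++ compiler and its linker
+# can be overridden via the CXX and CXX_LD environment variables.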
+
+# Deprecated environment variables mapped from the new variable to the old one
+# Deprecated in 0.54.0
+DEPRECATED_ENV_PROG_MAP: T.Mapping[str, str] = {
+    'd_ld': 'D_LD',
+    'fortran_ld': 'F_LD',
+    'rust_ld': 'RUST_LD',
+    'objcpp_ld': 'OBJCPP_LD',
+}
+
+class CMakeSkipCompilerTest(Enum):
+    ALWAYS = 'always'
+    NEVER = 'never'
+    DEP_ONLY = 'dep_only'
+
+class Properties:
+    def __init__(
+            self,
+            properties: T.Optional[T.Dict[str, T.Optional[T.Union[str, bool, int, T.List[str]]]]] = None,
+    ):
+        self.properties = properties or {}  # type: T.Dict[str, T.Optional[T.Union[str, bool, int, T.List[str]]]]
+
+    def has_stdlib(self, language: str) -> bool:
+        return language + '_stdlib' in self.properties
+
+    # Some of get_stdlib, get_root, get_sys_root are wider than is actually
+    # true, but without heterogeneous dict annotations it's not practical to
+    # narrow them
+    def get_stdlib(self, language: str) -> T.Union[str, T.List[str]]:
+        stdlib = self.properties[language + '_stdlib']
+        if isinstance(stdlib, str):
+            return stdlib
+        assert isinstance(stdlib, list)
+        for i in stdlib:
+            assert isinstance(i, str)
+        return stdlib
+
+    def get_root(self) -> T.Optional[str]:
+        root = self.properties.get('root', None)
+        assert root is None or isinstance(root, str)
+        return root
+
+    def get_sys_root(self) -> T.Optional[str]:
+        sys_root = self.properties.get('sys_root', None)
+        assert sys_root is None or isinstance(sys_root, str)
+        return sys_root
+
+    def get_pkg_config_libdir(self) -> T.Optional[T.List[str]]:
+        p = self.properties.get('pkg_config_libdir', None)
+        if p is None:
+            return p
+        res = mesonlib.listify(p)
+        for i in res:
+            assert isinstance(i, str)
+        return res
+
+    def get_cmake_defaults(self) -> bool:
+        if 'cmake_defaults' not in self.properties:
+            return True
+        res = self.properties['cmake_defaults']
+        assert isinstance(res, bool)
+        return res
+
+    def get_cmake_toolchain_file(self) -> T.Optional[Path]:
+        if 'cmake_toolchain_file' not in self.properties:
+            return None
+        raw = self.properties['cmake_toolchain_file']
+        assert isinstance(raw, str)
+        cmake_toolchain_file = Path(raw)
+        if not cmake_toolchain_file.is_absolute():
+            raise EnvironmentException(f'cmake_toolchain_file ({raw}) is not absolute')
+        return cmake_toolchain_file
+
+    def get_cmake_skip_compiler_test(self) -> CMakeSkipCompilerTest:
+        if 'cmake_skip_compiler_test' not in self.properties:
+            return CMakeSkipCompilerTest.DEP_ONLY
+        raw = self.properties['cmake_skip_compiler_test']
+        assert isinstance(raw, str)
+        try:
+            return CMakeSkipCompilerTest(raw)
+        except ValueError:
+            raise EnvironmentException(
+                '"{}" is not a valid value for cmake_skip_compiler_test. Supported values are {}'
+                .format(raw, [e.value for e in CMakeSkipCompilerTest]))
+
+    def get_cmake_use_exe_wrapper(self) -> bool:
+        if 'cmake_use_exe_wrapper' not in self.properties:
+            return True
+        res = self.properties['cmake_use_exe_wrapper']
+        assert isinstance(res, bool)
+        return res
+
+    def get_java_home(self) -> T.Optional[Path]:
+        value = T.cast('T.Optional[str]', self.properties.get('java_home'))
+        return Path(value) if value else None
+
+    def get_bindgen_clang_args(self) -> T.List[str]:
+        value = mesonlib.listify(self.properties.get('bindgen_clang_arguments', []))
+        if not all(isinstance(v, str) for v in value):
+            raise EnvironmentException('bindgen_clang_arguments must be a string or an array of strings')
+        return T.cast('T.List[str]', value)
+
+    def __eq__(self, other: object) -> bool:
+        if isinstance(other, type(self)):
+            return self.properties == other.properties
+        return NotImplemented
+
+    # TODO consider removing so Properties is less freeform
+    def __getitem__(self, key: str) -> T.Optional[T.Union[str, bool, int, T.List[str]]]:
+        return self.properties[key]
+
+    # TODO consider removing so Properties is less freeform
+    def __contains__(self, item: T.Union[str, bool, int, T.List[str]]) -> bool:
+        return item in self.properties
+
+    # TODO consider removing, for same reasons as above
+    def get(self, key: str, default: T.Optional[T.Union[str, bool, int, T.List[str]]] = None) -> T.Optional[T.Union[str, bool, int, T.List[str]]]:
+        return self.properties.get(key, default)
+
+@dataclass(unsafe_hash=True)
+class MachineInfo(HoldableObject):
+    system: str
+    cpu_family: str
+    cpu: str
+    endian: str
+    kernel: T.Optional[str]
+    subsystem: T.Optional[str]
+
+    def __post_init__(self) -> None:
+        self.is_64_bit: bool = self.cpu_family in CPU_FAMILIES_64_BIT
+
+    def __repr__(self) -> str:
+        return f'<MachineInfo: {self.system} {self.cpu_family} ({self.cpu})>'
+
+    @classmethod
+    def from_literal(cls, literal: T.Dict[str, str]) -> 'MachineInfo':
+        minimum_literal = {'cpu', 'cpu_family', 'endian', 'system'}
+        if set(literal) < minimum_literal:
+            raise EnvironmentException(
+                f'Machine info is currently {literal}\n' +
+                'but is missing {}.'.format(minimum_literal - set(literal)))
+
+        cpu_family = literal['cpu_family']
+        if cpu_family not in known_cpu_families:
+            mlog.warning(f'Unknown CPU family {cpu_family}, please report this at https://github.com/mesonbuild/meson/issues/new')
+
+        endian = literal['endian']
+        if endian not in ('little', 'big'):
+            mlog.warning(f'Unknown endian {endian}')
+
+        system = literal['system']
+        kernel = literal.get('kernel', None)
+        subsystem = literal.get('subsystem', None)
+
+        return cls(system, cpu_family, literal['cpu'], endian, kernel, subsystem)
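+
+    # Illustrative minimal literal accepted by from_literal():
+    #   MachineInfo.from_literal({'system': 'linux', 'cpu_family': 'x86_64',
+    #                             'cpu': 'x86_64', 'endian': 'little'})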
+
+    def is_windows(self) -> bool:
+        """
+        Machine is windows?
+        """
+        return self.system == 'windows'
+
+    def is_cygwin(self) -> bool:
+        """
+        Machine is cygwin?
+        """
+        return self.system == 'cygwin'
+
+    def is_linux(self) -> bool:
+        """
+        Machine is linux?
+        """
+        return self.system == 'linux'
+
+    def is_darwin(self) -> bool:
+        """
+        Machine is Darwin (iOS/tvOS/OS X)?
+        """
+        return self.system in {'darwin', 'ios', 'tvos'}
+
+    def is_android(self) -> bool:
+        """
+        Machine is Android?
+        """
+        return self.system == 'android'
+
+    def is_haiku(self) -> bool:
+        """
+        Machine is Haiku?
+        """
+        return self.system == 'haiku'
+
+    def is_netbsd(self) -> bool:
+        """
+        Machine is NetBSD?
+        """
+        return self.system == 'netbsd'
+
+    def is_openbsd(self) -> bool:
+        """
+        Machine is OpenBSD?
+        """
+        return self.system == 'openbsd'
+
+    def is_dragonflybsd(self) -> bool:
+        """Machine is DragonflyBSD?"""
+        return self.system == 'dragonfly'
+
+    def is_freebsd(self) -> bool:
+        """Machine is FreeBSD?"""
+        return self.system == 'freebsd'
+
+    def is_sunos(self) -> bool:
+        """Machine is illumos or Solaris?"""
+        return self.system == 'sunos'
+
+    def is_hurd(self) -> bool:
+        """
+        Machine is GNU/Hurd?
+        """
+        return self.system == 'gnu'
+
+    def is_aix(self) -> bool:
+        """
+        Machine is aix?
+        """
+        return self.system == 'aix'
+
+    def is_irix(self) -> bool:
+        """Machine is IRIX?"""
+        return self.system.startswith('irix')
+
+    # Various prefixes and suffixes for import libraries, shared libraries,
+    # static libraries, and executables.
+    # Versioning is added to these names in the backends as-needed.
+    def get_exe_suffix(self) -> str:
+        if self.is_windows() or self.is_cygwin():
+            return 'exe'
+        else:
+            return ''
+
+    def get_object_suffix(self) -> str:
+        if self.is_windows():
+            return 'obj'
+        else:
+            return 'o'
+
+    def libdir_layout_is_win(self) -> bool:
+        return self.is_windows() or self.is_cygwin()
+
+class BinaryTable:
+
+    def __init__(
+            self,
+            binaries: T.Optional[T.Dict[str, T.Union[str, T.List[str]]]] = None,
+    ):
+        self.binaries: T.Dict[str, T.List[str]] = {}
+        if binaries:
+            for name, command in binaries.items():
+                if not isinstance(command, (list, str)):
+                    raise mesonlib.MesonException(
+                        f'Invalid type {command!r} for entry {name!r} in cross file')
+                self.binaries[name] = mesonlib.listify(command)
+
+    @staticmethod
+    def detect_ccache() -> T.List[str]:
+        try:
+            subprocess.check_call(['ccache', '--version'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+        except (OSError, subprocess.CalledProcessError):
+            return []
+        return ['ccache']
+
+    @staticmethod
+    def detect_sccache() -> T.List[str]:
+        try:
+            subprocess.check_call(['sccache', '--version'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+        except (OSError, subprocess.CalledProcessError):
+            return []
+        return ['sccache']
+
+    @staticmethod
+    def detect_compiler_cache() -> T.List[str]:
+        # Sccache is "newer" so it is assumed that people would prefer it by default.
+        cache = BinaryTable.detect_sccache()
+        if cache:
+            return cache
+        return BinaryTable.detect_ccache()
+
+    @classmethod
+    def parse_entry(cls, entry: T.Union[str, T.List[str]]) -> T.Tuple[T.List[str], T.List[str]]:
+        compiler = mesonlib.stringlistify(entry)
+        # Ensure ccache exists and remove it if it doesn't
+        if compiler[0] == 'ccache':
+            compiler = compiler[1:]
+            ccache = cls.detect_ccache()
+        elif compiler[0] == 'sccache':
+            compiler = compiler[1:]
+            ccache = cls.detect_sccache()
+        else:
+            ccache = []
+        # Return value has to be a list of compiler 'choices'
+        return compiler, ccache
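+
+    # Illustrative example: parse_entry(['ccache', 'gcc', '-pipe']) returns
+    # (['gcc', '-pipe'], ['ccache']) when ccache is available, and
+    # (['gcc', '-pipe'], []) when it is not.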
+
+    def lookup_entry(self, name: str) -> T.Optional[T.List[str]]:
+        """Lookup binary in cross/native file and fallback to environment.
+
+        Returns the command with args as a list if found, or `None` if nothing
+        is found.
+        """
+        command = self.binaries.get(name)
+        if not command:
+            return None
+        elif not command[0].strip():
+            return None
+        return command
+
+class CMakeVariables:
+    def __init__(self, variables: T.Optional[T.Dict[str, T.Any]] = None) -> None:
+        variables = variables or {}
+        self.variables = {}  # type: T.Dict[str, T.List[str]]
+
+        for key, value in variables.items():
+            value = mesonlib.listify(value)
+            for i in value:
+                if not isinstance(i, str):
+                    raise EnvironmentException(f"Value '{i}' of CMake variable '{key}' defined in a machine file is a {type(i).__name__} and not a str")
+            self.variables[key] = value
+
+    def get_variables(self) -> T.Dict[str, T.List[str]]:
+        return self.variables
diff --git a/vendored-meson/meson/mesonbuild/environment.py b/vendored-meson/meson/mesonbuild/environment.py
new file mode 100644
index 000000000000..ce7c9f1e6c02
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/environment.py
@@ -0,0 +1,917 @@
+# Copyright 2012-2020 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+import itertools
+import os, platform, re, sys, shutil
+import typing as T
+import collections
+
+from . import coredata
+from . import mesonlib
+from .mesonlib import (
+    MesonException, MachineChoice, Popen_safe, PerMachine,
+    PerMachineDefaultable, PerThreeMachineDefaultable, split_args, quote_arg, OptionKey,
+    search_version, MesonBugException
+)
+from . import mlog
+from .programs import ExternalProgram
+
+from .envconfig import (
+    BinaryTable, MachineInfo, Properties, known_cpu_families, CMakeVariables,
+)
+from . import compilers
+from .compilers import (
+    Compiler,
+    is_assembly,
+    is_header,
+    is_library,
+    is_llvm_ir,
+    is_object,
+    is_source,
+)
+
+from functools import lru_cache
+from mesonbuild import envconfig
+
+if T.TYPE_CHECKING:
+    import argparse
+    from configparser import ConfigParser
+
+    from .wrap.wrap import Resolver
+
+    CompilersDict = T.Dict[str, Compiler]
+
+
+build_filename = 'meson.build'
+
+
+def _get_env_var(for_machine: MachineChoice, is_cross: bool, var_name: str) -> T.Optional[str]:
+    """
+    Returns the exact env var and the value.
+    """
+    candidates = PerMachine(
+        # The prefixed build version takes priority, but if we are native
+        # compiling we fall back on the unprefixed host version. This
+        # allows native builds to never need to worry about the 'BUILD_*'
+        # ones.
+        ([var_name + '_FOR_BUILD'] if is_cross else [var_name]),
+        # Always just the unprefixed host versions
+        [var_name]
+    )[for_machine]
+    for var in candidates:
+        value = os.environ.get(var)
+        if value is not None:
+            break
+    else:
+        formatted = ', '.join([f'{var!r}' for var in candidates])
+        mlog.debug(f'None of {formatted} are defined in the environment, not changing global flags.')
+        return None
+    mlog.debug(f'Using {var!r} from environment with value: {value!r}')
+    return value
+
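+# Illustrative sketch, not upstream Meson code: in a cross build the build
+# machine consults only the '_FOR_BUILD' variant, while the host machine uses
+# the plain name. Hypothetical environment:
+#
+#     >>> os.environ['CC_FOR_BUILD'] = 'x86_64-linux-gnu-gcc'
+#     >>> _get_env_var(MachineChoice.BUILD, True, 'CC')
+#     'x86_64-linux-gnu-gcc'
+#     >>> _get_env_var(MachineChoice.HOST, True, 'CC')  # reads plain 'CC'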
+
+def detect_gcovr(min_version: str = '3.3', log: bool = False):
+    gcovr_exe = 'gcovr'
+    try:
+        p, found = Popen_safe([gcovr_exe, '--version'])[0:2]
+    except (FileNotFoundError, PermissionError):
+        # Doesn't exist in PATH or isn't executable
+        return None, None
+    found = search_version(found)
+    if p.returncode == 0 and mesonlib.version_compare(found, '>=' + min_version):
+        if log:
+            mlog.log('Found gcovr-{} at {}'.format(found, quote_arg(shutil.which(gcovr_exe))))
+        return gcovr_exe, found
+    return None, None
+
+def detect_llvm_cov():
+    tools = get_llvm_tool_names('llvm-cov')
+    for tool in tools:
+        if mesonlib.exe_exists([tool, '--version']):
+            return tool
+    return None
+
+def find_coverage_tools() -> T.Tuple[T.Optional[str], T.Optional[str], T.Optional[str], T.Optional[str], T.Optional[str]]:
+    gcovr_exe, gcovr_version = detect_gcovr()
+
+    llvm_cov_exe = detect_llvm_cov()
+
+    lcov_exe = 'lcov'
+    genhtml_exe = 'genhtml'
+
+    if not mesonlib.exe_exists([lcov_exe, '--version']):
+        lcov_exe = None
+    if not mesonlib.exe_exists([genhtml_exe, '--version']):
+        genhtml_exe = None
+
+    return gcovr_exe, gcovr_version, lcov_exe, genhtml_exe, llvm_cov_exe
+
+def detect_ninja(version: str = '1.8.2', log: bool = False) -> T.Optional[T.List[str]]:
+    r = detect_ninja_command_and_version(version, log)
+    return r[0] if r else None
+
+def detect_ninja_command_and_version(version: str = '1.8.2', log: bool = False) -> T.Optional[T.Tuple[T.List[str], str]]:
+    env_ninja = os.environ.get('NINJA', None)
+    for n in [env_ninja] if env_ninja else ['ninja', 'ninja-build', 'samu']:
+        prog = ExternalProgram(n, silent=True)
+        if not prog.found():
+            continue
+        try:
+            p, found = Popen_safe(prog.command + ['--version'])[0:2]
+        except (FileNotFoundError, PermissionError):
+            # Doesn't exist in PATH or isn't executable
+            continue
+        found = found.strip()
+        # Perhaps we should add a way for the caller to know the failure mode
+        # (not found or too old)
+        if p.returncode == 0 and mesonlib.version_compare(found, '>=' + version):
+            if log:
+                name = os.path.basename(n)
+                if name.endswith('-' + found):
+                    name = name[0:-1 - len(found)]
+                if name == 'ninja-build':
+                    name = 'ninja'
+                if name == 'samu':
+                    name = 'samurai'
+                mlog.log('Found {}-{} at {}'.format(name, found,
+                         ' '.join([quote_arg(x) for x in prog.command])))
+            return (prog.command, found)
+
+def get_llvm_tool_names(tool: str) -> T.List[str]:
+    # Ordered list of possible suffixes of LLVM executables to try. Start with
+    # base, then try newest back to oldest (3.5 is arbitrary), and finally the
+    # devel version. Please note that the development snapshot in Debian does
+    # not have a distinct name. Do not move it to the beginning of the list
+    # unless it becomes a stable release.
+    suffixes = [
+        '', # base (no suffix)
+        '-16',  '16',
+        '-15',  '15',
+        '-14',  '14',
+        '-13',  '13',
+        '-12',  '12',
+        '-11',  '11',
+        '-10',  '10',
+        '-9',   '90',
+        '-8',   '80',
+        '-7',   '70',
+        '-6.0', '60',
+        '-5.0', '50',
+        '-4.0', '40',
+        '-3.9', '39',
+        '-3.8', '38',
+        '-3.7', '37',
+        '-3.6', '36',
+        '-3.5', '35',
+        '-15',    # Debian development snapshot
+        '-devel', # FreeBSD development snapshot
+    ]
+    names: T.List[str] = []
+    for suffix in suffixes:
+        names.append(tool + suffix)
+    return names
+
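+# Illustrative sketch, not upstream Meson code: the search order this produces
+# for a given tool name, bare name first, then newest to oldest suffix:
+#
+#     >>> get_llvm_tool_names('llvm-cov')[:5]
+#     ['llvm-cov', 'llvm-cov-16', 'llvm-cov16', 'llvm-cov-15', 'llvm-cov15']
+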
+def detect_scanbuild() -> T.List[str]:
+    """ Look for scan-build binary on build platform
+
+    First, if a SCANBUILD env variable has been provided, give it precedence
+    on all platforms.
+
+    For most platforms, scan-build is found if the PATH contains a binary
+    named "scan-build". However, some distributions' package managers (e.g.
+    FreeBSD's) only install versioned names. For those, loop through a list
+    of candidates to see if one is available.
+
+    Return: a single-element list of the found scan-build binary ready to be
+        passed to Popen()
+    """
+    exelist: T.List[str] = []
+    if 'SCANBUILD' in os.environ:
+        exelist = split_args(os.environ['SCANBUILD'])
+
+    else:
+        tools = get_llvm_tool_names('scan-build')
+        for tool in tools:
+            which = shutil.which(tool)
+            if which is not None:
+                exelist = [which]
+                break
+
+    if exelist:
+        tool = exelist[0]
+        if os.path.isfile(tool) and os.access(tool, os.X_OK):
+            return [tool]
+    return []
+
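+# Illustrative sketch, not upstream Meson code: SCANBUILD may hold a full
+# command line, which is split with shell-like rules; note that only the first
+# element survives the final executable check above. Hypothetical value:
+#
+#     SCANBUILD='/opt/llvm/bin/scan-build --use-cc=clang'
+#     split_args(...)    -> ['/opt/llvm/bin/scan-build', '--use-cc=clang']
+#     detect_scanbuild() -> ['/opt/llvm/bin/scan-build']
+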
+def detect_clangformat() -> T.List[str]:
+    """ Look for clang-format binary on build platform
+
+    Do the same thing as detect_scanbuild to find clang-format except it
+    currently does not check the environment variable.
+
+    Return: a single-element list of the found clang-format binary ready to be
+        passed to Popen()
+    """
+    tools = get_llvm_tool_names('clang-format')
+    for tool in tools:
+        path = shutil.which(tool)
+        if path is not None:
+            return [path]
+    return []
+
+def detect_windows_arch(compilers: CompilersDict) -> str:
+    """
+    Detecting the 'native' architecture of Windows is not a trivial task. We
+    cannot trust that the architecture that Python is built for is the 'native'
+    one because you can run 32-bit apps on 64-bit Windows using WOW64 and
+    people sometimes install 32-bit Python on 64-bit Windows.
+
+    We also can't rely on the architecture of the OS itself, since it's
+    perfectly normal to compile and run 32-bit applications on Windows as if
+    they were native applications. It's a terrible experience to require the
+    user to supply a cross-info file to compile 32-bit applications on 64-bit
+    Windows. Thankfully, the only way to compile things with Visual Studio on
+    Windows is by entering the 'msvc toolchain' environment, which can be
+    easily detected.
+
+    In the end, the sanest method is as follows:
+    1. Check environment variables that are set by Windows and WOW64 to find out
+       if this is x86 (possibly in WOW64); if so, use that as our 'native'
+       architecture.
+    2. If the compiler toolchain target architecture is x86, use that as our
+       'native' architecture.
+    3. Otherwise, use the actual Windows architecture.
+
+    """
+    os_arch = mesonlib.windows_detect_native_arch()
+    if os_arch == 'x86':
+        return os_arch
+    # If we're on 64-bit Windows, 32-bit apps can be compiled without
+    # cross-compilation. So if we're doing that, just set the native arch as
+    # 32-bit and pretend like we're running under WOW64. Else, return the
+    # actual Windows architecture that we deduced above.
+    for compiler in compilers.values():
+        if compiler.id == 'msvc' and (compiler.target in {'x86', '80x86'}):
+            return 'x86'
+        if compiler.id == 'clang-cl' and compiler.target == 'x86':
+            return 'x86'
+        if compiler.id == 'gcc' and compiler.has_builtin_define('__i386__'):
+            return 'x86'
+    return os_arch
+
+def any_compiler_has_define(compilers: CompilersDict, define: str) -> bool:
+    for c in compilers.values():
+        try:
+            if c.has_builtin_define(define):
+                return True
+        except mesonlib.MesonException:
+            # Ignore compilers that do not support has_builtin_define.
+            pass
+    return False
+
+def detect_cpu_family(compilers: CompilersDict) -> str:
+    """
+    Python is inconsistent in its platform module.
+    It returns different values for the same cpu.
+    For x86 it might return 'x86', 'i686' or somesuch.
+    Do some canonicalization.
+    """
+    if mesonlib.is_windows():
+        trial = detect_windows_arch(compilers)
+    elif mesonlib.is_freebsd() or mesonlib.is_netbsd() or mesonlib.is_openbsd() or mesonlib.is_qnx() or mesonlib.is_aix():
+        trial = platform.processor().lower()
+    else:
+        trial = platform.machine().lower()
+    if trial.startswith('i') and trial.endswith('86'):
+        trial = 'x86'
+    elif trial == 'bepc':
+        trial = 'x86'
+    elif trial == 'arm64':
+        trial = 'aarch64'
+    elif trial.startswith('aarch64'):
+        # This can be `aarch64_be`
+        trial = 'aarch64'
+    elif trial.startswith('arm') or trial.startswith('earm'):
+        trial = 'arm'
+    elif trial.startswith(('powerpc64', 'ppc64')):
+        trial = 'ppc64'
+    elif trial.startswith(('powerpc', 'ppc')) or trial in {'macppc', 'power macintosh'}:
+        trial = 'ppc'
+    elif trial in {'amd64', 'x64', 'i86pc'}:
+        trial = 'x86_64'
+    elif trial in {'sun4u', 'sun4v'}:
+        trial = 'sparc64'
+    elif trial.startswith('mips'):
+        if '64' not in trial:
+            trial = 'mips'
+        else:
+            trial = 'mips64'
+    elif trial in {'ip30', 'ip35'}:
+        trial = 'mips64'
+
+    # On Linux (and maybe others) there can be any mixture of 32/64 bit code in
+    # the kernel, Python, system, 32-bit chroot on 64-bit host, etc. The only
+    # reliable way to know is to check the compiler defines.
+    if trial == 'x86_64':
+        if any_compiler_has_define(compilers, '__i386__'):
+            trial = 'x86'
+    elif trial == 'aarch64':
+        if any_compiler_has_define(compilers, '__arm__'):
+            trial = 'arm'
+    # Add more quirks here as bugs are reported. Keep in sync with detect_cpu()
+    # below.
+    elif trial == 'parisc64':
+        # ATM there is no 64 bit userland for PA-RISC. Thus always
+        # report it as 32 bit for simplicity.
+        trial = 'parisc'
+    elif trial == 'ppc':
+        # AIX always returns powerpc, check here for 64-bit
+        if any_compiler_has_define(compilers, '__64BIT__'):
+            trial = 'ppc64'
+    # MIPS64 is able to run MIPS32 code natively, so there is a chance that
+    # such mixture mentioned above exists.
+    elif trial == 'mips64':
+        if not any_compiler_has_define(compilers, '__mips64'):
+            trial = 'mips'
+
+    if trial not in known_cpu_families:
+        mlog.warning(f'Unknown CPU family {trial!r}, please report this at '
+                     'https://github.com/mesonbuild/meson/issues/new with the '
+                     'output of `uname -a` and `cat /proc/cpuinfo`')
+
+    return trial
+
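+# Illustrative sketch, not upstream Meson code: sample canonicalizations that
+# detect_cpu_family() performs, assuming no compiler-define quirks apply:
+#
+#     'i686' / 'i586'          -> 'x86'
+#     'arm64' / 'aarch64_be'   -> 'aarch64'
+#     'powerpc64' / 'ppc64le'  -> 'ppc64'
+#     'amd64' / 'x64'          -> 'x86_64'
+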
+def detect_cpu(compilers: CompilersDict) -> str:
+    if mesonlib.is_windows():
+        trial = detect_windows_arch(compilers)
+    elif mesonlib.is_freebsd() or mesonlib.is_netbsd() or mesonlib.is_openbsd() or mesonlib.is_aix():
+        trial = platform.processor().lower()
+    else:
+        trial = platform.machine().lower()
+
+    if trial in {'amd64', 'x64', 'i86pc'}:
+        trial = 'x86_64'
+    if trial == 'x86_64':
+        # Same check as above for cpu_family
+        if any_compiler_has_define(compilers, '__i386__'):
+            trial = 'i686' # All 64 bit cpus have at least this level of x86 support.
+    elif trial.startswith('aarch64') or trial.startswith('arm64'):
+        # Same check as above for cpu_family
+        if any_compiler_has_define(compilers, '__arm__'):
+            trial = 'arm'
+        else:
+            # for aarch64_be
+            trial = 'aarch64'
+    elif trial.startswith('earm'):
+        trial = 'arm'
+    elif trial == 'e2k':
+        # Make more precise CPU detection for Elbrus platform.
+        trial = platform.processor().lower()
+    elif trial.startswith('mips'):
+        if '64' not in trial:
+            trial = 'mips'
+        else:
+            if not any_compiler_has_define(compilers, '__mips64'):
+                trial = 'mips'
+            else:
+                trial = 'mips64'
+    elif trial == 'ppc':
+        # AIX always returns powerpc, check here for 64-bit
+        if any_compiler_has_define(compilers, '__64BIT__'):
+            trial = 'ppc64'
+
+    # Add more quirks here as bugs are reported. Keep in sync with
+    # detect_cpu_family() above.
+    return trial
+
+KERNEL_MAPPINGS: T.Mapping[str, str] = {'freebsd': 'freebsd',
+                                        'openbsd': 'openbsd',
+                                        'netbsd': 'netbsd',
+                                        'windows': 'nt',
+                                        'android': 'linux',
+                                        'linux': 'linux',
+                                        'cygwin': 'nt',
+                                        'darwin': 'xnu',
+                                        'dragonfly': 'dragonfly',
+                                        'haiku': 'haiku',
+                                        }
+
+def detect_kernel(system: str) -> T.Optional[str]:
+    if system == 'sunos':
+        # This needs to be /usr/bin/uname because gnu-uname could be installed and
+        # won't provide the necessary information
+        p, out, _ = Popen_safe(['/usr/bin/uname', '-o'])
+        if p.returncode != 0:
+            raise MesonException('Failed to run "/usr/bin/uname -o"')
+        out = out.lower().strip()
+        if out not in {'illumos', 'solaris'}:
+            mlog.warning(f'Got an unexpected value for kernel on a SunOS derived platform, expected either "illumos" or "solaris", but got "{out}". '
+                         "Please open a Meson issue with the OS you're running and the value detected for your kernel.")
+            return None
+        return out
+    return KERNEL_MAPPINGS.get(system, None)
+
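+# Illustrative sketch, not upstream Meson code: the mapping above resolves
+# e.g. 'darwin' -> 'xnu' and 'cygwin' -> 'nt', while an unknown system
+# yields None:
+#
+#     >>> detect_kernel('darwin')
+#     'xnu'
+#     >>> detect_kernel('riscos') is None
+#     True
+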
+def detect_subsystem(system: str) -> T.Optional[str]:
+    if system == 'darwin':
+        return 'macos'
+    return system
+
+def detect_system() -> str:
+    if sys.platform == 'cygwin':
+        return 'cygwin'
+    return platform.system().lower()
+
+def detect_msys2_arch() -> T.Optional[str]:
+    return os.environ.get('MSYSTEM_CARCH', None)
+
+def detect_machine_info(compilers: T.Optional[CompilersDict] = None) -> MachineInfo:
+    """Detect the machine we're running on
+
+    If compilers are not provided, we cannot know as much. None out those
+    fields to avoid accidentally depending on partial knowledge. The
+    underlying `detect_*` methods can be called to explicitly use the
+    partial information.
+    """
+    system = detect_system()
+    return MachineInfo(
+        system,
+        detect_cpu_family(compilers) if compilers is not None else None,
+        detect_cpu(compilers) if compilers is not None else None,
+        sys.byteorder,
+        detect_kernel(system),
+        detect_subsystem(system))
+
+# TODO make this compare two `MachineInfo`s purely. How important is the
+# `detect_cpu_family({})` distinction? It is the one impediment to that.
+def machine_info_can_run(machine_info: MachineInfo) -> bool:
+    """Whether we can run binaries for this machine on the current machine.
+
+    Can almost always run 32-bit binaries on 64-bit natively if the host
+    and build systems are the same. We don't pass any compilers to
+    detect_cpu_family() here because we always want to know the OS
+    architecture, not what the compiler environment tells us.
+    """
+    if machine_info.system != detect_system():
+        return False
+    true_build_cpu_family = detect_cpu_family({})
+    return \
+        (machine_info.cpu_family == true_build_cpu_family) or \
+        ((true_build_cpu_family == 'x86_64') and (machine_info.cpu_family == 'x86')) or \
+        ((true_build_cpu_family == 'aarch64') and (machine_info.cpu_family == 'arm'))
+
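+# Illustrative sketch, not upstream Meson code: on a hypothetical x86_64 Linux
+# build machine, a 32-bit x86 host of the same OS is still considered runnable:
+#
+#     >>> info = MachineInfo('linux', 'x86', 'i686', 'little', 'linux', 'linux')
+#     >>> machine_info_can_run(info)
+#     True
+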
+class Environment:
+    private_dir = 'meson-private'
+    log_dir = 'meson-logs'
+    info_dir = 'meson-info'
+
+    def __init__(self, source_dir: T.Optional[str], build_dir: T.Optional[str], options: 'argparse.Namespace') -> None:
+        self.source_dir = source_dir
+        self.build_dir = build_dir
+        # Do not try to create build directories when build_dir is None.
+        # This reduced mode is used by the --buildoptions introspector.
+        if build_dir is not None:
+            self.scratch_dir = os.path.join(build_dir, Environment.private_dir)
+            self.log_dir = os.path.join(build_dir, Environment.log_dir)
+            self.info_dir = os.path.join(build_dir, Environment.info_dir)
+            os.makedirs(self.scratch_dir, exist_ok=True)
+            os.makedirs(self.log_dir, exist_ok=True)
+            os.makedirs(self.info_dir, exist_ok=True)
+            try:
+                self.coredata: coredata.CoreData = coredata.load(self.get_build_dir())
+                self.first_invocation = False
+            except FileNotFoundError:
+                self.create_new_coredata(options)
+            except coredata.MesonVersionMismatchException as e:
+                # This is routine, but tell the user the update happened
+                mlog.log('Regenerating configuration from scratch:', str(e))
+                coredata.read_cmd_line_file(self.build_dir, options)
+                self.create_new_coredata(options)
+            except MesonException as e:
+                # If we stored previous command line options, we can recover from
+                # a broken/outdated coredata.
+                if os.path.isfile(coredata.get_cmd_line_file(self.build_dir)):
+                    mlog.warning('Regenerating configuration from scratch.', fatal=False)
+                    mlog.log('Reason:', mlog.red(str(e)))
+                    coredata.read_cmd_line_file(self.build_dir, options)
+                    self.create_new_coredata(options)
+                else:
+                    raise e
+        else:
+            # Just create a fresh coredata in this case
+            self.scratch_dir = ''
+            self.create_new_coredata(options)
+
+        ## locally bind some unfrozen configuration
+
+        # Stores machine infos. This is the only *three*-machine table because
+        # we keep a target machine info around for the user (Meson itself
+        # never cares about the target machine).
+        machines: PerThreeMachineDefaultable[MachineInfo] = PerThreeMachineDefaultable()
+
+        # Similar to coredata.compilers, but lower level in that there is no
+        # meta data, only names/paths.
+        binaries: PerMachineDefaultable[BinaryTable] = PerMachineDefaultable()
+
+        # Misc other properties about each machine.
+        properties: PerMachineDefaultable[Properties] = PerMachineDefaultable()
+
+        # CMake toolchain variables
+        cmakevars: PerMachineDefaultable[CMakeVariables] = PerMachineDefaultable()
+
+        ## Setup build machine defaults
+
+        # Will be fully initialized later using the compilers.
+        machines.build = detect_machine_info()
+
+        # Just uses hard-coded defaults and environment variables. Might be
+        # overwritten by a native file.
+        binaries.build = BinaryTable()
+        properties.build = Properties()
+
+        # Options with the key parsed into an OptionKey type.
+        #
+        # Note that order matters because of 'buildtype': if it comes after
+        # the 'optimization' and 'debug' keys, it overrides them.
+        self.options: T.MutableMapping[OptionKey, T.Union[str, T.List[str]]] = collections.OrderedDict()
+
+        ## Read in native file(s) to override build machine configuration
+
+        if self.coredata.config_files is not None:
+            config = coredata.parse_machine_files(self.coredata.config_files)
+            binaries.build = BinaryTable(config.get('binaries', {}))
+            properties.build = Properties(config.get('properties', {}))
+            cmakevars.build = CMakeVariables(config.get('cmake', {}))
+            self._load_machine_file_options(
+                config, properties.build,
+                MachineChoice.BUILD if self.coredata.cross_files else MachineChoice.HOST)
+
+        ## Read in cross file(s) to override host machine configuration
+
+        if self.coredata.cross_files:
+            config = coredata.parse_machine_files(self.coredata.cross_files)
+            properties.host = Properties(config.get('properties', {}))
+            binaries.host = BinaryTable(config.get('binaries', {}))
+            cmakevars.host = CMakeVariables(config.get('cmake', {}))
+            if 'host_machine' in config:
+                machines.host = MachineInfo.from_literal(config['host_machine'])
+            if 'target_machine' in config:
+                machines.target = MachineInfo.from_literal(config['target_machine'])
+            # Keep only per machine options from the native file. The cross
+            # file takes precedence over all other options.
+            for key, value in list(self.options.items()):
+                if self.coredata.is_per_machine_option(key):
+                    self.options[key.as_build()] = value
+            self._load_machine_file_options(config, properties.host, MachineChoice.HOST)
+
+        ## "freeze" now initialized configuration, and "save" to the class.
+
+        self.machines = machines.default_missing()
+        self.binaries = binaries.default_missing()
+        self.properties = properties.default_missing()
+        self.cmakevars = cmakevars.default_missing()
+
+        # Command line options override those from cross/native files
+        self.options.update(options.cmd_line_options)
+
+        # Take default value from env if not set in cross/native files or command line.
+        self._set_default_options_from_env()
+        self._set_default_binaries_from_env()
+        self._set_default_properties_from_env()
+
+        # Warn if the user is using two different ways of setting build-type
+        # options that override each other
+        bt = OptionKey('buildtype')
+        db = OptionKey('debug')
+        op = OptionKey('optimization')
+        if bt in self.options and (db in self.options or op in self.options):
+            mlog.warning('Recommend using either -Dbuildtype or -Doptimization + -Ddebug. '
+                         'Using both is redundant since they override each other. '
+                         'See: https://mesonbuild.com/Builtin-options.html#build-type-options',
+                         fatal=False)
+
+        exe_wrapper = self.lookup_binary_entry(MachineChoice.HOST, 'exe_wrapper')
+        if exe_wrapper is not None:
+            self.exe_wrapper = ExternalProgram.from_bin_list(self, MachineChoice.HOST, 'exe_wrapper')
+        else:
+            self.exe_wrapper = None
+
+        self.default_cmake = ['cmake']
+        self.default_pkgconfig = ['pkg-config']
+        self.wrap_resolver: T.Optional['Resolver'] = None
+
+    def _load_machine_file_options(self, config: 'ConfigParser', properties: Properties, machine: MachineChoice) -> None:
+        """Read the contents of a Machine file and put it in the options store."""
+
+        # Look for any options in the deprecated paths section, warn about
+        # those, then assign them. They will be overwritten by the ones in the
+        # "built-in options" section if they're in both sections.
+        paths = config.get('paths')
+        if paths:
+            mlog.deprecation('The [paths] section is deprecated, use the [built-in options] section instead.')
+            for k, v in paths.items():
+                self.options[OptionKey.from_string(k).evolve(machine=machine)] = v
+
+        # Next look for compiler options in the "properties" section, this is
+        # also deprecated, and these will also be overwritten by the "built-in
+        # options" section. We need to remove these from this section, as well.
+        deprecated_properties: T.Set[str] = set()
+        for lang in compilers.all_languages:
+            deprecated_properties.add(lang + '_args')
+            deprecated_properties.add(lang + '_link_args')
+        for k, v in properties.properties.copy().items():
+            if k in deprecated_properties:
+                mlog.deprecation(f'{k} in the [properties] section of the machine file is deprecated, use the [built-in options] section.')
+                self.options[OptionKey.from_string(k).evolve(machine=machine)] = v
+                del properties.properties[k]
+
+        for section, values in config.items():
+            if ':' in section:
+                subproject, section = section.split(':')
+            else:
+                subproject = ''
+            if section == 'built-in options':
+                for k, v in values.items():
+                    key = OptionKey.from_string(k)
+                    # If we're in the cross file and there is a `build.foo`, warn about that. Later we'll remove it.
+                    if machine is MachineChoice.HOST and key.machine is not machine:
+                        mlog.deprecation('Setting build machine options in cross files, please use a native file instead, this will be removed in meson 0.60', once=True)
+                    if key.subproject:
+                        raise MesonException('Do not set subproject options in [built-in options] section, use [subproject:built-in options] instead.')
+                    self.options[key.evolve(subproject=subproject, machine=machine)] = v
+            elif section == 'project options' and machine is MachineChoice.HOST:
+                # Project options are only for the host machine, we don't want
+                # to read these from the native file
+                for k, v in values.items():
+                    # Project options are always for the host machine
+                    key = OptionKey.from_string(k)
+                    if key.subproject:
+                        raise MesonException('Do not set subproject options in [project options] section, use [subproject:project options] instead.')
+                    self.options[key.evolve(subproject=subproject)] = v
+
+    def _set_default_options_from_env(self) -> None:
+        opts: T.List[T.Tuple[str, str]] = (
+            [(v, f'{k}_args') for k, v in compilers.compilers.CFLAGS_MAPPING.items()] +
+            [
+                ('PKG_CONFIG_PATH', 'pkg_config_path'),
+                ('CMAKE_PREFIX_PATH', 'cmake_prefix_path'),
+                ('LDFLAGS', 'ldflags'),
+                ('CPPFLAGS', 'cppflags'),
+            ]
+        )
+
+        env_opts: T.DefaultDict[OptionKey, T.List[str]] = collections.defaultdict(list)
+
+        for (evar, keyname), for_machine in itertools.product(opts, MachineChoice):
+            p_env = _get_env_var(for_machine, self.is_cross_build(), evar)
+            if p_env is not None:
+                # these may contain duplicates, which must be removed, else
+                # a duplicates-in-array-option warning arises.
+                if keyname == 'cmake_prefix_path':
+                    if self.machines[for_machine].is_windows():
+                        # Cannot split on ':' on Windows because it appears in drive letters (e.g. 'C:')
+                        _p_env = p_env.split(os.pathsep)
+                    else:
+                        # https://github.com/mesonbuild/meson/issues/7294
+                        _p_env = re.split(r':|;', p_env)
+                    p_list = list(mesonlib.OrderedSet(_p_env))
+                elif keyname == 'pkg_config_path':
+                    p_list = list(mesonlib.OrderedSet(p_env.split(os.pathsep)))
+                else:
+                    p_list = split_args(p_env)
+                p_list = [e for e in p_list if e]  # filter out any empty elements
+
+                # Take env vars only on first invocation, if the env changes when
+                # reconfiguring it gets ignored.
+                # FIXME: We should remember if we took the value from env to warn
+                # if it changes on future invocations.
+                if self.first_invocation:
+                    if keyname == 'ldflags':
+                        key = OptionKey('link_args', machine=for_machine, lang='c')  # needs a language to initialize properly
+                        for lang in compilers.compilers.LANGUAGES_USING_LDFLAGS:
+                            key = key.evolve(lang=lang)
+                            env_opts[key].extend(p_list)
+                    elif keyname == 'cppflags':
+                        key = OptionKey('env_args', machine=for_machine, lang='c')
+                        for lang in compilers.compilers.LANGUAGES_USING_CPPFLAGS:
+                            key = key.evolve(lang=lang)
+                            env_opts[key].extend(p_list)
+                    else:
+                        key = OptionKey.from_string(keyname).evolve(machine=for_machine)
+                        if evar in compilers.compilers.CFLAGS_MAPPING.values():
+                            # If this is an environment variable, we have to
+                            # store it separately until the compiler is
+                            # instantiated, as we don't know whether the
+                            # compiler will want to use these arguments at link
+                            # time and compile time (instead of just at compile
+                            # time) until we're instantiating that `Compiler`
+                            # object. This is required so that passing
+                            # `-Dc_args=` on the command line and `$CFLAGS`
+                            # have subtly different behavior. `$CFLAGS` will be
+                            # added to the linker command line if the compiler
+                            # acts as a linker driver, `-Dc_args` will not.
+                            #
+                            # We still use the original key as the base here, as
+                            # we want to inherit the machine and the compiler
+                            # language
+                            key = key.evolve('env_args')
+                        env_opts[key].extend(p_list)
+
+        # Only store options that are not already in self.options,
+        # otherwise we'd override the machine files
+        for k, v in env_opts.items():
+            if k not in self.options:
+                self.options[k] = v
+
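+    # Illustrative sketch, not upstream Meson code: with hypothetical values,
+    # the defaults above land roughly as follows on a first invocation:
+    #
+    #     PKG_CONFIG_PATH=/opt/lib/pkgconfig -> OptionKey('pkg_config_path')
+    #     LDFLAGS='-L/opt/lib'  -> 'link_args' for every language in
+    #                              LANGUAGES_USING_LDFLAGS
+    #     CFLAGS='-O0 -g'       -> the C 'env_args' key (not 'c_args'), so the
+    #                              flags can apply at compile *and* link time
+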
+    def _set_default_binaries_from_env(self) -> None:
+        """Set default binaries from the environment.
+
+        For example, pkg-config can be set via PKG_CONFIG, or in the machine
+        file. We want to set the default to the env variable.
+        """
+        opts = itertools.chain(envconfig.DEPRECATED_ENV_PROG_MAP.items(),
+                               envconfig.ENV_VAR_PROG_MAP.items())
+
+        for (name, evar), for_machine in itertools.product(opts, MachineChoice):
+            p_env = _get_env_var(for_machine, self.is_cross_build(), evar)
+            if p_env is not None:
+                self.binaries[for_machine].binaries.setdefault(name, mesonlib.split_args(p_env))
+
+    def _set_default_properties_from_env(self) -> None:
+        """Properties which can also be set from the environment."""
+        # name, evar, split
+        opts: T.List[T.Tuple[str, T.List[str], bool]] = [
+            ('boost_includedir', ['BOOST_INCLUDEDIR'], False),
+            ('boost_librarydir', ['BOOST_LIBRARYDIR'], False),
+            ('boost_root', ['BOOST_ROOT', 'BOOSTROOT'], True),
+            ('java_home', ['JAVA_HOME'], False),
+        ]
+
+        for (name, evars, split), for_machine in itertools.product(opts, MachineChoice):
+            for evar in evars:
+                p_env = _get_env_var(for_machine, self.is_cross_build(), evar)
+                if p_env is not None:
+                    if split:
+                        self.properties[for_machine].properties.setdefault(name, p_env.split(os.pathsep))
+                    else:
+                        self.properties[for_machine].properties.setdefault(name, p_env)
+                    break
+
+    def create_new_coredata(self, options: 'argparse.Namespace') -> None:
+        # WARNING: Don't use any values from coredata in __init__. It gets
+        # re-initialized with project options by the interpreter during
+        # build file parsing.
+        # meson_command is used by the regenchecker script, which runs meson
+        self.coredata = coredata.CoreData(options, self.scratch_dir, mesonlib.get_meson_command())
+        self.first_invocation = True
+
+    def is_cross_build(self, when_building_for: MachineChoice = MachineChoice.HOST) -> bool:
+        return self.coredata.is_cross_build(when_building_for)
+
+    def dump_coredata(self) -> str:
+        return coredata.save(self.coredata, self.get_build_dir())
+
+    def get_log_dir(self) -> str:
+        return self.log_dir
+
+    def get_coredata(self) -> coredata.CoreData:
+        return self.coredata
+
+    @staticmethod
+    def get_build_command(unbuffered: bool = False) -> T.List[str]:
+        cmd = mesonlib.get_meson_command()
+        if cmd is None:
+            raise MesonBugException('No command?')
+        cmd = cmd.copy()
+        if unbuffered and 'python' in os.path.basename(cmd[0]):
+            cmd.insert(1, '-u')
+        return cmd
+
+    def is_header(self, fname: 'mesonlib.FileOrString') -> bool:
+        return is_header(fname)
+
+    def is_source(self, fname: 'mesonlib.FileOrString') -> bool:
+        return is_source(fname)
+
+    def is_assembly(self, fname: 'mesonlib.FileOrString') -> bool:
+        return is_assembly(fname)
+
+    def is_llvm_ir(self, fname: 'mesonlib.FileOrString') -> bool:
+        return is_llvm_ir(fname)
+
+    def is_object(self, fname: 'mesonlib.FileOrString') -> bool:
+        return is_object(fname)
+
+    @lru_cache(maxsize=None)
+    def is_library(self, fname: mesonlib.FileOrString) -> bool:
+        return is_library(fname)
+
+    def lookup_binary_entry(self, for_machine: MachineChoice, name: str) -> T.Optional[T.List[str]]:
+        return self.binaries[for_machine].lookup_entry(name)
+
+    def get_scratch_dir(self) -> str:
+        return self.scratch_dir
+
+    def get_source_dir(self) -> str:
+        return self.source_dir
+
+    def get_build_dir(self) -> str:
+        return self.build_dir
+
+    def get_import_lib_dir(self) -> str:
+        "Install dir for the import library (library used for linking)"
+        return self.get_libdir()
+
+    def get_shared_module_dir(self) -> str:
+        "Install dir for shared modules that are loaded at runtime"
+        return self.get_libdir()
+
+    def get_shared_lib_dir(self) -> str:
+        "Install dir for the shared library"
+        m = self.machines.host
+        # Windows has no RPATH or similar, so DLLs must be next to EXEs.
+        if m.is_windows() or m.is_cygwin():
+            return self.get_bindir()
+        return self.get_libdir()
+
+    def get_jar_dir(self) -> str:
+        """Install dir for JAR files"""
+        return f"{self.get_datadir()}/java"
+
+    def get_static_lib_dir(self) -> str:
+        "Install dir for the static library"
+        return self.get_libdir()
+
+    def get_prefix(self) -> str:
+        return self.coredata.get_option(OptionKey('prefix'))
+
+    def get_libdir(self) -> str:
+        return self.coredata.get_option(OptionKey('libdir'))
+
+    def get_libexecdir(self) -> str:
+        return self.coredata.get_option(OptionKey('libexecdir'))
+
+    def get_bindir(self) -> str:
+        return self.coredata.get_option(OptionKey('bindir'))
+
+    def get_includedir(self) -> str:
+        return self.coredata.get_option(OptionKey('includedir'))
+
+    def get_mandir(self) -> str:
+        return self.coredata.get_option(OptionKey('mandir'))
+
+    def get_datadir(self) -> str:
+        return self.coredata.get_option(OptionKey('datadir'))
+
+    def get_compiler_system_lib_dirs(self, for_machine: MachineChoice) -> T.List[str]:
+        for comp in self.coredata.compilers[for_machine].values():
+            if comp.id == 'clang':
+                index = 1
+                break
+            elif comp.id == 'gcc':
+                index = 2
+                break
+        else:
+            # This option is only supported by GCC and Clang. If we don't find
+            # a GCC or Clang compiler, return an empty list.
+            return []
+
+        p, out, _ = Popen_safe(comp.get_exelist() + ['-print-search-dirs'])
+        if p.returncode != 0:
+            raise mesonlib.MesonException('Could not calculate system search dirs')
+        out = out.split('\n')[index].lstrip('libraries: =').split(':')
+        return [os.path.normpath(p) for p in out]
+
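+    # Illustrative sketch, not upstream Meson code: `gcc -print-search-dirs`
+    # typically prints three lines ('install:', 'programs:', 'libraries:'),
+    # hence index 2 for GCC above, while clang omits the 'install:' line,
+    # hence index 1. Hypothetical output:
+    #
+    #     install: /usr/lib/gcc/x86_64-linux-gnu/12/
+    #     programs: =/usr/lib/gcc/x86_64-linux-gnu/12/:...
+    #     libraries: =/usr/lib/gcc/x86_64-linux-gnu/12/:/usr/lib/...
+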
+    def get_compiler_system_include_dirs(self, for_machine: MachineChoice) -> T.List[str]:
+        for comp in self.coredata.compilers[for_machine].values():
+            if comp.id == 'clang':
+                break
+            elif comp.id == 'gcc':
+                break
+        else:
+            # This option is only supported by GCC and Clang. If we don't find
+            # a GCC or Clang compiler, return an empty list.
+            return []
+        return comp.get_default_include_dirs()
+
+    def need_exe_wrapper(self, for_machine: MachineChoice = MachineChoice.HOST):
+        value = self.properties[for_machine].get('needs_exe_wrapper', None)
+        if value is not None:
+            return value
+        return not machine_info_can_run(self.machines[for_machine])
+
+    def get_exe_wrapper(self) -> T.Optional[ExternalProgram]:
+        if not self.need_exe_wrapper():
+            return None
+        return self.exe_wrapper
diff --git a/vendored-meson/meson/mesonbuild/interpreter/__init__.py b/vendored-meson/meson/mesonbuild/interpreter/__init__.py
new file mode 100644
index 000000000000..016e4dce5ac1
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/interpreter/__init__.py
@@ -0,0 +1,59 @@
+# SPDX-License-Identifier: Apache-2.0
+# Copyright 2012-2021 The Meson development team
+# Copyright © 2021 Intel Corporation
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Meson interpreter."""
+
+__all__ = [
+    'Interpreter',
+    'permitted_dependency_kwargs',
+
+    'CompilerHolder',
+
+    'ExecutableHolder',
+    'BuildTargetHolder',
+    'CustomTargetHolder',
+    'CustomTargetIndexHolder',
+    'MachineHolder',
+    'Test',
+    'ConfigurationDataHolder',
+    'SubprojectHolder',
+    'DependencyHolder',
+    'GeneratedListHolder',
+    'ExternalProgramHolder',
+    'extract_required_kwarg',
+
+    'ArrayHolder',
+    'BooleanHolder',
+    'DictHolder',
+    'IntegerHolder',
+    'StringHolder',
+]
+
+from .interpreter import Interpreter, permitted_dependency_kwargs
+from .compiler import CompilerHolder
+from .interpreterobjects import (ExecutableHolder, BuildTargetHolder, CustomTargetHolder,
+                                 CustomTargetIndexHolder, MachineHolder, Test,
+                                 ConfigurationDataHolder, SubprojectHolder, DependencyHolder,
+                                 GeneratedListHolder, ExternalProgramHolder,
+                                 extract_required_kwarg)
+
+from .primitives import (
+    ArrayHolder,
+    BooleanHolder,
+    DictHolder,
+    IntegerHolder,
+    StringHolder,
+)
diff --git a/vendored-meson/meson/mesonbuild/interpreter/compiler.py b/vendored-meson/meson/mesonbuild/interpreter/compiler.py
new file mode 100644
index 000000000000..52737c4e0f5b
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/interpreter/compiler.py
@@ -0,0 +1,805 @@
+# SPDX-License-Identifier: Apache-2.0
+# Copyright 2012-2021 The Meson development team
+# Copyright © 2021 Intel Corporation
+from __future__ import annotations
+
+import collections
+import enum
+import functools
+import os
+import itertools
+import typing as T
+
+from .. import build
+from .. import coredata
+from .. import dependencies
+from .. import mesonlib
+from .. import mlog
+from ..compilers import SUFFIX_TO_LANG
+from ..compilers.compilers import CompileCheckMode
+from ..interpreterbase import (ObjectHolder, noPosargs, noKwargs,
+                               FeatureNew, disablerIfNotFound,
+                               InterpreterException)
+from ..interpreterbase.decorators import ContainerTypeInfo, typed_kwargs, KwargInfo, typed_pos_args
+from ..mesonlib import OptionKey
+from .interpreterobjects import (extract_required_kwarg, extract_search_dirs)
+from .type_checking import REQUIRED_KW, in_set_validator, NoneType
+
+if T.TYPE_CHECKING:
+    from ..interpreter import Interpreter
+    from ..compilers import Compiler, RunResult
+    from ..interpreterbase import TYPE_var, TYPE_kwargs
+    from .kwargs import ExtractRequired, ExtractSearchDirs
+    from .interpreter import SourceOutputs
+
+    from typing_extensions import TypedDict, Literal
+
+    class GetSupportedArgumentKw(TypedDict):
+
+        checked: Literal['warn', 'require', 'off']
+
+    class AlignmentKw(TypedDict):
+
+        prefix: str
+        args: T.List[str]
+        dependencies: T.List[dependencies.Dependency]
+
+    class CompileKW(TypedDict):
+
+        name: str
+        no_builtin_args: bool
+        include_directories: T.List[build.IncludeDirs]
+        args: T.List[str]
+        dependencies: T.List[dependencies.Dependency]
+
+    class CommonKW(TypedDict):
+
+        prefix: str
+        no_builtin_args: bool
+        include_directories: T.List[build.IncludeDirs]
+        args: T.List[str]
+        dependencies: T.List[dependencies.Dependency]
+
+    class ComputeIntKW(CommonKW):
+
+        guess: T.Optional[int]
+        high: T.Optional[int]
+        low: T.Optional[int]
+
+    class HeaderKW(CommonKW, ExtractRequired):
+        pass
+
+    class FindLibraryKW(ExtractRequired, ExtractSearchDirs):
+
+        disabler: bool
+        has_headers: T.List[str]
+        static: bool
+
+        # This list must be all of the `HeaderKW` values with `header_`
+        # prepended to the key
+        header_args: T.List[str]
+        header_dependencies: T.List[dependencies.Dependency]
+        header_include_directories: T.List[build.IncludeDirs]
+        header_no_builtin_args: bool
+        header_prefix: str
+        header_required: T.Union[bool, coredata.UserFeatureOption]
+
+    class PreprocessKW(TypedDict):
+        output: str
+        compile_args: T.List[str]
+        include_directories: T.List[build.IncludeDirs]
+        dependencies: T.List[dependencies.Dependency]
+
+
+class _TestMode(enum.Enum):
+
+    """Whether we're doing a compiler or linker check."""
+
+    COMPILER = 0
+    LINKER = 1
+
+
+class TryRunResultHolder(ObjectHolder['RunResult']):
+    def __init__(self, res: 'RunResult', interpreter: 'Interpreter'):
+        super().__init__(res, interpreter)
+        self.methods.update({'returncode': self.returncode_method,
+                             'compiled': self.compiled_method,
+                             'stdout': self.stdout_method,
+                             'stderr': self.stderr_method,
+                             })
+
+    @noPosargs
+    @noKwargs
+    def returncode_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> int:
+        return self.held_object.returncode
+
+    @noPosargs
+    @noKwargs
+    def compiled_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> bool:
+        return self.held_object.compiled
+
+    @noPosargs
+    @noKwargs
+    def stdout_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> str:
+        return self.held_object.stdout
+
+    @noPosargs
+    @noKwargs
+    def stderr_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> str:
+        return self.held_object.stderr
+
+
+_ARGS_KW: KwargInfo[T.List[str]] = KwargInfo(
+    'args',
+    ContainerTypeInfo(list, str),
+    listify=True,
+    default=[],
+)
+_DEPENDENCIES_KW: KwargInfo[T.List['dependencies.Dependency']] = KwargInfo(
+    'dependencies',
+    ContainerTypeInfo(list, dependencies.Dependency),
+    listify=True,
+    default=[],
+)
+_INCLUDE_DIRS_KW: KwargInfo[T.List[build.IncludeDirs]] = KwargInfo(
+    'include_directories',
+    ContainerTypeInfo(list, build.IncludeDirs),
+    default=[],
+    listify=True,
+)
+_PREFIX_KW: KwargInfo[str] = KwargInfo(
+    'prefix',
+    (str, ContainerTypeInfo(list, str)),
+    default='',
+    since_values={list: '1.0.0'},
+    convertor=lambda x: '\n'.join(x) if isinstance(x, list) else x)
+
+_NO_BUILTIN_ARGS_KW = KwargInfo('no_builtin_args', bool, default=False)
+_NAME_KW = KwargInfo('name', str, default='')
+
+# Many of the compiler methods take this kwarg signature exactly, this allows
+# simplifying the `typed_kwargs` calls
+_COMMON_KWS: T.List[KwargInfo] = [_ARGS_KW, _DEPENDENCIES_KW, _INCLUDE_DIRS_KW, _PREFIX_KW, _NO_BUILTIN_ARGS_KW]
+
+# Common methods of compiles, links, runs, and similar
+_COMPILES_KWS: T.List[KwargInfo] = [_NAME_KW, _ARGS_KW, _DEPENDENCIES_KW, _INCLUDE_DIRS_KW, _NO_BUILTIN_ARGS_KW]
+
+_HEADER_KWS: T.List[KwargInfo] = [REQUIRED_KW.evolve(since='0.50.0', default=False), *_COMMON_KWS]
+
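+# Illustrative sketch, not upstream Meson code: the shared KwargInfo lists let
+# each check method declare an identical kwarg signature once, e.g.:
+#
+#     @typed_pos_args('compiler.has_type', str)
+#     @typed_kwargs('compiler.has_type', *_COMMON_KWS)
+#     def has_type_method(self, args, kwargs): ...
+#
+# so 'args', 'dependencies', 'include_directories', 'prefix' and
+# 'no_builtin_args' are validated and defaulted uniformly.
+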
+class CompilerHolder(ObjectHolder['Compiler']):
+    preprocess_uid: T.Dict[str, itertools.count] = collections.defaultdict(itertools.count)
+
+    def __init__(self, compiler: 'Compiler', interpreter: 'Interpreter'):
+        super().__init__(compiler, interpreter)
+        self.environment = self.env
+        self.methods.update({'compiles': self.compiles_method,
+                             'links': self.links_method,
+                             'get_id': self.get_id_method,
+                             'get_linker_id': self.get_linker_id_method,
+                             'compute_int': self.compute_int_method,
+                             'sizeof': self.sizeof_method,
+                             'get_define': self.get_define_method,
+                             'check_header': self.check_header_method,
+                             'has_header': self.has_header_method,
+                             'has_header_symbol': self.has_header_symbol_method,
+                             'run': self.run_method,
+                             'has_function': self.has_function_method,
+                             'has_member': self.has_member_method,
+                             'has_members': self.has_members_method,
+                             'has_type': self.has_type_method,
+                             'alignment': self.alignment_method,
+                             'version': self.version_method,
+                             'cmd_array': self.cmd_array_method,
+                             'find_library': self.find_library_method,
+                             'has_argument': self.has_argument_method,
+                             'has_function_attribute': self.has_func_attribute_method,
+                             'get_supported_function_attributes': self.get_supported_function_attributes_method,
+                             'has_multi_arguments': self.has_multi_arguments_method,
+                             'get_supported_arguments': self.get_supported_arguments_method,
+                             'first_supported_argument': self.first_supported_argument_method,
+                             'has_link_argument': self.has_link_argument_method,
+                             'has_multi_link_arguments': self.has_multi_link_arguments_method,
+                             'get_supported_link_arguments': self.get_supported_link_arguments_method,
+                             'first_supported_link_argument': self.first_supported_link_argument_method,
+                             'symbols_have_underscore_prefix': self.symbols_have_underscore_prefix_method,
+                             'get_argument_syntax': self.get_argument_syntax_method,
+                             'preprocess': self.preprocess_method,
+                             })
+
+    @property
+    def compiler(self) -> 'Compiler':
+        return self.held_object
+
+    def _dep_msg(self, deps: T.List['dependencies.Dependency'], compile_only: bool, endl: str) -> str:
+        msg_single = 'with dependency {}'
+        msg_many = 'with dependencies {}'
+        names = []
+        for d in deps:
+            if isinstance(d, dependencies.InternalDependency):
+                FeatureNew.single_use('compiler method "dependencies" kwarg with internal dep', '0.57.0', self.subproject,
+                                      location=self.current_node)
+                continue
+            if isinstance(d, dependencies.ExternalLibrary):
+                if compile_only:
+                    continue
+                name = '-l' + d.name
+            else:
+                name = d.name
+            names.append(name)
+        if not names:
+            return endl
+        tpl = msg_many if len(names) > 1 else msg_single
+        if endl is None:
+            endl = ''
+        return tpl.format(', '.join(names)) + endl
+
+    @noPosargs
+    @noKwargs
+    def version_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> str:
+        return self.compiler.version
+
+    @noPosargs
+    @noKwargs
+    def cmd_array_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> T.List[str]:
+        return self.compiler.exelist
+
+    def _determine_args(self, nobuiltins: bool,
+                        incdirs: T.List[build.IncludeDirs],
+                        extra_args: T.List[str],
+                        mode: CompileCheckMode = CompileCheckMode.LINK) -> T.List[str]:
+        args: T.List[str] = []
+        for i in incdirs:
+            for idir in i.to_string_list(self.environment.get_source_dir()):
+                args.extend(self.compiler.get_include_args(idir, False))
+        if not nobuiltins:
+            opts = self.environment.coredata.options
+            args += self.compiler.get_option_compile_args(opts)
+            if mode is CompileCheckMode.LINK:
+                args.extend(self.compiler.get_option_link_args(opts))
+        args.extend(extra_args)
+        return args
+
+    def _determine_dependencies(self, deps: T.List['dependencies.Dependency'], compile_only: bool = False, endl: str = ':') -> T.Tuple[T.List['dependencies.Dependency'], str]:
+        deps = dependencies.get_leaf_external_dependencies(deps)
+        return deps, self._dep_msg(deps, compile_only, endl)
+
+    @typed_pos_args('compiler.alignment', str)
+    @typed_kwargs(
+        'compiler.alignment',
+        _PREFIX_KW,
+        _ARGS_KW,
+        _DEPENDENCIES_KW,
+    )
+    def alignment_method(self, args: T.Tuple[str], kwargs: 'AlignmentKw') -> int:
+        typename = args[0]
+        deps, msg = self._determine_dependencies(kwargs['dependencies'], compile_only=self.compiler.is_cross)
+        result, cached = self.compiler.alignment(typename, kwargs['prefix'], self.environment,
+                                                 extra_args=kwargs['args'],
+                                                 dependencies=deps)
+        cached_msg = mlog.blue('(cached)') if cached else ''
+        mlog.log('Checking for alignment of',
+                 mlog.bold(typename, True), msg, mlog.bold(str(result)), cached_msg)
+        return result
+
+    @typed_pos_args('compiler.run', (str, mesonlib.File))
+    @typed_kwargs('compiler.run', *_COMPILES_KWS)
+    def run_method(self, args: T.Tuple['mesonlib.FileOrString'], kwargs: 'CompileKW') -> 'RunResult':
+        code = args[0]
+        if isinstance(code, mesonlib.File):
+            self.interpreter.add_build_def_file(code)
+            code = mesonlib.File.from_absolute_file(
+                code.rel_to_builddir(self.environment.source_dir))
+        testname = kwargs['name']
+        extra_args = functools.partial(self._determine_args, kwargs['no_builtin_args'], kwargs['include_directories'], kwargs['args'])
+        deps, msg = self._determine_dependencies(kwargs['dependencies'], compile_only=False, endl=None)
+        result = self.compiler.run(code, self.environment, extra_args=extra_args,
+                                   dependencies=deps)
+        if testname:
+            if not result.compiled:
+                h = mlog.red('DID NOT COMPILE')
+            elif result.returncode == 0:
+                h = mlog.green('YES')
+            else:
+                h = mlog.red(f'NO ({result.returncode})')
+            mlog.log('Checking if', mlog.bold(testname, True), msg, 'runs:', h)
+        return result
+
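+    # Illustrative sketch, not upstream Meson code: a hypothetical meson.build
+    # snippet exercising this method and the TryRunResultHolder accessors:
+    #
+    #     cc = meson.get_compiler('c')
+    #     res = cc.run('int main(void) { return 0; }', name: 'trivial program')
+    #     # res.compiled(), res.returncode(), res.stdout(), res.stderr()
+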
+    @noPosargs
+    @noKwargs
+    def get_id_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> str:
+        return self.compiler.get_id()
+
+    @noPosargs
+    @noKwargs
+    @FeatureNew('compiler.get_linker_id', '0.53.0')
+    def get_linker_id_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> str:
+        return self.compiler.get_linker_id()
+
+    @noPosargs
+    @noKwargs
+    def symbols_have_underscore_prefix_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> bool:
+        '''
+        Check if the compiler prefixes _ (underscore) to global C symbols
+        See: https://en.wikipedia.org/wiki/Name_mangling#C
+        '''
+        return self.compiler.symbols_have_underscore_prefix(self.environment)
+
+    @typed_pos_args('compiler.has_member', str, str)
+    @typed_kwargs('compiler.has_member', *_COMMON_KWS)
+    def has_member_method(self, args: T.Tuple[str, str], kwargs: 'CommonKW') -> bool:
+        typename, membername = args
+        extra_args = functools.partial(self._determine_args, kwargs['no_builtin_args'], kwargs['include_directories'], kwargs['args'])
+        deps, msg = self._determine_dependencies(kwargs['dependencies'])
+        had, cached = self.compiler.has_members(typename, [membername], kwargs['prefix'],
+                                                self.environment,
+                                                extra_args=extra_args,
+                                                dependencies=deps)
+        cached_msg = mlog.blue('(cached)') if cached else ''
+        if had:
+            hadtxt = mlog.green('YES')
+        else:
+            hadtxt = mlog.red('NO')
+        mlog.log('Checking whether type', mlog.bold(typename, True),
+                 'has member', mlog.bold(membername, True), msg, hadtxt, cached_msg)
+        return had
+
+    @typed_pos_args('compiler.has_members', str, varargs=str, min_varargs=1)
+    @typed_kwargs('compiler.has_members', *_COMMON_KWS)
+    def has_members_method(self, args: T.Tuple[str, T.List[str]], kwargs: 'CommonKW') -> bool:
+        typename, membernames = args
+        extra_args = functools.partial(self._determine_args, kwargs['no_builtin_args'], kwargs['include_directories'], kwargs['args'])
+        deps, msg = self._determine_dependencies(kwargs['dependencies'])
+        had, cached = self.compiler.has_members(typename, membernames, kwargs['prefix'],
+                                                self.environment,
+                                                extra_args=extra_args,
+                                                dependencies=deps)
+        cached_msg = mlog.blue('(cached)') if cached else ''
+        if had:
+            hadtxt = mlog.green('YES')
+        else:
+            hadtxt = mlog.red('NO')
+        members = mlog.bold(', '.join([f'"{m}"' for m in membernames]))
+        mlog.log('Checking whether type', mlog.bold(typename, True),
+                 'has members', members, msg, hadtxt, cached_msg)
+        return had
+
+    @typed_pos_args('compiler.has_function', str)
+    @typed_kwargs('compiler.has_function', *_COMMON_KWS)
+    def has_function_method(self, args: T.Tuple[str], kwargs: 'CommonKW') -> bool:
+        funcname = args[0]
+        extra_args = self._determine_args(kwargs['no_builtin_args'], kwargs['include_directories'], kwargs['args'])
+        deps, msg = self._determine_dependencies(kwargs['dependencies'], compile_only=False)
+        had, cached = self.compiler.has_function(funcname, kwargs['prefix'], self.environment,
+                                                 extra_args=extra_args,
+                                                 dependencies=deps)
+        cached_msg = mlog.blue('(cached)') if cached else ''
+        if had:
+            hadtxt = mlog.green('YES')
+        else:
+            hadtxt = mlog.red('NO')
+        mlog.log('Checking for function', mlog.bold(funcname, True), msg, hadtxt, cached_msg)
+        return had
+
+    @typed_pos_args('compiler.has_type', str)
+    @typed_kwargs('compiler.has_type', *_COMMON_KWS)
+    def has_type_method(self, args: T.Tuple[str], kwargs: 'CommonKW') -> bool:
+        typename = args[0]
+        extra_args = functools.partial(self._determine_args, kwargs['no_builtin_args'], kwargs['include_directories'], kwargs['args'])
+        deps, msg = self._determine_dependencies(kwargs['dependencies'])
+        had, cached = self.compiler.has_type(typename, kwargs['prefix'], self.environment,
+                                             extra_args=extra_args, dependencies=deps)
+        cached_msg = mlog.blue('(cached)') if cached else ''
+        if had:
+            hadtxt = mlog.green('YES')
+        else:
+            hadtxt = mlog.red('NO')
+        mlog.log('Checking for type', mlog.bold(typename, True), msg, hadtxt, cached_msg)
+        return had
+
+    @FeatureNew('compiler.compute_int', '0.40.0')
+    @typed_pos_args('compiler.compute_int', str)
+    @typed_kwargs(
+        'compiler.compute_int',
+        KwargInfo('low', (int, NoneType)),
+        KwargInfo('high', (int, NoneType)),
+        KwargInfo('guess', (int, NoneType)),
+        *_COMMON_KWS,
+    )
+    def compute_int_method(self, args: T.Tuple[str], kwargs: 'ComputeIntKW') -> int:
+        expression = args[0]
+        extra_args = functools.partial(self._determine_args, kwargs['no_builtin_args'], kwargs['include_directories'], kwargs['args'])
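+        # When cross compiling the produced binary cannot be run, so the value
+        # must be derived from compile-only checks; hence compile_only is tied
+        # to self.compiler.is_cross here.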
+        deps, msg = self._determine_dependencies(kwargs['dependencies'], compile_only=self.compiler.is_cross)
+        res = self.compiler.compute_int(expression, kwargs['low'], kwargs['high'],
+                                        kwargs['guess'], kwargs['prefix'],
+                                        self.environment, extra_args=extra_args,
+                                        dependencies=deps)
+        mlog.log('Computing int of', mlog.bold(expression, True), msg, res)
+        return res
+
+    @typed_pos_args('compiler.sizeof', str)
+    @typed_kwargs('compiler.sizeof', *_COMMON_KWS)
+    def sizeof_method(self, args: T.Tuple[str], kwargs: 'CommonKW') -> int:
+        element = args[0]
+        extra_args = functools.partial(self._determine_args, kwargs['no_builtin_args'], kwargs['include_directories'], kwargs['args'])
+        deps, msg = self._determine_dependencies(kwargs['dependencies'], compile_only=self.compiler.is_cross)
+        esize, cached = self.compiler.sizeof(element, kwargs['prefix'], self.environment,
+                                             extra_args=extra_args, dependencies=deps)
+        cached_msg = mlog.blue('(cached)') if cached else ''
+        mlog.log('Checking for size of',
+                 mlog.bold(element, True), msg, mlog.bold(str(esize)), cached_msg)
+        return esize
+
+    @FeatureNew('compiler.get_define', '0.40.0')
+    @typed_pos_args('compiler.get_define', str)
+    @typed_kwargs('compiler.get_define', *_COMMON_KWS)
+    def get_define_method(self, args: T.Tuple[str], kwargs: 'CommonKW') -> str:
+        element = args[0]
+        extra_args = functools.partial(self._determine_args, kwargs['no_builtin_args'], kwargs['include_directories'], kwargs['args'])
+        deps, msg = self._determine_dependencies(kwargs['dependencies'])
+        value, cached = self.compiler.get_define(element, kwargs['prefix'], self.environment,
+                                                 extra_args=extra_args,
+                                                 dependencies=deps)
+        cached_msg = mlog.blue('(cached)') if cached else ''
+        mlog.log('Fetching value of define', mlog.bold(element, True), msg, value, cached_msg)
+        return value
+
+    @typed_pos_args('compiler.compiles', (str, mesonlib.File))
+    @typed_kwargs('compiler.compiles', *_COMPILES_KWS)
+    def compiles_method(self, args: T.Tuple['mesonlib.FileOrString'], kwargs: 'CompileKW') -> bool:
+        code = args[0]
+        if isinstance(code, mesonlib.File):
+            if code.is_built:
+                FeatureNew.single_use('compiler.compiles with file created at setup time', '1.2.0', self.subproject,
+                                      'It was broken and either errored or returned false.', self.current_node)
+            self.interpreter.add_build_def_file(code)
+            code = mesonlib.File.from_absolute_file(
+                code.absolute_path(self.environment.source_dir, self.environment.build_dir))
+        testname = kwargs['name']
+        extra_args = functools.partial(self._determine_args, kwargs['no_builtin_args'], kwargs['include_directories'], kwargs['args'])
+        deps, msg = self._determine_dependencies(kwargs['dependencies'], endl=None)
+        result, cached = self.compiler.compiles(code, self.environment,
+                                                extra_args=extra_args,
+                                                dependencies=deps)
+        if testname:
+            if result:
+                h = mlog.green('YES')
+            else:
+                h = mlog.red('NO')
+            cached_msg = mlog.blue('(cached)') if cached else ''
+            mlog.log('Checking if', mlog.bold(testname, True), msg, 'compiles:', h, cached_msg)
+        return result
+
+    @typed_pos_args('compiler.links', (str, mesonlib.File))
+    @typed_kwargs('compiler.links', *_COMPILES_KWS)
+    def links_method(self, args: T.Tuple['mesonlib.FileOrString'], kwargs: 'CompileKW') -> bool:
+        code = args[0]
+        compiler = None
+        if isinstance(code, mesonlib.File):
+            if code.is_built:
+                FeatureNew.single_use('compiler.links with file created at setup time', '1.2.0', self.subproject,
+                                      'It was broken and either errored or returned false.', self.current_node)
+            self.interpreter.add_build_def_file(code)
+            code = mesonlib.File.from_absolute_file(
+                code.absolute_path(self.environment.source_dir, self.environment.build_dir))
+            suffix = code.suffix
+            if suffix not in self.compiler.file_suffixes:
+                for_machine = self.compiler.for_machine
+                clist = self.interpreter.coredata.compilers[for_machine]
+                if suffix not in SUFFIX_TO_LANG:
+                    # just pass it to the compiler driver
+                    mlog.warning(f'Unknown suffix for test file {code}')
+                elif SUFFIX_TO_LANG[suffix] not in clist:
+                    mlog.warning(f'Passed {SUFFIX_TO_LANG[suffix]} source to links method, but that language is not specified for the {for_machine.get_lower_case_name()} machine.')
+                else:
+                    compiler = clist[SUFFIX_TO_LANG[suffix]]
+
+        testname = kwargs['name']
+        extra_args = functools.partial(self._determine_args, kwargs['no_builtin_args'], kwargs['include_directories'], kwargs['args'])
+        deps, msg = self._determine_dependencies(kwargs['dependencies'], compile_only=False)
+        result, cached = self.compiler.links(code, self.environment,
+                                             compiler=compiler,
+                                             extra_args=extra_args,
+                                             dependencies=deps)
+        cached_msg = mlog.blue('(cached)') if cached else ''
+        if testname:
+            if result:
+                h = mlog.green('YES')
+            else:
+                h = mlog.red('NO')
+            mlog.log('Checking if', mlog.bold(testname, True), msg, 'links:', h, cached_msg)
+        return result
+
+    @FeatureNew('compiler.check_header', '0.47.0')
+    @typed_pos_args('compiler.check_header', str)
+    @typed_kwargs('compiler.check_header', *_HEADER_KWS)
+    def check_header_method(self, args: T.Tuple[str], kwargs: 'HeaderKW') -> bool:
+        hname = args[0]
+        disabled, required, feature = extract_required_kwarg(kwargs, self.subproject, default=False)
+        if disabled:
+            mlog.log('Check usable header', mlog.bold(hname, True), 'skipped: feature', mlog.bold(feature), 'disabled')
+            return False
+        extra_args = functools.partial(self._determine_args, kwargs['no_builtin_args'], kwargs['include_directories'], kwargs['args'])
+        deps, msg = self._determine_dependencies(kwargs['dependencies'])
+        haz, cached = self.compiler.check_header(hname, kwargs['prefix'], self.environment,
+                                                 extra_args=extra_args,
+                                                 dependencies=deps)
+        cached_msg = mlog.blue('(cached)') if cached else ''
+        if required and not haz:
+            raise InterpreterException(f'{self.compiler.get_display_language()} header {hname!r} not usable')
+        elif haz:
+            h = mlog.green('YES')
+        else:
+            h = mlog.red('NO')
+        mlog.log('Check usable header', mlog.bold(hname, True), msg, h, cached_msg)
+        return haz
+
+    def _has_header_impl(self, hname: str, kwargs: 'HeaderKW') -> bool:
+        disabled, required, feature = extract_required_kwarg(kwargs, self.subproject, default=False)
+        if disabled:
+            mlog.log('Has header', mlog.bold(hname, True), 'skipped: feature', mlog.bold(feature), 'disabled')
+            return False
+        extra_args = functools.partial(self._determine_args, kwargs['no_builtin_args'], kwargs['include_directories'], kwargs['args'])
+        deps, msg = self._determine_dependencies(kwargs['dependencies'])
+        haz, cached = self.compiler.has_header(hname, kwargs['prefix'], self.environment,
+                                               extra_args=extra_args, dependencies=deps)
+        cached_msg = mlog.blue('(cached)') if cached else ''
+        if required and not haz:
+            raise InterpreterException(f'{self.compiler.get_display_language()} header {hname!r} not found')
+        elif haz:
+            h = mlog.green('YES')
+        else:
+            h = mlog.red('NO')
+        mlog.log('Has header', mlog.bold(hname, True), msg, h, cached_msg)
+        return haz
+
+    @typed_pos_args('compiler.has_header', str)
+    @typed_kwargs('compiler.has_header', *_HEADER_KWS)
+    def has_header_method(self, args: T.Tuple[str], kwargs: 'HeaderKW') -> bool:
+        return self._has_header_impl(args[0], kwargs)
+
+    @typed_pos_args('compiler.has_header_symbol', str, str)
+    @typed_kwargs('compiler.has_header_symbol', *_HEADER_KWS)
+    def has_header_symbol_method(self, args: T.Tuple[str, str], kwargs: 'HeaderKW') -> bool:
+        hname, symbol = args
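+        # Illustrative meson.build usage:
+        #   cc.has_header_symbol('sys/mman.h', 'MAP_ANONYMOUS')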
+        disabled, required, feature = extract_required_kwarg(kwargs, self.subproject, default=False)
+        if disabled:
+            mlog.log('Header', mlog.bold(hname, True), 'has symbol', mlog.bold(symbol, True), 'skipped: feature', mlog.bold(feature), 'disabled')
+            return False
+        extra_args = functools.partial(self._determine_args, kwargs['no_builtin_args'], kwargs['include_directories'], kwargs['args'])
+        deps, msg = self._determine_dependencies(kwargs['dependencies'])
+        haz, cached = self.compiler.has_header_symbol(hname, symbol, kwargs['prefix'], self.environment,
+                                                      extra_args=extra_args,
+                                                      dependencies=deps)
+        if required and not haz:
+            raise InterpreterException(f'{self.compiler.get_display_language()} symbol {symbol} not found in header {hname}')
+        elif haz:
+            h = mlog.green('YES')
+        else:
+            h = mlog.red('NO')
+        cached_msg = mlog.blue('(cached)') if cached else ''
+        mlog.log('Header', mlog.bold(hname, True), 'has symbol', mlog.bold(symbol, True), msg, h, cached_msg)
+        return haz
+
+    def notfound_library(self, libname: str) -> 'dependencies.ExternalLibrary':
+        lib = dependencies.ExternalLibrary(libname, None,
+                                           self.environment,
+                                           self.compiler.language,
+                                           silent=True)
+        return lib
+
+    @disablerIfNotFound
+    @typed_pos_args('compiler.find_library', str)
+    @typed_kwargs(
+        'compiler.find_library',
+        KwargInfo('required', (bool, coredata.UserFeatureOption), default=True),
+        KwargInfo('has_headers', ContainerTypeInfo(list, str), listify=True, default=[], since='0.50.0'),
+        KwargInfo('static', (bool, NoneType), since='0.51.0'),
+        KwargInfo('disabler', bool, default=False, since='0.49.0'),
+        KwargInfo('dirs', ContainerTypeInfo(list, str), listify=True, default=[]),
+        *(k.evolve(name=f'header_{k.name}') for k in _HEADER_KWS)
+    )
+    def find_library_method(self, args: T.Tuple[str], kwargs: 'FindLibraryKW') -> 'dependencies.ExternalLibrary':
+        # TODO add dependencies support?
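+        # Illustrative meson.build usage:
+        #   m_dep = cc.find_library('m', required: false)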
+        libname = args[0]
+
+        disabled, required, feature = extract_required_kwarg(kwargs, self.subproject)
+        if disabled:
+            mlog.log('Library', mlog.bold(libname), 'skipped: feature', mlog.bold(feature), 'disabled')
+            return self.notfound_library(libname)
+
+        # This could be done with a comprehension, but that confuses the type
+        # checker, and having it check this seems valuable
+        has_header_kwargs: 'HeaderKW' = {
+            'required': required,
+            'args': kwargs['header_args'],
+            'dependencies': kwargs['header_dependencies'],
+            'include_directories': kwargs['header_include_directories'],
+            'prefix': kwargs['header_prefix'],
+            'no_builtin_args': kwargs['header_no_builtin_args'],
+        }
+        for h in kwargs['has_headers']:
+            if not self._has_header_impl(h, has_header_kwargs):
+                return self.notfound_library(libname)
+
+        search_dirs = extract_search_dirs(kwargs)
+
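+        # An explicit `static` kwarg takes precedence over the global
+        # prefer_static option when picking the library type.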
+        prefer_static = self.environment.coredata.get_option(OptionKey('prefer_static'))
+        if kwargs['static'] is True:
+            libtype = mesonlib.LibType.STATIC
+        elif kwargs['static'] is False:
+            libtype = mesonlib.LibType.SHARED
+        elif prefer_static:
+            libtype = mesonlib.LibType.PREFER_STATIC
+        else:
+            libtype = mesonlib.LibType.PREFER_SHARED
+        linkargs = self.compiler.find_library(libname, self.environment, search_dirs, libtype)
+        if required and not linkargs:
+            if libtype == mesonlib.LibType.PREFER_SHARED:
+                libtype_s = 'shared or static'
+            else:
+                libtype_s = libtype.name.lower()
+            raise InterpreterException('{} {} library {!r} not found'
+                                       .format(self.compiler.get_display_language(),
+                                               libtype_s, libname))
+        lib = dependencies.ExternalLibrary(libname, linkargs, self.environment,
+                                           self.compiler.language)
+        return lib
+
+    def _has_argument_impl(self, arguments: T.Union[str, T.List[str]],
+                           mode: _TestMode = _TestMode.COMPILER) -> bool:
+        """Shared implementation for methods checking compiler and linker arguments."""
+        # This simplifies the callers
+        if isinstance(arguments, str):
+            arguments = [arguments]
+        test = self.compiler.has_multi_link_arguments if mode is _TestMode.LINKER else self.compiler.has_multi_arguments
+        result, cached = test(arguments, self.environment)
+        cached_msg = mlog.blue('(cached)') if cached else ''
+        mlog.log(
+            'Compiler for',
+            self.compiler.get_display_language(),
+            'supports{}'.format(' link' if mode is _TestMode.LINKER else ''),
+            'arguments {}:'.format(' '.join(arguments)),
+            mlog.green('YES') if result else mlog.red('NO'),
+            cached_msg)
+        return result
+
+    @noKwargs
+    @typed_pos_args('compiler.has_argument', str)
+    def has_argument_method(self, args: T.Tuple[str], kwargs: 'TYPE_kwargs') -> bool:
+        return self._has_argument_impl([args[0]])
+
+    @noKwargs
+    @typed_pos_args('compiler.has_multi_arguments', varargs=str)
+    @FeatureNew('compiler.has_multi_arguments', '0.37.0')
+    def has_multi_arguments_method(self, args: T.Tuple[T.List[str]], kwargs: 'TYPE_kwargs') -> bool:
+        return self._has_argument_impl(args[0])
+
+    @FeatureNew('compiler.get_supported_arguments', '0.43.0')
+    @typed_pos_args('compiler.get_supported_arguments', varargs=str)
+    @typed_kwargs(
+        'compiler.get_supported_arguments',
+        KwargInfo('checked', str, default='off', since='0.59.0',
+                  validator=in_set_validator({'warn', 'require', 'off'})),
+    )
+    def get_supported_arguments_method(self, args: T.Tuple[T.List[str]], kwargs: 'GetSupportedArgumentKw') -> T.List[str]:
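+        # Illustrative meson.build usage:
+        #   add_project_arguments(cc.get_supported_arguments('-Wformat=2', '-Wno-unused'), language: 'c')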
+        supported_args: T.List[str] = []
+        checked = kwargs['checked']
+
+        for arg in args[0]:
+            if not self._has_argument_impl([arg]):
+                msg = f'Compiler for {self.compiler.get_display_language()} does not support "{arg}"'
+                if checked == 'warn':
+                    mlog.warning(msg)
+                elif checked == 'require':
+                    raise mesonlib.MesonException(msg)
+            else:
+                supported_args.append(arg)
+        return supported_args
+
+    @noKwargs
+    @typed_pos_args('compiler.first_supported_argument', varargs=str)
+    def first_supported_argument_method(self, args: T.Tuple[T.List[str]], kwargs: 'TYPE_kwargs') -> T.List[str]:
+        for arg in args[0]:
+            if self._has_argument_impl([arg]):
+                mlog.log('First supported argument:', mlog.bold(arg))
+                return [arg]
+        mlog.log('First supported argument:', mlog.red('None'))
+        return []
+
+    @FeatureNew('compiler.has_link_argument', '0.46.0')
+    @noKwargs
+    @typed_pos_args('compiler.has_link_argument', str)
+    def has_link_argument_method(self, args: T.Tuple[str], kwargs: 'TYPE_kwargs') -> bool:
+        return self._has_argument_impl([args[0]], mode=_TestMode.LINKER)
+
+    @FeatureNew('compiler.has_multi_link_argument', '0.46.0')
+    @noKwargs
+    @typed_pos_args('compiler.has_multi_link_argument', varargs=str)
+    def has_multi_link_arguments_method(self, args: T.Tuple[T.List[str]], kwargs: 'TYPE_kwargs') -> bool:
+        return self._has_argument_impl(args[0], mode=_TestMode.LINKER)
+
+    @FeatureNew('compiler.get_supported_link_arguments', '0.46.0')
+    @noKwargs
+    @typed_pos_args('compiler.get_supported_link_arguments', varargs=str)
+    def get_supported_link_arguments_method(self, args: T.Tuple[T.List[str]], kwargs: 'TYPE_kwargs') -> T.List[str]:
+        supported_args: T.List[str] = []
+        for arg in args[0]:
+            if self._has_argument_impl([arg], mode=_TestMode.LINKER):
+                supported_args.append(arg)
+        return supported_args
+
+    @FeatureNew('compiler.first_supported_link_argument_method', '0.46.0')
+    @noKwargs
+    @typed_pos_args('compiler.first_supported_link_argument', varargs=str)
+    def first_supported_link_argument_method(self, args: T.Tuple[T.List[str]], kwargs: 'TYPE_kwargs') -> T.List[str]:
+        for arg in args[0]:
+            if self._has_argument_impl([arg], mode=_TestMode.LINKER):
+                mlog.log('First supported link argument:', mlog.bold(arg))
+                return [arg]
+        mlog.log('First supported link argument:', mlog.red('None'))
+        return []
+
+    def _has_function_attribute_impl(self, attr: str) -> bool:
+        """Common helper for function attribute testing."""
+        result, cached = self.compiler.has_func_attribute(attr, self.environment)
+        cached_msg = mlog.blue('(cached)') if cached else ''
+        h = mlog.green('YES') if result else mlog.red('NO')
+        mlog.log(f'Compiler for {self.compiler.get_display_language()} supports function attribute {attr}:', h, cached_msg)
+        return result
+
+    @FeatureNew('compiler.has_function_attribute', '0.48.0')
+    @noKwargs
+    @typed_pos_args('compiler.has_function_attribute', str)
+    def has_func_attribute_method(self, args: T.Tuple[str], kwargs: 'TYPE_kwargs') -> bool:
+        return self._has_function_attribute_impl(args[0])
+
+    @FeatureNew('compiler.get_supported_function_attributes', '0.48.0')
+    @noKwargs
+    @typed_pos_args('compiler.get_supported_function_attributes', varargs=str)
+    def get_supported_function_attributes_method(self, args: T.Tuple[T.List[str]], kwargs: 'TYPE_kwargs') -> T.List[str]:
+        return [a for a in args[0] if self._has_function_attribute_impl(a)]
+
+    @FeatureNew('compiler.get_argument_syntax_method', '0.49.0')
+    @noPosargs
+    @noKwargs
+    def get_argument_syntax_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> str:
+        return self.compiler.get_argument_syntax()
+
+    @FeatureNew('compiler.preprocess', '0.64.0')
+    @typed_pos_args('compiler.preprocess', varargs=(str, mesonlib.File, build.CustomTarget, build.CustomTargetIndex, build.GeneratedList), min_varargs=1)
+    @typed_kwargs(
+        'compiler.preprocess',
+        KwargInfo('output', str, default='@PLAINNAME@.i'),
+        KwargInfo('compile_args', ContainerTypeInfo(list, str), listify=True, default=[]),
+        _INCLUDE_DIRS_KW,
+        _DEPENDENCIES_KW.evolve(since='1.1.0'),
+    )
+    def preprocess_method(self, args: T.Tuple[T.List['mesonlib.FileOrString']], kwargs: 'PreprocessKW') -> T.List[build.CustomTargetIndex]:
+        compiler = self.compiler.get_preprocessor()
+        sources: 'SourceOutputs' = self.interpreter.source_strings_to_files(args[0])
+        if any(isinstance(s, (build.CustomTarget, build.CustomTargetIndex, build.GeneratedList)) for s in sources):
+            FeatureNew.single_use('compiler.preprocess with generated sources', '1.1.0', self.subproject,
+                                  location=self.current_node)
+
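+        # A per-subdir counter gives every preprocess() call a unique target
+        # name; second and later uses are feature-gated below.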
+        tg_counter = next(self.preprocess_uid[self.interpreter.subdir])
+        if tg_counter > 0:
+            FeatureNew.single_use('compiler.preprocess used multiple times', '1.1.0', self.subproject,
+                                  location=self.current_node)
+        tg_name = f'preprocessor_{tg_counter}'
+        tg = build.CompileTarget(
+            tg_name,
+            self.interpreter.subdir,
+            self.subproject,
+            self.environment,
+            sources,
+            kwargs['output'],
+            compiler,
+            self.interpreter.backend,
+            kwargs['compile_args'],
+            kwargs['include_directories'],
+            kwargs['dependencies'])
+        self.interpreter.add_target(tg.name, tg)
+        # Expose this target as a list of its outputs, so the user can pass
+        # them to other targets, list outputs, etc.
+        private_dir = os.path.relpath(self.interpreter.backend.get_target_private_dir(tg), self.interpreter.subdir)
+        return [build.CustomTargetIndex(tg, os.path.join(private_dir, o)) for o in tg.outputs]
diff --git a/vendored-meson/meson/mesonbuild/interpreter/dependencyfallbacks.py b/vendored-meson/meson/mesonbuild/interpreter/dependencyfallbacks.py
new file mode 100644
index 000000000000..7ef152796c1c
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/interpreter/dependencyfallbacks.py
@@ -0,0 +1,374 @@
+from __future__ import annotations
+
+from .interpreterobjects import extract_required_kwarg
+from .. import mlog
+from .. import dependencies
+from .. import build
+from ..wrap import WrapMode
+from ..mesonlib import OptionKey, extract_as_list, stringlistify, version_compare_many, listify
+from ..dependencies import Dependency, DependencyException, NotFoundDependency
+from ..interpreterbase import (MesonInterpreterObject, FeatureNew,
+                               InterpreterException, InvalidArguments)
+
+import typing as T
+if T.TYPE_CHECKING:
+    from .interpreter import Interpreter
+    from ..interpreterbase import TYPE_nkwargs, TYPE_nvar
+    from .interpreterobjects import SubprojectHolder
+
+
+class DependencyFallbacksHolder(MesonInterpreterObject):
+    def __init__(self, interpreter: 'Interpreter', names: T.List[str], allow_fallback: T.Optional[bool] = None,
+                 default_options: T.Optional[T.Dict[OptionKey, str]] = None) -> None:
+        super().__init__(subproject=interpreter.subproject)
+        self.interpreter = interpreter
+        self.subproject = interpreter.subproject
+        self.coredata = interpreter.coredata
+        self.build = interpreter.build
+        self.environment = interpreter.environment
+        self.wrap_resolver = interpreter.environment.wrap_resolver
+        self.allow_fallback = allow_fallback
+        self.subproject_name: T.Optional[str] = None
+        self.subproject_varname: T.Optional[str] = None
+        self.subproject_kwargs = {'default_options': default_options or {}}
+        self.names: T.List[str] = []
+        self.forcefallback: bool = False
+        self.nofallback: bool = False
+        for name in names:
+            if not name:
+                raise InterpreterException('dependency_fallbacks empty name \'\' is not allowed')
+            if '<' in name or '>' in name or '=' in name:
+                raise InvalidArguments('Characters <, > and = are forbidden in dependency names. To specify\n'
+                                       'version requirements use the \'version\' keyword argument instead.')
+            if name in self.names:
+                raise InterpreterException(f'dependency_fallbacks name {name!r} is duplicated')
+            self.names.append(name)
+        self._display_name = self.names[0] if self.names else '(anonymous)'
+
+    def set_fallback(self, fbinfo: T.Optional[T.Union[T.List[str], str]]) -> None:
+        # Legacy: This converts dependency()'s fallback kwargs.
+        if fbinfo is None:
+            return
+        if self.allow_fallback is not None:
+            raise InvalidArguments('"fallback" and "allow_fallback" arguments are mutually exclusive')
+        fbinfo = stringlistify(fbinfo)
+        if len(fbinfo) == 0:
+            # dependency('foo', fallback: []) is the same as dependency('foo', allow_fallback: false)
+            self.allow_fallback = False
+            return
+        if len(fbinfo) == 1:
+            FeatureNew.single_use('Fallback without variable name', '0.53.0', self.subproject)
+            subp_name, varname = fbinfo[0], None
+        elif len(fbinfo) == 2:
+            subp_name, varname = fbinfo
+        else:
+            raise InterpreterException('Fallback info must have one or two items.')
+        self._subproject_impl(subp_name, varname)
+
+    def _subproject_impl(self, subp_name: str, varname: str) -> None:
+        assert self.subproject_name is None
+        self.subproject_name = subp_name
+        self.subproject_varname = varname
+
+    def _do_dependency_cache(self, kwargs: TYPE_nkwargs, func_args: TYPE_nvar, func_kwargs: TYPE_nkwargs) -> T.Optional[Dependency]:
+        name = func_args[0]
+        cached_dep = self._get_cached_dep(name, kwargs)
+        if cached_dep:
+            self._verify_fallback_consistency(cached_dep)
+        return cached_dep
+
+    def _do_dependency(self, kwargs: TYPE_nkwargs, func_args: TYPE_nvar, func_kwargs: TYPE_nkwargs) -> T.Optional[Dependency]:
+        # Note that there is no df.dependency() method; this is called for names
+        # given as positional arguments to dependency_fallbacks(name1, ...).
+        # We use kwargs from the dependency() function, for things like version,
+        # modules, etc.
+        name = func_args[0]
+        self._handle_featurenew_dependencies(name)
+        dep = dependencies.find_external_dependency(name, self.environment, kwargs)
+        if dep.found():
+            for_machine = self.interpreter.machine_from_native_kwarg(kwargs)
+            identifier = dependencies.get_dep_identifier(name, kwargs)
+            self.coredata.deps[for_machine].put(identifier, dep)
+            return dep
+        return None
+
+    def _do_existing_subproject(self, kwargs: TYPE_nkwargs, func_args: TYPE_nvar, func_kwargs: TYPE_nkwargs) -> T.Optional[Dependency]:
+        subp_name = func_args[0]
+        varname = self.subproject_varname
+        if subp_name and self._get_subproject(subp_name):
+            return self._get_subproject_dep(subp_name, varname, kwargs)
+        return None
+
+    def _do_subproject(self, kwargs: TYPE_nkwargs, func_args: TYPE_nvar, func_kwargs: TYPE_nkwargs) -> T.Optional[Dependency]:
+        if self.forcefallback:
+            mlog.log('Looking for a fallback subproject for the dependency',
+                     mlog.bold(self._display_name), 'because:\nUse of fallback dependencies is forced.')
+        elif self.nofallback:
+            mlog.log('Not looking for a fallback subproject for the dependency',
+                     mlog.bold(self._display_name), 'because:\nUse of fallback dependencies is disabled.')
+            return None
+        else:
+            mlog.log('Looking for a fallback subproject for the dependency',
+                     mlog.bold(self._display_name))
+
+        # dependency('foo', static: true) should implicitly add
+        # default_options: ['default_library=static']
+        static = kwargs.get('static')
+        default_options = func_kwargs.get('default_options', {})
+        if static is not None and 'default_library' not in default_options:
+            default_library = 'static' if static else 'shared'
+            mlog.log(f'Building fallback subproject with default_library={default_library}')
+            default_options[OptionKey('default_library')] = default_library
+            func_kwargs['default_options'] = default_options
+
+        # Configure the subproject
+        subp_name = self.subproject_name
+        varname = self.subproject_varname
+        func_kwargs.setdefault('version', [])
+        if 'default_options' in kwargs and isinstance(kwargs['default_options'], str):
+            func_kwargs['default_options'] = listify(kwargs['default_options'])
+        self.interpreter.do_subproject(subp_name, 'meson', func_kwargs)
+        return self._get_subproject_dep(subp_name, varname, kwargs)
+
+    def _get_subproject(self, subp_name: str) -> T.Optional[SubprojectHolder]:
+        sub = self.interpreter.subprojects.get(subp_name)
+        if sub and sub.found():
+            return sub
+        return None
+
+    def _get_subproject_dep(self, subp_name: str, varname: str, kwargs: TYPE_nkwargs) -> T.Optional[Dependency]:
+        # Verify the subproject is found
+        subproject = self._get_subproject(subp_name)
+        if not subproject:
+            mlog.log('Dependency', mlog.bold(self._display_name), 'from subproject',
+                     mlog.bold(subp_name), 'found:', mlog.red('NO'),
+                     mlog.blue('(subproject failed to configure)'))
+            return None
+
+        # The subproject has been configured. If for any reason the dependency
+        # cannot be found in this subproject we have to return a not-found
+        # object instead of None, because we don't want to continue the lookup
+        # on the system.
+
+        # Check if the subproject has overridden at least one of the names we got.
+        cached_dep = None
+        for name in self.names:
+            cached_dep = self._get_cached_dep(name, kwargs)
+            if cached_dep:
+                break
+
+        # If we have cached_dep we did all the checks and logging already in
+        # self._get_cached_dep().
+        if cached_dep:
+            self._verify_fallback_consistency(cached_dep)
+            return cached_dep
+
+        # Legacy: Use the variable name if provided instead of relying on the
+        # subproject to override one of our dependency names
+        if not varname:
+            # If no variable name is specified, check if the wrap file has one.
+            # If the wrap file has a variable name, prefer it, because the
+            # subproject most probably is not using meson.override_dependency().
+            for name in self.names:
+                varname = self.wrap_resolver.get_varname(subp_name, name)
+                if varname:
+                    break
+        if not varname:
+            mlog.warning(f'Subproject {subp_name!r} did not override {self._display_name!r} dependency and no variable name specified')
+            mlog.log('Dependency', mlog.bold(self._display_name), 'from subproject',
+                     mlog.bold(subproject.subdir), 'found:', mlog.red('NO'))
+            return self._notfound_dependency()
+
+        var_dep = self._get_subproject_variable(subproject, varname) or self._notfound_dependency()
+        if not var_dep.found():
+            mlog.log('Dependency', mlog.bold(self._display_name), 'from subproject',
+                     mlog.bold(subproject.subdir), 'found:', mlog.red('NO'))
+            return var_dep
+
+        wanted = stringlistify(kwargs.get('version', []))
+        found = var_dep.get_version()
+        if not self._check_version(wanted, found):
+            mlog.log('Dependency', mlog.bold(self._display_name), 'from subproject',
+                     mlog.bold(subproject.subdir), 'found:', mlog.red('NO'),
+                     'found', mlog.normal_cyan(found), 'but need:',
+                     mlog.bold(', '.join([f"'{e}'" for e in wanted])))
+            return self._notfound_dependency()
+
+        mlog.log('Dependency', mlog.bold(self._display_name), 'from subproject',
+                 mlog.bold(subproject.subdir), 'found:', mlog.green('YES'),
+                 mlog.normal_cyan(found) if found else None)
+        return var_dep
+
+    def _get_cached_dep(self, name: str, kwargs: TYPE_nkwargs) -> T.Optional[Dependency]:
+        # Unlike other methods, this one returns a not-found dependency instead
+        # of None when the dependency is cached as not-found, or when the cached
+        # version does not match. In those cases we don't want to continue with
+        # other candidates.
+        for_machine = self.interpreter.machine_from_native_kwarg(kwargs)
+        identifier = dependencies.get_dep_identifier(name, kwargs)
+        wanted_vers = stringlistify(kwargs.get('version', []))
+
+        override = self.build.dependency_overrides[for_machine].get(identifier)
+        if override:
+            info = [mlog.blue('(overridden)' if override.explicit else '(cached)')]
+            cached_dep = override.dep
+            # We don't implicitly override not-found dependencies, but the user
+            # could have explicitly called meson.override_dependency() with a
+            # not-found dep.
+            if not cached_dep.found():
+                mlog.log('Dependency', mlog.bold(self._display_name),
+                         'found:', mlog.red('NO'), *info)
+                return cached_dep
+        elif self.forcefallback and self.subproject_name:
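+            # A forced fallback ignores any dependency cached from the system;
+            # the fallback subproject will be configured instead.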
+            cached_dep = None
+        else:
+            info = [mlog.blue('(cached)')]
+            cached_dep = self.coredata.deps[for_machine].get(identifier)
+
+        if cached_dep:
+            found_vers = cached_dep.get_version()
+            if not self._check_version(wanted_vers, found_vers):
+                if not override:
+                    # We cached this dependency on disk from a previous run,
+                    # but it could have been updated on the system in the meantime.
+                    return None
+                mlog.log('Dependency', mlog.bold(name),
+                         'found:', mlog.red('NO'),
+                         'found', mlog.normal_cyan(found_vers), 'but need:',
+                         mlog.bold(', '.join([f"'{e}'" for e in wanted_vers])),
+                         *info)
+                return self._notfound_dependency()
+            if found_vers:
+                info = [mlog.normal_cyan(found_vers), *info]
+            mlog.log('Dependency', mlog.bold(self._display_name),
+                     'found:', mlog.green('YES'), *info)
+            return cached_dep
+        return None
+
+    def _get_subproject_variable(self, subproject: SubprojectHolder, varname: str) -> T.Optional[Dependency]:
+        try:
+            var_dep = subproject.get_variable_method([varname], {})
+        except InvalidArguments:
+            var_dep = None
+        if not isinstance(var_dep, Dependency):
+            mlog.warning(f'Variable {varname!r} in the subproject {subproject.subdir!r} is',
+                         'not found' if var_dep is None else 'not a dependency object')
+            return None
+        return var_dep
+
+    def _verify_fallback_consistency(self, cached_dep: Dependency) -> None:
+        subp_name = self.subproject_name
+        varname = self.subproject_varname
+        subproject = self._get_subproject(subp_name)
+        if subproject and varname:
+            var_dep = self._get_subproject_variable(subproject, varname)
+            if var_dep and cached_dep.found() and var_dep != cached_dep:
+                mlog.warning(f'Inconsistency: Subproject has overridden the dependency with a variable other than {varname!r}')
+
+    def _handle_featurenew_dependencies(self, name: str) -> None:
+        'Do a feature check on dependencies used by this subproject'
+        if name == 'mpi':
+            FeatureNew.single_use('MPI Dependency', '0.42.0', self.subproject)
+        elif name == 'pcap':
+            FeatureNew.single_use('Pcap Dependency', '0.42.0', self.subproject)
+        elif name == 'vulkan':
+            FeatureNew.single_use('Vulkan Dependency', '0.42.0', self.subproject)
+        elif name == 'libwmf':
+            FeatureNew.single_use('LibWMF Dependency', '0.44.0', self.subproject)
+        elif name == 'openmp':
+            FeatureNew.single_use('OpenMP Dependency', '0.46.0', self.subproject)
+
+    def _notfound_dependency(self) -> NotFoundDependency:
+        return NotFoundDependency(self.names[0] if self.names else '', self.environment)
+
+    @staticmethod
+    def _check_version(wanted: T.List[str], found: str) -> bool:
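+        # An empty `wanted` list accepts any version; a version of 'undefined'
+        # never satisfies explicit constraints.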
+        if not wanted:
+            return True
+        return found != 'undefined' and version_compare_many(found, wanted)[0]
+
+    def _get_candidates(self) -> T.List[T.Tuple[T.Callable[[TYPE_nkwargs, TYPE_nvar, TYPE_nkwargs], T.Optional[Dependency]], TYPE_nvar, TYPE_nkwargs]]:
+        candidates = []
+        # 1. check if any of the names is cached already.
+        for name in self.names:
+            candidates.append((self._do_dependency_cache, [name], {}))
+        # 2. check if the subproject fallback has already been configured.
+        if self.subproject_name:
+            candidates.append((self._do_existing_subproject, [self.subproject_name], self.subproject_kwargs))
+        # 3. check external dependency if we are not forced to use subproject
+        if not self.forcefallback or not self.subproject_name:
+            for name in self.names:
+                candidates.append((self._do_dependency, [name], {}))
+        # 4. configure the subproject
+        if self.subproject_name:
+            candidates.append((self._do_subproject, [self.subproject_name], self.subproject_kwargs))
+        return candidates
+
+    def lookup(self, kwargs: TYPE_nkwargs, force_fallback: bool = False) -> Dependency:
+        mods = extract_as_list(kwargs, 'modules')
+        if mods:
+            self._display_name += ' (modules: {})'.format(', '.join(str(i) for i in mods))
+
+        disabled, required, feature = extract_required_kwarg(kwargs, self.subproject)
+        if disabled:
+            mlog.log('Dependency', mlog.bold(self._display_name), 'skipped: feature', mlog.bold(feature), 'disabled')
+            return self._notfound_dependency()
+
+        # Check if usage of the subproject fallback is forced
+        wrap_mode = self.coredata.get_option(OptionKey('wrap_mode'))
+        assert isinstance(wrap_mode, WrapMode), 'for mypy'
+        force_fallback_for = self.coredata.get_option(OptionKey('force_fallback_for'))
+        assert isinstance(force_fallback_for, list), 'for mypy'
+        self.nofallback = wrap_mode == WrapMode.nofallback
+        self.forcefallback = (force_fallback or
+                              wrap_mode == WrapMode.forcefallback or
+                              any(name in force_fallback_for for name in self.names) or
+                              self.subproject_name in force_fallback_for)
+
+        # Add an implicit subproject fallback if none has been set explicitly,
+        # unless implicit fallback is not allowed.
+        # Legacy: self.allow_fallback can be None when that kwarg is not defined
+        # in dependency('name'). In that case we don't want to use the implicit
+        # fallback when required is false, because the user will typically fall
+        # back manually, using cc.find_library() for example.
+        if not self.subproject_name and self.allow_fallback is not False:
+            for name in self.names:
+                subp_name, varname = self.wrap_resolver.find_dep_provider(name)
+                if subp_name:
+                    self.forcefallback |= subp_name in force_fallback_for
+                    if self.forcefallback or self.allow_fallback is True or required or self._get_subproject(subp_name):
+                        self._subproject_impl(subp_name, varname)
+                    break
+
+        candidates = self._get_candidates()
+
+        # writing just "dependency('')" is an error, because it can only fail
+        if not candidates and required:
+            raise InvalidArguments('Dependency is required but has no candidates.')
+
+        # Try all candidates, only the last one is really required.
+        last = len(candidates) - 1
+        for i, item in enumerate(candidates):
+            func, func_args, func_kwargs = item
+            func_kwargs['required'] = required and (i == last)
+            kwargs['required'] = required and (i == last)
+            dep = func(kwargs, func_args, func_kwargs)
+            if dep and dep.found():
+                # Override this dependency to have consistent results in subsequent
+                # dependency lookups.
+                for name in self.names:
+                    for_machine = self.interpreter.machine_from_native_kwarg(kwargs)
+                    identifier = dependencies.get_dep_identifier(name, kwargs)
+                    if identifier not in self.build.dependency_overrides[for_machine]:
+                        self.build.dependency_overrides[for_machine][identifier] = \
+                            build.DependencyOverride(dep, self.interpreter.current_node, explicit=False)
+                return dep
+            elif required and (dep or i == last):
+                # This was the last candidate, or the dependency has been cached
+                # as not-found, or the cached dependency version does not match;
+                # otherwise func() would have returned None instead.
+                raise DependencyException(f'Dependency {self._display_name!r} is required but not found.')
+            elif dep:
+                # Same as above, but the dependency is not required.
+                return dep
+        return self._notfound_dependency()
diff --git a/vendored-meson/meson/mesonbuild/interpreter/interpreter.py b/vendored-meson/meson/mesonbuild/interpreter/interpreter.py
new file mode 100644
index 000000000000..235129289751
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/interpreter/interpreter.py
@@ -0,0 +1,3404 @@
+# Copyright 2012-2021 The Meson development team
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+from .. import mparser
+from .. import environment
+from .. import coredata
+from .. import dependencies
+from .. import mlog
+from .. import build
+from .. import optinterpreter
+from .. import compilers
+from .. import envconfig
+from ..wrap import wrap, WrapMode
+from .. import mesonlib
+from ..mesonlib import (MesonBugException, MesonException, HoldableObject,
+                        FileMode, MachineChoice, OptionKey, listify,
+                        extract_as_list, has_path_sep, PerMachine)
+from ..programs import ExternalProgram, NonExistingExternalProgram
+from ..dependencies import Dependency
+from ..depfile import DepFile
+from ..interpreterbase import ContainerTypeInfo, InterpreterBase, KwargInfo, typed_kwargs, typed_pos_args
+from ..interpreterbase import noPosargs, noKwargs, permittedKwargs, noArgsFlattening, noSecondLevelHolderResolving, unholder_return
+from ..interpreterbase import InterpreterException, InvalidArguments, InvalidCode, SubdirDoneRequest
+from ..interpreterbase import Disabler, disablerIfNotFound
+from ..interpreterbase import FeatureNew, FeatureDeprecated, FeatureBroken, FeatureNewKwargs, FeatureDeprecatedKwargs
+from ..interpreterbase import ObjectHolder, ContextManagerObject
+from ..modules import ExtensionModule, ModuleObject, MutableModuleObject, NewExtensionModule, NotFoundExtensionModule
+from ..backend.backends import ExecutableSerialisation
+
+from . import interpreterobjects as OBJ
+from . import compiler as compilerOBJ
+from .mesonmain import MesonMain
+from .dependencyfallbacks import DependencyFallbacksHolder
+from .interpreterobjects import (
+    SubprojectHolder,
+    Test,
+    RunProcess,
+    extract_required_kwarg,
+    extract_search_dirs,
+    NullSubprojectInterpreter,
+)
+from .type_checking import (
+    COMMAND_KW,
+    CT_BUILD_ALWAYS,
+    CT_BUILD_ALWAYS_STALE,
+    CT_BUILD_BY_DEFAULT,
+    CT_INPUT_KW,
+    CT_INSTALL_DIR_KW,
+    MULTI_OUTPUT_KW,
+    OUTPUT_KW,
+    DEFAULT_OPTIONS,
+    DEPENDENCIES_KW,
+    DEPENDS_KW,
+    DEPEND_FILES_KW,
+    DEPFILE_KW,
+    DISABLER_KW,
+    D_MODULE_VERSIONS_KW,
+    ENV_KW,
+    ENV_METHOD_KW,
+    ENV_SEPARATOR_KW,
+    INCLUDE_DIRECTORIES,
+    INSTALL_KW,
+    INSTALL_DIR_KW,
+    INSTALL_MODE_KW,
+    LINK_WITH_KW,
+    LINK_WHOLE_KW,
+    CT_INSTALL_TAG_KW,
+    INSTALL_TAG_KW,
+    LANGUAGE_KW,
+    NATIVE_KW,
+    OVERRIDE_OPTIONS_KW,
+    PRESERVE_PATH_KW,
+    REQUIRED_KW,
+    SOURCES_KW,
+    VARIABLES_KW,
+    TEST_KWS,
+    NoneType,
+    in_set_validator,
+    env_convertor_with_method
+)
+from . import primitives as P_OBJ
+
+from pathlib import Path
+from enum import Enum
+import os
+import shutil
+import uuid
+import re
+import stat
+import collections
+import typing as T
+import textwrap
+import importlib
+import copy
+
+if T.TYPE_CHECKING:
+    import argparse
+
+    from typing_extensions import Literal
+
+    from . import kwargs as kwtypes
+    from ..backend.backends import Backend
+    from ..interpreterbase.baseobjects import InterpreterObject, TYPE_var, TYPE_kwargs
+    from ..programs import OverrideProgram
+
+    # Input source types passed to Targets
+    SourceInputs = T.Union[mesonlib.File, build.GeneratedList, build.BuildTarget, build.BothLibraries,
+                           build.CustomTargetIndex, build.CustomTarget, build.GeneratedList,
+                           build.ExtractedObjects, str]
+    # Input source types passed to the build.Target classes
+    SourceOutputs = T.Union[mesonlib.File, build.GeneratedList,
+                            build.BuildTarget, build.CustomTargetIndex, build.CustomTarget,
+                            build.ExtractedObjects, build.GeneratedList, build.StructuredSources]
+
+    BuildTargetSource = T.Union[mesonlib.FileOrString, build.GeneratedTypes, build.StructuredSources]
+
+
+def _project_version_validator(value: T.Union[T.List, str, mesonlib.File, None]) -> T.Optional[str]:
+    if isinstance(value, list):
+        if len(value) != 1:
+            return 'when passed as array must have a length of 1'
+        elif not isinstance(value[0], mesonlib.File):
+            return 'when passed as array must contain a File'
+    return None
+
+
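+# Renders user-provided values back in meson.build-style syntax, e.g.
+# stringifyUserArguments(['a', 1, True]) == "['a', 1, true]"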
+def stringifyUserArguments(args: T.List[T.Any], quote: bool = False) -> str:
+    if isinstance(args, list):
+        return '[%s]' % ', '.join([stringifyUserArguments(x, True) for x in args])
+    elif isinstance(args, dict):
+        return '{%s}' % ', '.join(['{} : {}'.format(stringifyUserArguments(k, True), stringifyUserArguments(v, True)) for k, v in args.items()])
+    elif isinstance(args, bool):
+        return 'true' if args else 'false'
+    elif isinstance(args, int):
+        return str(args)
+    elif isinstance(args, str):
+        return f"'{args}'" if quote else args
+    raise InvalidArguments('Function accepts only strings, integers, bools, lists, dictionaries and lists thereof.')
+
+class Summary:
+    def __init__(self, project_name: str, project_version: str):
+        self.project_name = project_name
+        self.project_version = project_version
+        self.sections = collections.defaultdict(dict)
+        self.max_key_len = 0
+
+    def add_section(self, section: str, values: T.Dict[str, T.Any], bool_yn: bool,
+                    list_sep: T.Optional[str], subproject: str) -> None:
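+        # Populated from summary() calls in meson.build, e.g. (illustrative):
+        #   summary({'Qt5': qt5_dep.found()}, bool_yn: true, section: 'Features')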
+        for k, v in values.items():
+            if k in self.sections[section]:
+                raise InterpreterException(f'Summary section {section!r} already has key {k!r}')
+            formatted_values = []
+            for i in listify(v):
+                if isinstance(i, bool) and bool_yn:
+                    formatted_values.append(mlog.green('YES') if i else mlog.red('NO'))
+                elif isinstance(i, (str, int, bool)):
+                    formatted_values.append(str(i))
+                elif isinstance(i, (ExternalProgram, Dependency)):
+                    FeatureNew.single_use('dependency or external program in summary', '0.57.0', subproject)
+                    formatted_values.append(i.summary_value())
+                elif isinstance(i, Disabler):
+                    FeatureNew.single_use('disabler in summary', '0.64.0', subproject)
+                    formatted_values.append(mlog.red('NO'))
+                elif isinstance(i, coredata.UserOption):
+                    FeatureNew.single_use('feature option in summary', '0.58.0', subproject)
+                    formatted_values.append(i.printable_value())
+                else:
+                    m = 'Summary value in section {!r}, key {!r}, must be string, integer, boolean, dependency, disabler, or external program'
+                    raise InterpreterException(m.format(section, k))
+            self.sections[section][k] = (formatted_values, list_sep)
+            self.max_key_len = max(self.max_key_len, len(k))
+
+    def dump(self):
+        mlog.log(self.project_name, mlog.normal_cyan(self.project_version))
+        for section, values in self.sections.items():
+            mlog.log('')  # newline
+            if section:
+                mlog.log(' ', mlog.bold(section))
+            for k, v in values.items():
+                v, list_sep = v
+                padding = self.max_key_len - len(k)
+                end = ' ' if v else ''
+                mlog.log(' ' * 3, k + ' ' * padding + ':', end=end)
+                indent = self.max_key_len + 6
+                self.dump_value(v, list_sep, indent)
+        mlog.log('')  # newline
+
+    def dump_value(self, arr, list_sep, indent):
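+        # With no list separator each value goes on its own line; otherwise
+        # values are packed onto lines and wrapped at the terminal width.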
+        lines_sep = '\n' + ' ' * indent
+        if list_sep is None:
+            mlog.log(*arr, sep=lines_sep)
+            return
+        max_len = shutil.get_terminal_size().columns
+        line = []
+        line_len = indent
+        lines_sep = list_sep.rstrip() + lines_sep
+        for v in arr:
+            v_len = len(v) + len(list_sep)
+            if line and line_len + v_len > max_len:
+                mlog.log(*line, sep=list_sep, end=lines_sep)
+                line_len = indent
+                line = []
+            line.append(v)
+            line_len += v_len
+        mlog.log(*line, sep=list_sep)
+
+known_library_kwargs = (
+    build.known_shlib_kwargs |
+    build.known_stlib_kwargs
+)
+
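+# Every kwarg accepted by any build_target() flavour, plus 'target_type' itself.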
+known_build_target_kwargs = (
+    known_library_kwargs |
+    build.known_exe_kwargs |
+    build.known_jar_kwargs |
+    {'target_type'}
+)
+
+class InterpreterRuleRelaxation(Enum):
+    ''' Defines specific relaxations of the Meson rules.
+
+    This is intended to be used for automatically converted
+    projects (CMake subprojects, build system mixing) that
+    generate a Meson AST via introspection, etc.
+    '''
+
+    ALLOW_BUILD_DIR_FILE_REFERENCES = 1
+
+permitted_dependency_kwargs = {
+    'allow_fallback',
+    'cmake_args',
+    'cmake_module_path',
+    'cmake_package_version',
+    'components',
+    'default_options',
+    'fallback',
+    'include_type',
+    'language',
+    'main',
+    'method',
+    'modules',
+    'native',
+    'not_found_message',
+    'optional_modules',
+    'private_headers',
+    'required',
+    'static',
+    'version',
+}
+
+implicit_check_false_warning = """You should add the boolean check kwarg to the run_command call.
+         It currently defaults to false,
+         but it will default to true in future releases of meson.
+         See also: https://github.com/mesonbuild/meson/issues/9300"""
+class Interpreter(InterpreterBase, HoldableObject):
+
+    def __init__(
+                self,
+                _build: build.Build,
+                backend: T.Optional[Backend] = None,
+                subproject: str = '',
+                subdir: str = '',
+                subproject_dir: str = 'subprojects',
+                default_project_options: T.Optional[T.Dict[OptionKey, str]] = None,
+                mock: bool = False,
+                ast: T.Optional[mparser.CodeBlockNode] = None,
+                is_translated: bool = False,
+                relaxations: T.Optional[T.Set[InterpreterRuleRelaxation]] = None,
+                user_defined_options: T.Optional['argparse.Namespace'] = None,
+            ) -> None:
+        super().__init__(_build.environment.get_source_dir(), subdir, subproject)
+        self.active_projectname = ''
+        self.build = _build
+        self.environment = self.build.environment
+        self.coredata = self.environment.get_coredata()
+        self.backend = backend
+        self.summary: T.Dict[str, 'Summary'] = {}
+        self.modules: T.Dict[str, NewExtensionModule] = {}
+        # Subproject directory is usually the name of the subproject, but can
+        # be different for dependencies provided by wrap files.
+        self.subproject_directory_name = subdir.split(os.path.sep)[-1]
+        self.subproject_dir = subproject_dir
+        self.relaxations = relaxations or set()
+        if not mock and ast is None:
+            self.load_root_meson_file()
+            self.sanity_check_ast()
+        elif ast is not None:
+            self.ast = ast
+            self.sanity_check_ast()
+        self.builtin.update({'meson': MesonMain(self.build, self)})
+        self.generators: T.List[build.Generator] = []
+        self.processed_buildfiles: T.Set[str] = set()
+        self.project_args_frozen = False
+        self.global_args_frozen = False  # implies self.project_args_frozen
+        self.subprojects: T.Dict[str, SubprojectHolder] = {}
+        self.subproject_stack: T.List[str] = []
+        self.configure_file_outputs: T.Dict[str, int] = {}
+        # Passed from the outside, only used in subprojects.
+        if default_project_options:
+            self.default_project_options = default_project_options.copy()
+        else:
+            self.default_project_options = {}
+        self.project_default_options: T.Dict[OptionKey, str] = {}
+        self.build_func_dict()
+        self.build_holder_map()
+        self.user_defined_options = user_defined_options
+        self.compilers: PerMachine[T.Dict[str, 'compilers.Compiler']] = PerMachine({}, {})
+
+        # build_def_files needs to be defined before parse_project is called
+        #
+        # For non-meson subprojects, we use the generated AST instead of a
+        # meson.build file. Even if such a file exists on disk, we don't want
+        # to add a dependency on it: it is autogenerated from the actual build
+        # files and kept only for reference.
+        self.build_def_files: mesonlib.OrderedSet[str] = mesonlib.OrderedSet()
+        build_filename = os.path.join(self.subdir, environment.build_filename)
+        if not is_translated:
+            self.build_def_files.add(build_filename)
+        if not mock:
+            self.parse_project()
+        self._redetect_machines()
+
+    def __getnewargs_ex__(self) -> T.Tuple[T.Tuple[object], T.Dict[str, object]]:
+        raise MesonBugException('This class is unpicklable')
+
+    def _redetect_machines(self) -> None:
+        # Re-initialize machine descriptions. We can do a better job now because we
+        # have the compilers needed to gain more knowledge, so wipe out old
+        # inference and start over.
+        machines = self.build.environment.machines.miss_defaulting()
+        machines.build = environment.detect_machine_info(self.coredata.compilers.build)
+        self.build.environment.machines = machines.default_missing()
+        assert self.build.environment.machines.build.cpu is not None
+        assert self.build.environment.machines.host.cpu is not None
+        assert self.build.environment.machines.target.cpu is not None
+
+        self.builtin['build_machine'] = \
+            OBJ.MachineHolder(self.build.environment.machines.build, self)
+        self.builtin['host_machine'] = \
+            OBJ.MachineHolder(self.build.environment.machines.host, self)
+        self.builtin['target_machine'] = \
+            OBJ.MachineHolder(self.build.environment.machines.target, self)
+
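+    # Maps the function names available in meson.build files to their
+    # implementations, e.g. a `project(...)` call dispatches to func_project.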
+    def build_func_dict(self) -> None:
+        self.funcs.update({'add_global_arguments': self.func_add_global_arguments,
+                           'add_global_link_arguments': self.func_add_global_link_arguments,
+                           'add_languages': self.func_add_languages,
+                           'add_project_arguments': self.func_add_project_arguments,
+                           'add_project_dependencies': self.func_add_project_dependencies,
+                           'add_project_link_arguments': self.func_add_project_link_arguments,
+                           'add_test_setup': self.func_add_test_setup,
+                           'alias_target': self.func_alias_target,
+                           'assert': self.func_assert,
+                           'benchmark': self.func_benchmark,
+                           'both_libraries': self.func_both_lib,
+                           'build_target': self.func_build_target,
+                           'configuration_data': self.func_configuration_data,
+                           'configure_file': self.func_configure_file,
+                           'custom_target': self.func_custom_target,
+                           'debug': self.func_debug,
+                           'declare_dependency': self.func_declare_dependency,
+                           'dependency': self.func_dependency,
+                           'disabler': self.func_disabler,
+                           'environment': self.func_environment,
+                           'error': self.func_error,
+                           'executable': self.func_executable,
+                           'files': self.func_files,
+                           'find_library': self.func_find_library,
+                           'find_program': self.func_find_program,
+                           'generator': self.func_generator,
+                           'get_option': self.func_get_option,
+                           'get_variable': self.func_get_variable,
+                           'gettext': self.func_gettext,
+                           'import': self.func_import,
+                           'include_directories': self.func_include_directories,
+                           'install_data': self.func_install_data,
+                           'install_emptydir': self.func_install_emptydir,
+                           'install_headers': self.func_install_headers,
+                           'install_man': self.func_install_man,
+                           'install_subdir': self.func_install_subdir,
+                           'install_symlink': self.func_install_symlink,
+                           'is_disabler': self.func_is_disabler,
+                           'is_variable': self.func_is_variable,
+                           'jar': self.func_jar,
+                           'join_paths': self.func_join_paths,
+                           'library': self.func_library,
+                           'message': self.func_message,
+                           'option': self.func_option,
+                           'project': self.func_project,
+                           'range': self.func_range,
+                           'run_command': self.func_run_command,
+                           'run_target': self.func_run_target,
+                           'set_variable': self.func_set_variable,
+                           'structured_sources': self.func_structured_sources,
+                           'subdir': self.func_subdir,
+                           'shared_library': self.func_shared_lib,
+                           'shared_module': self.func_shared_module,
+                           'static_library': self.func_static_lib,
+                           'subdir_done': self.func_subdir_done,
+                           'subproject': self.func_subproject,
+                           'summary': self.func_summary,
+                           'test': self.func_test,
+                           'unset_variable': self.func_unset_variable,
+                           'vcs_tag': self.func_vcs_tag,
+                           'warning': self.func_warning,
+                           })
+        if 'MESON_UNIT_TEST' in os.environ:
+            self.funcs.update({'exception': self.func_exception})
+        if 'MESON_RUNNING_IN_PROJECT_TESTS' in os.environ:
+            self.funcs.update({'expect_error': self.func_expect_error})
+
+    def build_holder_map(self) -> None:
+        '''
+            Build a mapping of `HoldableObject` types to their corresponding
+            `ObjectHolder`s. This mapping is used in `InterpreterBase` to automatically
+            holderify all returned values from methods and functions.
+        '''
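+        # For example, a plain `str` returned by a function is wrapped in a
+        # P_OBJ.StringHolder before being handed back to interpreted code.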
+        self.holder_map.update({
+            # Primitives
+            list: P_OBJ.ArrayHolder,
+            dict: P_OBJ.DictHolder,
+            int: P_OBJ.IntegerHolder,
+            bool: P_OBJ.BooleanHolder,
+            str: P_OBJ.StringHolder,
+            P_OBJ.MesonVersionString: P_OBJ.MesonVersionStringHolder,
+            P_OBJ.DependencyVariableString: P_OBJ.DependencyVariableStringHolder,
+            P_OBJ.OptionString: P_OBJ.OptionStringHolder,
+
+            # Meson types
+            mesonlib.File: OBJ.FileHolder,
+            build.SharedLibrary: OBJ.SharedLibraryHolder,
+            build.StaticLibrary: OBJ.StaticLibraryHolder,
+            build.BothLibraries: OBJ.BothLibrariesHolder,
+            build.SharedModule: OBJ.SharedModuleHolder,
+            build.Executable: OBJ.ExecutableHolder,
+            build.Jar: OBJ.JarHolder,
+            build.CustomTarget: OBJ.CustomTargetHolder,
+            build.CustomTargetIndex: OBJ.CustomTargetIndexHolder,
+            build.Generator: OBJ.GeneratorHolder,
+            build.GeneratedList: OBJ.GeneratedListHolder,
+            build.ExtractedObjects: OBJ.GeneratedObjectsHolder,
+            build.RunTarget: OBJ.RunTargetHolder,
+            build.AliasTarget: OBJ.AliasTargetHolder,
+            build.Headers: OBJ.HeadersHolder,
+            build.Man: OBJ.ManHolder,
+            build.EmptyDir: OBJ.EmptyDirHolder,
+            build.Data: OBJ.DataHolder,
+            build.SymlinkData: OBJ.SymlinkDataHolder,
+            build.InstallDir: OBJ.InstallDirHolder,
+            build.IncludeDirs: OBJ.IncludeDirsHolder,
+            build.EnvironmentVariables: OBJ.EnvironmentVariablesHolder,
+            build.StructuredSources: OBJ.StructuredSourcesHolder,
+            compilers.RunResult: compilerOBJ.TryRunResultHolder,
+            dependencies.ExternalLibrary: OBJ.ExternalLibraryHolder,
+            coredata.UserFeatureOption: OBJ.FeatureOptionHolder,
+            envconfig.MachineInfo: OBJ.MachineHolder,
+            build.ConfigurationData: OBJ.ConfigurationDataHolder,
+        })
+
+        '''
+            Build a mapping of `HoldableObject` base classes to their
+            corresponding `ObjectHolder`s. The difference to `self.holder_map`
+            is that the keys here define an upper bound instead of requiring an
+            exact match.
+
+            The mappings defined here are only used when there was no direct hit
+            found in `self.holder_map`.
+        '''
+        self.bound_holder_map.update({
+            dependencies.Dependency: OBJ.DependencyHolder,
+            ExternalProgram: OBJ.ExternalProgramHolder,
+            compilers.Compiler: compilerOBJ.CompilerHolder,
+            ModuleObject: OBJ.ModuleObjectHolder,
+            MutableModuleObject: OBJ.MutableModuleObjectHolder,
+        })
+
+    def append_holder_map(self, held_type: T.Type[mesonlib.HoldableObject], holder_type: T.Type[ObjectHolder]) -> None:
+        '''
+            Adds one additional mapping to the `holder_map`.
+
+            The intended use for this function is in the `initialize` method of
+            modules to register custom object holders.
+        '''
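+        # Hypothetical sketch (names are illustrative only): a module's
+        # initialize() could call
+        #     interpreter.append_holder_map(MyHeldObject, MyObjectHolder)
+        # to register a holder for its custom HoldableObject subclass.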
+        self.holder_map.update({
+            held_type: holder_type
+        })
+
+    def process_new_values(self, invalues: T.List[T.Union[TYPE_var, ExecutableSerialisation]]) -> None:
+        invalues = listify(invalues)
+        for v in invalues:
+            if isinstance(v, ObjectHolder):
+                raise InterpreterException('Modules must not return ObjectHolders')
+            if isinstance(v, (build.BuildTarget, build.CustomTarget, build.RunTarget)):
+                self.add_target(v.name, v)
+            elif isinstance(v, list):
+                self.process_new_values(v)
+            elif isinstance(v, ExecutableSerialisation):
+                v.subproject = self.subproject
+                self.build.install_scripts.append(v)
+            elif isinstance(v, build.Data):
+                self.build.data.append(v)
+            elif isinstance(v, build.SymlinkData):
+                self.build.symlinks.append(v)
+            elif isinstance(v, dependencies.InternalDependency):
+                # FIXME: This is special cased and not ideal:
+                # The first source is our new VapiTarget, the rest are deps
+                self.process_new_values(v.sources[0])
+            elif isinstance(v, build.InstallDir):
+                self.build.install_dirs.append(v)
+            elif isinstance(v, Test):
+                self.build.tests.append(v)
+            elif isinstance(v, (int, str, bool, Disabler, ObjectHolder, build.GeneratedList,
+                                ExternalProgram, build.ConfigurationData)):
+                pass
+            else:
+                raise InterpreterException(f'Module returned a value of unknown type {v!r}.')
+
+    def handle_meson_version(self, pv: str, location: mparser.BaseNode) -> None:
+        if not mesonlib.version_compare(coredata.stable_version, pv):
+            raise InterpreterException.from_node(f'Meson version is {coredata.version} but project requires {pv}', node=location)
+        mesonlib.project_meson_versions[self.subproject] = pv
+
+    def handle_meson_version_from_ast(self) -> None:
+        if not self.ast.lines:
+            return
+        project = self.ast.lines[0]
+        # first line is always project()
+        if not isinstance(project, mparser.FunctionNode):
+            return
+        for kw, val in project.args.kwargs.items():
+            assert isinstance(kw, mparser.IdNode), 'for mypy'
+            if kw.value == 'meson_version':
+                # mypy does not understand "and isinstance"
+                if isinstance(val, mparser.StringNode):
+                    self.handle_meson_version(val.value, val)
+
+    def get_build_def_files(self) -> mesonlib.OrderedSet[str]:
+        return self.build_def_files
+
+    def add_build_def_file(self, f: mesonlib.FileOrString) -> None:
+        # Use a relative path for files within the source directory, and an
+        # absolute path for system files. Skip files within the build directory.
+        # Also skip non-regular files (e.g. /dev/stdout). Normalize the path to
+        # avoid duplicates; this is especially important to convert '/' to '\'
+        # on Windows.
+        if isinstance(f, mesonlib.File):
+            if f.is_built:
+                return
+            f = os.path.normpath(f.relative_name())
+        elif os.path.isfile(f) and not f.startswith('/dev'):
+            srcdir = Path(self.environment.get_source_dir())
+            builddir = Path(self.environment.get_build_dir())
+            try:
+                f_ = Path(f).resolve()
+            except OSError:
+                f_ = Path(f)
+                s = f_.stat()
+                if (hasattr(s, 'st_file_attributes') and
+                        s.st_file_attributes & stat.FILE_ATTRIBUTE_REPARSE_POINT != 0 and
+                        s.st_reparse_tag == stat.IO_REPARSE_TAG_APPEXECLINK):
+                    # This is a Windows Store link which we can't
+                    # resolve, so just do our best otherwise.
+                    f_ = f_.parent.resolve() / f_.name
+                else:
+                    raise
+            if builddir in f_.parents:
+                return
+            if srcdir in f_.parents:
+                f_ = f_.relative_to(srcdir)
+            f = str(f_)
+        else:
+            return
+        if f not in self.build_def_files:
+            self.build_def_files.add(f)
+
+    def get_variables(self) -> T.Dict[str, InterpreterObject]:
+        return self.variables
+
+    def check_stdlibs(self) -> None:
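+        # Resolve any '<lang>_stdlib' properties declared in machine files as
+        # forced fallback dependencies, for the host (and, when cross
+        # compiling, also the build) machine.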
+        machine_choices = [MachineChoice.HOST]
+        if self.coredata.is_cross_build():
+            machine_choices.append(MachineChoice.BUILD)
+        for for_machine in machine_choices:
+            props = self.build.environment.properties[for_machine]
+            for l in self.coredata.compilers[for_machine].keys():
+                try:
+                    di = mesonlib.stringlistify(props.get_stdlib(l))
+                except KeyError:
+                    continue
+                if len(di) == 1:
+                    FeatureNew.single_use('stdlib without variable name', '0.56.0', self.subproject, location=self.current_node)
+                kwargs = {'native': for_machine is MachineChoice.BUILD,
+                          }
+                name = l + '_stdlib'
+                df = DependencyFallbacksHolder(self, [name])
+                df.set_fallback(di)
+                dep = df.lookup(kwargs, force_fallback=True)
+                self.build.stdlibs[for_machine][l] = dep
+
+    @typed_pos_args('import', str)
+    @typed_kwargs(
+        'import',
+        REQUIRED_KW.evolve(since='0.59.0'),
+        DISABLER_KW.evolve(since='0.59.0'),
+    )
+    @disablerIfNotFound
+    def func_import(self, node: mparser.BaseNode, args: T.Tuple[str],
+                    kwargs: 'kwtypes.FuncImportModule') -> T.Union[ExtensionModule, NewExtensionModule, NotFoundExtensionModule]:
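+        # e.g. `fs = import('fs')` in a meson.build file reaches this
+        # function with args == ('fs',).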
+        modname = args[0]
+        disabled, required, _ = extract_required_kwarg(kwargs, self.subproject)
+        if disabled:
+            return NotFoundExtensionModule(modname)
+
+        expect_unstable = False
+        # Some tests use "unstable_" instead of "unstable-", and that happens to work because
+        # of implementation details
+        if modname.startswith(('unstable-', 'unstable_')):
+            if modname.startswith('unstable_'):
+                mlog.deprecation(f'Importing unstable modules as "{modname}" instead of "{modname.replace("_", "-", 1)}"',
+                                 location=node)
+            real_modname = modname[len('unstable') + 1:]  # + 1 to handle the - or _
+            expect_unstable = True
+        else:
+            real_modname = modname
+
+        if real_modname in self.modules:
+            return self.modules[real_modname]
+        try:
+            module = importlib.import_module(f'mesonbuild.modules.{real_modname}')
+        except ImportError:
+            if required:
+                raise InvalidArguments(f'Module "{modname}" does not exist')
+            ext_module = NotFoundExtensionModule(real_modname)
+        else:
+            ext_module = module.initialize(self)
+            assert isinstance(ext_module, (ExtensionModule, NewExtensionModule))
+            self.build.modules.append(real_modname)
+        if ext_module.INFO.added:
+            FeatureNew.single_use(f'module {ext_module.INFO.name}', ext_module.INFO.added, self.subproject, location=node)
+        if ext_module.INFO.deprecated:
+            FeatureDeprecated.single_use(f'module {ext_module.INFO.name}', ext_module.INFO.deprecated, self.subproject, location=node)
+        if expect_unstable and not ext_module.INFO.unstable and ext_module.INFO.stabilized is None:
+            raise InvalidArguments(f'Module {ext_module.INFO.name} has never been unstable, remove "unstable-" prefix.')
+        if ext_module.INFO.stabilized is not None:
+            if expect_unstable:
+                FeatureDeprecated.single_use(
+                    f'module {ext_module.INFO.name} has been stabilized',
+                    ext_module.INFO.stabilized, self.subproject,
+                    'drop "unstable-" prefix from the module name',
+                    location=node)
+            else:
+                FeatureNew.single_use(
+                    f'module {ext_module.INFO.name} as stable module',
+                    ext_module.INFO.stabilized, self.subproject,
+                    f'Consider either adding "unstable-" to the module name, or updating the meson required version to ">= {ext_module.INFO.stabilized}"',
+                    location=node)
+        elif ext_module.INFO.unstable:
+            if not expect_unstable:
+                if required:
+                    raise InvalidArguments(f'Module "{ext_module.INFO.name}" has not been stabilized, and must be imported as unstable-{ext_module.INFO.name}')
+                ext_module = NotFoundExtensionModule(real_modname)
+            else:
+                mlog.warning(f'Module {ext_module.INFO.name} has no backwards or forwards compatibility and might not exist in future releases.', location=node, fatal=False)
+
+        self.modules[real_modname] = ext_module
+        return ext_module
+
+    @typed_pos_args('files', varargs=str)
+    @noKwargs
+    def func_files(self, node: mparser.FunctionNode, args: T.Tuple[T.List[str]], kwargs: 'TYPE_kwargs') -> T.List[mesonlib.File]:
+        return self.source_strings_to_files(args[0])
+
+    @noPosargs
+    @typed_kwargs(
+        'declare_dependency',
+        KwargInfo('compile_args', ContainerTypeInfo(list, str), listify=True, default=[]),
+        INCLUDE_DIRECTORIES.evolve(name='d_import_dirs', since='0.62.0'),
+        D_MODULE_VERSIONS_KW.evolve(since='0.62.0'),
+        KwargInfo('link_args', ContainerTypeInfo(list, str), listify=True, default=[]),
+        DEPENDENCIES_KW,
+        INCLUDE_DIRECTORIES,
+        LINK_WITH_KW,
+        LINK_WHOLE_KW.evolve(since='0.46.0'),
+        SOURCES_KW,
+        KwargInfo('extra_files', ContainerTypeInfo(list, (mesonlib.File, str)), listify=True, default=[], since='1.2.0'),
+        VARIABLES_KW.evolve(since='0.54.0', since_values={list: '0.56.0'}),
+        KwargInfo('version', (str, NoneType)),
+        KwargInfo('objects', ContainerTypeInfo(list, build.ExtractedObjects), listify=True, default=[], since='1.1.0'),
+    )
+    def func_declare_dependency(self, node, args, kwargs):
+        deps = kwargs['dependencies']
+        incs = self.extract_incdirs(kwargs)
+        libs = kwargs['link_with']
+        libs_whole = kwargs['link_whole']
+        objects = kwargs['objects']
+        sources = self.source_strings_to_files(kwargs['sources'])
+        extra_files = self.source_strings_to_files(kwargs['extra_files'])
+        compile_args = kwargs['compile_args']
+        link_args = kwargs['link_args']
+        variables = kwargs['variables']
+        version = kwargs['version']
+        if version is None:
+            version = self.project_version
+        d_module_versions = kwargs['d_module_versions']
+        d_import_dirs = self.extract_incdirs(kwargs, 'd_import_dirs')
+        srcdir = Path(self.environment.source_dir)
+        # convert variables which refer to an -uninstalled.pc style datadir
+        for k, v in variables.items():
+            try:
+                p = Path(v)
+            except ValueError:
+                continue
+            else:
+                if not self.is_subproject() and srcdir / self.subproject_dir in p.parents:
+                    continue
+                if p.is_absolute() and p.is_dir() and srcdir / self.root_subdir in [p] + list(Path(os.path.abspath(p)).parents):
+                    variables[k] = P_OBJ.DependencyVariableString(v)
+        for d in deps:
+            if not isinstance(d, dependencies.Dependency):
+                raise InterpreterException('Invalid dependency')
+
+        dep = dependencies.InternalDependency(version, incs, compile_args,
+                                              link_args, libs, libs_whole, sources, extra_files,
+                                              deps, variables, d_module_versions, d_import_dirs,
+                                              objects)
+        return dep
+
+    @typed_pos_args('assert', bool, optargs=[str])
+    @noKwargs
+    def func_assert(self, node: mparser.FunctionNode, args: T.Tuple[bool, T.Optional[str]],
+                    kwargs: 'TYPE_kwargs') -> None:
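+        # e.g. `assert(meson.version().version_compare('>=0.53'), 'too old')`.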
+        value, message = args
+        if message is None:
+            FeatureNew.single_use('assert function without message argument', '0.53.0', self.subproject, location=node)
+
+        if not value:
+            if message is None:
+                from ..ast import AstPrinter
+                printer = AstPrinter()
+                node.args.arguments[0].accept(printer)
+                message = printer.result
+            raise InterpreterException('Assert failed: ' + message)
+
+    def validate_arguments(self, args, argcount, arg_types):
+        if argcount is not None:
+            if argcount != len(args):
+                raise InvalidArguments(f'Expected {argcount} arguments, got {len(args)}.')
+        for actual, wanted in zip(args, arg_types):
+            if wanted is not None:
+                if not isinstance(actual, wanted):
+                    raise InvalidArguments('Incorrect argument type.')
+
+    # Executables aren't actually accepted, but we allow them here to allow for
+    # better error messages when overridden
+    @typed_pos_args(
+        'run_command',
+        (build.Executable, ExternalProgram, compilers.Compiler, mesonlib.File, str),
+        varargs=(build.Executable, ExternalProgram, compilers.Compiler, mesonlib.File, str))
+    @typed_kwargs(
+        'run_command',
+        KwargInfo('check', (bool, NoneType), since='0.47.0'),
+        KwargInfo('capture', bool, default=True, since='0.47.0'),
+        ENV_KW.evolve(since='0.50.0'),
+    )
+    def func_run_command(self, node: mparser.BaseNode,
+                         args: T.Tuple[T.Union[build.Executable, ExternalProgram, compilers.Compiler, mesonlib.File, str],
+                                       T.List[T.Union[build.Executable, ExternalProgram, compilers.Compiler, mesonlib.File, str]]],
+                         kwargs: 'kwtypes.RunCommand') -> RunProcess:
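+        # e.g. `res = run_command('git', 'rev-parse', 'HEAD', check: true)`.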
+        return self.run_command_impl(node, args, kwargs)
+
+    def run_command_impl(self,
+                         node: mparser.BaseNode,
+                         args: T.Tuple[T.Union[build.Executable, ExternalProgram, compilers.Compiler, mesonlib.File, str],
+                                       T.List[T.Union[build.Executable, ExternalProgram, compilers.Compiler, mesonlib.File, str]]],
+                         kwargs: 'kwtypes.RunCommand',
+                         in_builddir: bool = False) -> RunProcess:
+        cmd, cargs = args
+        capture = kwargs['capture']
+        env = kwargs['env']
+        srcdir = self.environment.get_source_dir()
+        builddir = self.environment.get_build_dir()
+
+        check = kwargs['check']
+        if check is None:
+            mlog.warning(implicit_check_false_warning, once=True)
+            check = False
+
+        overridden_msg = ('Program {!r} was overridden with the compiled '
+                          'executable {!r} and therefore cannot be used during '
+                          'configuration')
+        expanded_args: T.List[str] = []
+        if isinstance(cmd, build.Executable):
+            for name, exe in self.build.find_overrides.items():
+                if cmd == exe:
+                    progname = name
+                    break
+            else:
+                raise InterpreterException(f'Program {cmd.description()!r} is a compiled executable and therefore cannot be used during configuration')
+            raise InterpreterException(overridden_msg.format(progname, cmd.description()))
+        if isinstance(cmd, ExternalProgram):
+            if not cmd.found():
+                raise InterpreterException(f'command {cmd.get_name()!r} not found or not executable')
+        elif isinstance(cmd, compilers.Compiler):
+            exelist = cmd.get_exelist()
+            cmd = exelist[0]
+            prog = ExternalProgram(cmd, silent=True)
+            if not prog.found():
+                raise InterpreterException(f'Program {cmd!r} not found or not executable')
+            cmd = prog
+            expanded_args = exelist[1:]
+        else:
+            if isinstance(cmd, mesonlib.File):
+                cmd = cmd.absolute_path(srcdir, builddir)
+            # Prefer scripts in the current source directory
+            search_dir = os.path.join(srcdir, self.subdir)
+            prog = ExternalProgram(cmd, silent=True, search_dir=search_dir)
+            if not prog.found():
+                raise InterpreterException(f'Program or command {cmd!r} not found or not executable')
+            cmd = prog
+        for a in cargs:
+            if isinstance(a, str):
+                expanded_args.append(a)
+            elif isinstance(a, mesonlib.File):
+                expanded_args.append(a.absolute_path(srcdir, builddir))
+            elif isinstance(a, ExternalProgram):
+                expanded_args.append(a.get_path())
+            elif isinstance(a, compilers.Compiler):
+                FeatureNew.single_use('Compiler object as a variadic argument to `run_command`', '0.61.0', self.subproject, location=node)
+                prog = ExternalProgram(a.exelist[0], silent=True)
+                if not prog.found():
+                    raise InterpreterException(f'Program {a.exelist[0]!r} not found or not executable')
+                expanded_args.append(prog.get_path())
+            else:
+                raise InterpreterException(overridden_msg.format(a.name, cmd.description()))
+
+        # If any file that was used as an argument to the command
+        # changes, we must re-run the configuration step.
+        self.add_build_def_file(cmd.get_path())
+        for a in expanded_args:
+            if not os.path.isabs(a):
+                a = os.path.join(builddir if in_builddir else srcdir, self.subdir, a)
+            self.add_build_def_file(a)
+
+        return RunProcess(cmd, expanded_args, env, srcdir, builddir, self.subdir,
+                          self.environment.get_build_command() + ['introspect'],
+                          in_builddir=in_builddir, check=check, capture=capture)
+
+    def func_gettext(self, nodes, args, kwargs):
+        raise InterpreterException('Gettext() function has been moved to module i18n. Import it and use i18n.gettext() instead')
+
+    def func_option(self, nodes, args, kwargs):
+        raise InterpreterException('Tried to call option() in build description file. All options must be in the option file.')
+
+    @typed_pos_args('subproject', str)
+    @typed_kwargs(
+        'subproject',
+        REQUIRED_KW,
+        DEFAULT_OPTIONS.evolve(since='0.38.0'),
+        KwargInfo('version', ContainerTypeInfo(list, str), default=[], listify=True),
+    )
+    def func_subproject(self, nodes: mparser.BaseNode, args: T.Tuple[str], kwargs: kwtypes.Subproject) -> SubprojectHolder:
+        kw: kwtypes.DoSubproject = {
+            'required': kwargs['required'],
+            'default_options': kwargs['default_options'],
+            'version': kwargs['version'],
+            'options': None,
+            'cmake_options': [],
+        }
+        return self.do_subproject(args[0], 'meson', kw)
+
+    def disabled_subproject(self, subp_name: str, disabled_feature: T.Optional[str] = None,
+                            exception: T.Optional[Exception] = None) -> SubprojectHolder:
+        sub = SubprojectHolder(NullSubprojectInterpreter(), os.path.join(self.subproject_dir, subp_name),
+                               disabled_feature=disabled_feature, exception=exception)
+        self.subprojects[subp_name] = sub
+        return sub
+
+    def do_subproject(self, subp_name: str, method: Literal['meson', 'cmake'], kwargs: kwtypes.DoSubproject) -> SubprojectHolder:
+        disabled, required, feature = extract_required_kwarg(kwargs, self.subproject)
+        if disabled:
+            mlog.log('Subproject', mlog.bold(subp_name), ':', 'skipped: feature', mlog.bold(feature), 'disabled')
+            return self.disabled_subproject(subp_name, disabled_feature=feature)
+
+        default_options = {k.evolve(subproject=subp_name): v for k, v in kwargs['default_options'].items()}
+
+        if subp_name == '':
+            raise InterpreterException('Subproject name must not be empty.')
+        if subp_name[0] == '.':
+            raise InterpreterException('Subproject name must not start with a period.')
+        if '..' in subp_name:
+            raise InterpreterException('Subproject name must not contain a ".." path segment.')
+        if os.path.isabs(subp_name):
+            raise InterpreterException('Subproject name must not be an absolute path.')
+        if has_path_sep(subp_name):
+            mlog.warning('Subproject name has a path separator. This may cause unexpected behaviour.',
+                         location=self.current_node)
+        if subp_name in self.subproject_stack:
+            fullstack = self.subproject_stack + [subp_name]
+            incpath = ' => '.join(fullstack)
+            raise InvalidCode(f'Recursive include of subprojects: {incpath}.')
+        if subp_name in self.subprojects:
+            subproject = self.subprojects[subp_name]
+            if required and not subproject.found():
+                raise InterpreterException(f'Subproject "{subproject.subdir}" required but not found.')
+            if kwargs['version']:
+                pv = self.build.subprojects[subp_name]
+                wanted = kwargs['version']
+                if pv == 'undefined' or not mesonlib.version_compare_many(pv, wanted)[0]:
+                    raise InterpreterException(f'Subproject {subp_name} version is {pv} but {wanted} required.')
+            return subproject
+
+        r = self.environment.wrap_resolver
+        try:
+            subdir = r.resolve(subp_name, method)
+        except wrap.WrapException as e:
+            if not required:
+                mlog.log(e)
+                mlog.log('Subproject', mlog.bold(subp_name), 'is buildable:', mlog.red('NO'), '(disabling)')
+                return self.disabled_subproject(subp_name, exception=e)
+            raise e
+
+        subdir_abs = os.path.join(self.environment.get_source_dir(), subdir)
+        os.makedirs(os.path.join(self.build.environment.get_build_dir(), subdir), exist_ok=True)
+        self.global_args_frozen = True
+
+        stack = ':'.join(self.subproject_stack + [subp_name])
+        m = ['\nExecuting subproject', mlog.bold(stack)]
+        if method != 'meson':
+            m += ['method', mlog.bold(method)]
+        mlog.log(*m, '\n', nested=False)
+
+        try:
+            if method == 'meson':
+                return self._do_subproject_meson(subp_name, subdir, default_options, kwargs)
+            elif method == 'cmake':
+                return self._do_subproject_cmake(subp_name, subdir, subdir_abs, default_options, kwargs)
+            else:
+                raise mesonlib.MesonBugException(f'The method {method} is invalid for the subproject {subp_name}')
+        # Invalid code is always an error
+        except InvalidCode:
+            raise
+        except Exception as e:
+            if not required:
+                with mlog.nested(subp_name):
+                    # Suppress the 'ERROR:' prefix because this exception is not
+                    # fatal, and VS CI treats any log containing "ERROR:" as fatal.
+                    mlog.exception(e, prefix=mlog.yellow('Exception:'))
+                mlog.log('\nSubproject', mlog.bold(subdir), 'is buildable:', mlog.red('NO'), '(disabling)')
+                return self.disabled_subproject(subp_name, exception=e)
+            raise e
+
+    def _do_subproject_meson(self, subp_name: str, subdir: str,
+                             default_options: T.Dict[OptionKey, str],
+                             kwargs: kwtypes.DoSubproject,
+                             ast: T.Optional[mparser.CodeBlockNode] = None,
+                             build_def_files: T.Optional[T.List[str]] = None,
+                             is_translated: bool = False,
+                             relaxations: T.Optional[T.Set[InterpreterRuleRelaxation]] = None) -> SubprojectHolder:
+        with mlog.nested(subp_name):
+            new_build = self.build.copy()
+            subi = Interpreter(new_build, self.backend, subp_name, subdir, self.subproject_dir,
+                               default_options, ast=ast, is_translated=is_translated,
+                               relaxations=relaxations,
+                               user_defined_options=self.user_defined_options)
+            # Those lists are shared by all interpreters. That means that
+            # even if the subproject fails, any modification that the subproject
+            # made to those lists will affect the parent project.
+            subi.subprojects = self.subprojects
+            subi.modules = self.modules
+            subi.holder_map = self.holder_map
+            subi.bound_holder_map = self.bound_holder_map
+            subi.summary = self.summary
+
+            subi.subproject_stack = self.subproject_stack + [subp_name]
+            current_active = self.active_projectname
+            with mlog.nested_warnings():
+                subi.run()
+                subi_warnings = mlog.get_warning_count()
+            mlog.log('Subproject', mlog.bold(subp_name), 'finished.')
+
+        mlog.log()
+
+        if kwargs['version']:
+            pv = subi.project_version
+            wanted = kwargs['version']
+            if pv == 'undefined' or not mesonlib.version_compare_many(pv, wanted)[0]:
+                raise InterpreterException(f'Subproject {subp_name} version is {pv} but {wanted} required.')
+        self.active_projectname = current_active
+        self.subprojects.update(subi.subprojects)
+        self.subprojects[subp_name] = SubprojectHolder(subi, subdir, warnings=subi_warnings,
+                                                       callstack=self.subproject_stack)
+        # Duplicates are possible when a subproject uses files from the project root
+        if build_def_files:
+            self.build_def_files.update(build_def_files)
+        # We always need the subi.build_def_files, to propagate sub-sub-projects
+        self.build_def_files.update(subi.build_def_files)
+        self.build.merge(subi.build)
+        self.build.subprojects[subp_name] = subi.project_version
+        return self.subprojects[subp_name]
+
+    def _do_subproject_cmake(self, subp_name: str, subdir: str, subdir_abs: str,
+                             default_options: T.Dict[OptionKey, str],
+                             kwargs: kwtypes.DoSubproject) -> SubprojectHolder:
+        from ..cmake import CMakeInterpreter
+        with mlog.nested(subp_name):
+            new_build = self.build.copy()
+            prefix = self.coredata.options[OptionKey('prefix')].value
+
+            from ..modules.cmake import CMakeSubprojectOptions
+            options = kwargs['options'] or CMakeSubprojectOptions()
+            cmake_options = kwargs['cmake_options'] + options.cmake_options
+            cm_int = CMakeInterpreter(new_build, Path(subdir), Path(subdir_abs), Path(prefix), new_build.environment, self.backend)
+            cm_int.initialise(cmake_options)
+            cm_int.analyse()
+
+            # Generate a meson ast and execute it with the normal do_subproject_meson
+            ast = cm_int.pretend_to_be_meson(options.target_options)
+
+            mlog.log()
+            with mlog.nested('cmake-ast'):
+                mlog.log('Processing generated meson AST')
+
+                # Debug print the generated meson file
+                from ..ast import AstIndentationGenerator, AstPrinter
+                printer = AstPrinter(update_ast_line_nos=True)
+                ast.accept(AstIndentationGenerator())
+                ast.accept(printer)
+                printer.post_process()
+                meson_filename = os.path.join(self.build.environment.get_build_dir(), subdir, 'meson.build')
+                with open(meson_filename, "w", encoding='utf-8') as f:
+                    f.write(printer.result)
+
+                mlog.log('Build file:', meson_filename)
+                mlog.cmd_ci_include(meson_filename)
+                mlog.log()
+
+            result = self._do_subproject_meson(
+                    subp_name, subdir, default_options,
+                    kwargs, ast,
+                    [str(f) for f in cm_int.bs_files],
+                    is_translated=True,
+                    relaxations={
+                        InterpreterRuleRelaxation.ALLOW_BUILD_DIR_FILE_REFERENCES,
+                    }
+            )
+            result.cm_interpreter = cm_int
+
+        mlog.log()
+        return result
+
+    def get_option_internal(self, optname: str) -> coredata.UserOption:
+        key = OptionKey.from_string(optname).evolve(subproject=self.subproject)
+
+        if not key.is_project():
+            for opts in [self.coredata.options, compilers.base_options]:
+                v = opts.get(key)
+                if v is None or v.yielding:
+                    v = opts.get(key.as_root())
+                if v is not None:
+                    assert isinstance(v, coredata.UserOption), 'for mypy'
+                    return v
+
+        try:
+            opt = self.coredata.options[key]
+            if opt.yielding and key.subproject and key.as_root() in self.coredata.options:
+                popt = self.coredata.options[key.as_root()]
+                if type(opt) is type(popt):
+                    opt = popt
+                else:
+                    # Strip the leading 'User' and trailing 'Option' from the
+                    # class name to get the option type as a string, e.g.
+                    # UserStringOption -> 'string'.
+                    opt_type = opt.__class__.__name__[4:][:-6].lower()
+                    popt_type = popt.__class__.__name__[4:][:-6].lower()
+                    # This is not a hard error to avoid dependency hell, the workaround
+                    # when this happens is to simply set the subproject's option directly.
+                    mlog.warning('Option {0!r} of type {1!r} in subproject {2!r} cannot yield '
+                                 'to parent option of type {3!r}, ignoring parent value. '
+                                 'Use -D{2}:{0}=value to set the value for this option manually'
+                                 '.'.format(optname, opt_type, self.subproject, popt_type),
+                                 location=self.current_node)
+            return opt
+        except KeyError:
+            pass
+
+        raise InterpreterException(f'Tried to access unknown option {optname!r}.')
+
+    @typed_pos_args('get_option', str)
+    @noKwargs
+    def func_get_option(self, nodes: mparser.BaseNode, args: T.Tuple[str],
+                        kwargs: 'TYPE_kwargs') -> T.Union[coredata.UserOption, 'TYPE_var']:
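+        # e.g. `get_option('buildtype')`, or a project option declared in
+        # meson.options / meson_options.txt.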
+        optname = args[0]
+        if ':' in optname:
+            raise InterpreterException('Having a colon in option name is forbidden, '
+                                       'projects are not allowed to directly access '
+                                       'options of other subprojects.')
+        opt = self.get_option_internal(optname)
+        if isinstance(opt, coredata.UserFeatureOption):
+            opt.name = optname
+            return opt
+        elif isinstance(opt, coredata.UserOption):
+            if isinstance(opt.value, str):
+                return P_OBJ.OptionString(opt.value, f'{{{optname}}}')
+            return opt.value
+        return opt
+
+    @typed_pos_args('configuration_data', optargs=[dict])
+    @noKwargs
+    def func_configuration_data(self, node: mparser.BaseNode, args: T.Tuple[T.Optional[T.Dict[str, T.Any]]],
+                                kwargs: 'TYPE_kwargs') -> build.ConfigurationData:
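+        # e.g. `conf = configuration_data({'have_foo': true})` (the optional
+        # initial dictionary requires Meson 0.49.0 or newer).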
+        initial_values = args[0]
+        if initial_values is not None:
+            FeatureNew.single_use('configuration_data dictionary', '0.49.0', self.subproject, location=node)
+            for k, v in initial_values.items():
+                if not isinstance(v, (str, int, bool)):
+                    raise InvalidArguments(
+                        f'"configuration_data": initial value dictionary key "{k!r}"" must be "str | int | bool", not "{v!r}"')
+        return build.ConfigurationData(initial_values)
+
+    def set_backend(self) -> None:
+        # The backend is already set when parsing subprojects
+        if self.backend is not None:
+            return
+        from ..backend import backends
+
+        if self.user_defined_options and OptionKey('genvslite') in self.user_defined_options.cmd_line_options.keys():
+            # Use of the '--genvslite vsxxxx' option ultimately overrides any '--backend xxx'
+            # option the user may specify.
+            backend_name = self.coredata.get_option(OptionKey('genvslite'))
+            self.backend = backends.get_genvslite_backend(backend_name, self.build, self)
+        else:
+            backend_name = self.coredata.get_option(OptionKey('backend'))
+            self.backend = backends.get_backend_from_name(backend_name, self.build, self)
+
+        if self.backend is None:
+            raise InterpreterException(f'Unknown backend "{backend_name}".')
+        if backend_name != self.backend.name:
+            if self.backend.name.startswith('vs'):
+                mlog.log('Auto detected Visual Studio backend:', mlog.bold(self.backend.name))
+            if not self.environment.first_invocation:
+                raise MesonBugException(f'Backend changed from {backend_name} to {self.backend.name}')
+            self.coredata.set_option(OptionKey('backend'), self.backend.name, first_invocation=True)
+
+        # Only init backend options on the first invocation; otherwise this
+        # would override values previously set from the command line.
+        if self.environment.first_invocation:
+            self.coredata.init_backend_options(backend_name)
+
+        options = {k: v for k, v in self.environment.options.items() if k.is_backend()}
+        self.coredata.set_options(options)
+
+    @typed_pos_args('project', str, varargs=str)
+    @typed_kwargs(
+        'project',
+        DEFAULT_OPTIONS,
+        KwargInfo('meson_version', (str, NoneType)),
+        KwargInfo(
+            'version',
+            (str, mesonlib.File, NoneType, list),
+            default='undefined',
+            validator=_project_version_validator,
+            convertor=lambda x: x[0] if isinstance(x, list) else x,
+        ),
+        KwargInfo('license', (ContainerTypeInfo(list, str), NoneType), default=None, listify=True),
+        KwargInfo('license_files', ContainerTypeInfo(list, str), default=[], listify=True, since='1.1.0'),
+        KwargInfo('subproject_dir', str, default='subprojects'),
+    )
+    def func_project(self, node: mparser.FunctionNode, args: T.Tuple[str, T.List[str]], kwargs: 'kwtypes.Project') -> None:
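+        # e.g. `project('myproj', 'c', version: '1.0', meson_version: '>=0.60')`.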
+        proj_name, proj_langs = args
+        if ':' in proj_name:
+            raise InvalidArguments(f"Project name {proj_name!r} must not contain ':'")
+
+        # This needs to be evaluated as early as possible, as meson uses this
+        # for things like deprecation testing.
+        if kwargs['meson_version']:
+            self.handle_meson_version(kwargs['meson_version'], node)
+
+        # Load "meson.options" before "meson_options.txt", and produce a warning if
+        # it is being used with an old version. I have added check that if both
+        # exist the warning isn't raised
+        option_file = os.path.join(self.source_root, self.subdir, 'meson.options')
+        old_option_file = os.path.join(self.source_root, self.subdir, 'meson_options.txt')
+
+        if os.path.exists(option_file):
+            if os.path.exists(old_option_file):
+                if os.path.samefile(option_file, old_option_file):
+                    mlog.debug("Not warning about meson.options with version minimum < 1.1 because meson_options.txt also exists")
+                else:
+                    raise MesonException("meson.options and meson_options.txt both exist, but are not the same file.")
+            else:
+                FeatureNew.single_use('meson.options file', '1.1', self.subproject, 'Use meson_options.txt instead')
+        else:
+            option_file = old_option_file
+        if os.path.exists(option_file):
+            oi = optinterpreter.OptionInterpreter(self.subproject)
+            oi.process(option_file)
+            self.coredata.update_project_options(oi.options)
+            self.add_build_def_file(option_file)
+
+        if self.subproject:
+            self.project_default_options = {k.evolve(subproject=self.subproject): v
+                                            for k, v in kwargs['default_options'].items()}
+        else:
+            self.project_default_options = kwargs['default_options']
+
+        # Do not set default_options on reconfigure; otherwise it would override
+        # values previously set from the command line. That means that changing
+        # default_options in a project will trigger a reconfigure but won't
+        # have any effect.
+        #
+        # If this is the first invocation we always need to initialize
+        # builtins, if this is a subproject that is new in a re-invocation we
+        # need to initialize builtins for that
+        if self.environment.first_invocation or (self.subproject != '' and self.subproject not in self.coredata.initialized_subprojects):
+            default_options = self.project_default_options.copy()
+            default_options.update(self.default_project_options)
+            self.coredata.init_builtins(self.subproject)
+            self.coredata.initialized_subprojects.add(self.subproject)
+        else:
+            default_options = {}
+        self.coredata.set_default_options(default_options, self.subproject, self.environment)
+
+        if not self.is_subproject():
+            self.build.project_name = proj_name
+        self.active_projectname = proj_name
+
+        version = kwargs['version']
+        if isinstance(version, mesonlib.File):
+            FeatureNew.single_use('version from file', '0.57.0', self.subproject, location=node)
+            self.add_build_def_file(version)
+            ifname = version.absolute_path(self.environment.source_dir,
+                                           self.environment.build_dir)
+            try:
+                ver_data = Path(ifname).read_text(encoding='utf-8').split('\n')
+            except FileNotFoundError:
+                raise InterpreterException('Version file not found.')
+            if len(ver_data) == 2 and ver_data[1] == '':
+                ver_data = ver_data[0:1]
+            if len(ver_data) != 1:
+                raise InterpreterException('Version file must contain exactly one line of text.')
+            self.project_version = ver_data[0]
+        else:
+            self.project_version = version
+
+        if self.build.project_version is None:
+            self.build.project_version = self.project_version
+
+        if kwargs['license'] is None:
+            proj_license = ['unknown']
+            if kwargs['license_files']:
+                raise InvalidArguments('Project `license` name must be specified when `license_files` is set')
+        else:
+            proj_license = kwargs['license']
+        proj_license_files = []
+        for i in self.source_strings_to_files(kwargs['license_files']):
+            ifname = i.absolute_path(self.environment.source_dir,
+                                     self.environment.build_dir)
+            proj_license_files.append((ifname, i))
+        self.build.dep_manifest[proj_name] = build.DepManifest(self.project_version, proj_license,
+                                                               proj_license_files, self.subproject)
+        if self.subproject in self.build.projects:
+            raise InvalidCode('Second call to project().')
+
+        # spdirname is the subproject_dir for this project, relative to self.subdir.
+        # self.subproject_dir is the subproject_dir for the main project, relative to top source dir.
+        spdirname = kwargs['subproject_dir']
+        if not isinstance(spdirname, str):
+            raise InterpreterException('Subproject_dir must be a string')
+        if os.path.isabs(spdirname):
+            raise InterpreterException('Subproject_dir must not be an absolute path.')
+        if spdirname.startswith('.'):
+            raise InterpreterException('Subproject_dir must not begin with a period.')
+        if '..' in spdirname:
+            raise InterpreterException('Subproject_dir must not contain a ".." segment.')
+        if not self.is_subproject():
+            self.subproject_dir = spdirname
+        self.build.subproject_dir = self.subproject_dir
+
+        # Load wrap files from this (sub)project.
+        wrap_mode = self.coredata.get_option(OptionKey('wrap_mode'))
+        if not self.is_subproject() or wrap_mode != WrapMode.nopromote:
+            subdir = os.path.join(self.subdir, spdirname)
+            r = wrap.Resolver(self.environment.get_source_dir(), subdir, self.subproject, wrap_mode)
+            if self.is_subproject():
+                self.environment.wrap_resolver.merge_wraps(r)
+            else:
+                self.environment.wrap_resolver = r
+
+        self.build.projects[self.subproject] = proj_name
+        mlog.log('Project name:', mlog.bold(proj_name))
+        mlog.log('Project version:', mlog.bold(self.project_version))
+
+        if not self.is_subproject():
+            # We have to activate VS before adding languages and before calling
+            # self.set_backend() otherwise it wouldn't be able to detect which
+            # vs backend version we need. But after setting default_options in case
+            # the project sets vs backend by default.
+            backend = self.coredata.get_option(OptionKey('backend'))
+            vsenv = self.coredata.get_option(OptionKey('vsenv'))
+            force_vsenv = vsenv or backend.startswith('vs')
+            mesonlib.setup_vsenv(force_vsenv)
+
+        self.add_languages(proj_langs, True, MachineChoice.HOST)
+        self.add_languages(proj_langs, False, MachineChoice.BUILD)
+
+        self.set_backend()
+        if not self.is_subproject():
+            self.check_stdlibs()
+
+    @typed_kwargs('add_languages', KwargInfo('native', (bool, NoneType), since='0.54.0'), REQUIRED_KW)
+    @typed_pos_args('add_languages', varargs=str)
+    def func_add_languages(self, node: mparser.FunctionNode, args: T.Tuple[T.List[str]], kwargs: 'kwtypes.FuncAddLanguages') -> bool:
+        langs = args[0]
+        disabled, required, feature = extract_required_kwarg(kwargs, self.subproject)
+        native = kwargs['native']
+
+        if disabled:
+            for lang in sorted(langs, key=compilers.sort_clink):
+                mlog.log('Compiler for language', mlog.bold(lang), 'skipped: feature', mlog.bold(feature), 'disabled')
+            return False
+        if native is not None:
+            return self.add_languages(langs, required, self.machine_from_native_kwarg(kwargs))
+        else:
+            # absent 'native' means 'both' for backwards compatibility
+            tv = FeatureNew.get_target_version(self.subproject)
+            if FeatureNew.check_version(tv, '0.54.0'):
+                mlog.warning('add_languages is missing native:, assuming languages are wanted for both host and build.',
+                             location=node)
+
+            success = self.add_languages(langs, False, MachineChoice.BUILD)
+            success &= self.add_languages(langs, required, MachineChoice.HOST)
+            return success
+
+    @noArgsFlattening
+    @noKwargs
+    def func_message(self, node: mparser.BaseNode, args, kwargs):
+        if len(args) > 1:
+            FeatureNew.single_use('message with more than one argument', '0.54.0', self.subproject, location=node)
+        args_str = [stringifyUserArguments(i) for i in args]
+        self.message_impl(args_str)
+
+    def message_impl(self, args):
+        mlog.log(mlog.bold('Message:'), *args)
+
+    @noArgsFlattening
+    @FeatureNew('summary', '0.53.0')
+    @typed_pos_args('summary', (str, dict), optargs=[object])
+    @typed_kwargs(
+        'summary',
+        KwargInfo('section', str, default=''),
+        KwargInfo('bool_yn', bool, default=False),
+        KwargInfo('list_sep', (str, NoneType), since='0.54.0')
+    )
+    def func_summary(self, node: mparser.BaseNode, args: T.Tuple[T.Union[str, T.Dict[str, T.Any]], T.Optional[T.Any]],
+                     kwargs: 'kwtypes.Summary') -> None:
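+        # e.g. `summary({'prefix': get_option('prefix')}, section: 'Directories')`.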
+        if args[1] is None:
+            if not isinstance(args[0], dict):
+                raise InterpreterException('Summary first argument must be dictionary.')
+            values = args[0]
+        else:
+            if not isinstance(args[0], str):
+                raise InterpreterException('Summary first argument must be string.')
+            values = {args[0]: args[1]}
+        self.summary_impl(kwargs['section'], values, kwargs)
+
+    def summary_impl(self, section: str, values, kwargs: 'kwtypes.Summary') -> None:
+        if self.subproject not in self.summary:
+            self.summary[self.subproject] = Summary(self.active_projectname, self.project_version)
+        self.summary[self.subproject].add_section(
+            section, values, kwargs['bool_yn'], kwargs['list_sep'], self.subproject)
+
+    def _print_summary(self) -> None:
+        # Add automatic 'Subprojects' section in main project.
+        all_subprojects = collections.OrderedDict()
+        for name, subp in sorted(self.subprojects.items()):
+            value = [subp.found()]
+            if subp.disabled_feature:
+                value += [f'Feature {subp.disabled_feature!r} disabled']
+            elif subp.exception:
+                value += [str(subp.exception)]
+            elif subp.warnings > 0:
+                value += [f'{subp.warnings} warnings']
+            if subp.callstack:
+                stack = ' => '.join(subp.callstack)
+                value += [f'(from {stack})']
+            all_subprojects[name] = value
+        if all_subprojects:
+            self.summary_impl('Subprojects', all_subprojects,
+                              {'bool_yn': True,
+                               'list_sep': ' ',
+                               })
+        # Add automatic section with all user defined options
+        if self.user_defined_options:
+            values = collections.OrderedDict()
+            if self.user_defined_options.cross_file:
+                values['Cross files'] = self.user_defined_options.cross_file
+            if self.user_defined_options.native_file:
+                values['Native files'] = self.user_defined_options.native_file
+            sorted_options = sorted(self.user_defined_options.cmd_line_options.items())
+            values.update({str(k): v for k, v in sorted_options})
+            if values:
+                self.summary_impl('User defined options', values, {'bool_yn': False, 'list_sep': None})
+        # Print all summaries, main project last.
+        mlog.log('')  # newline
+        main_summary = self.summary.pop('', None)
+        for subp_name, summary in sorted(self.summary.items()):
+            if self.subprojects[subp_name].found():
+                summary.dump()
+        if main_summary:
+            main_summary.dump()
+
+    @noArgsFlattening
+    @FeatureNew('warning', '0.44.0')
+    @noKwargs
+    def func_warning(self, node, args, kwargs):
+        if len(args) > 1:
+            FeatureNew.single_use('warning with more than one argument', '0.54.0', self.subproject, location=node)
+        args_str = [stringifyUserArguments(i) for i in args]
+        mlog.warning(*args_str, location=node)
+
+    @noArgsFlattening
+    @noKwargs
+    def func_error(self, node, args, kwargs):
+        if len(args) > 1:
+            FeatureNew.single_use('error with more than one argument', '0.58.0', self.subproject, location=node)
+        args_str = [stringifyUserArguments(i) for i in args]
+        raise InterpreterException('Problem encountered: ' + ' '.join(args_str))
+
+    @noArgsFlattening
+    @FeatureNew('debug', '0.63.0')
+    @noKwargs
+    def func_debug(self, node, args, kwargs):
+        args_str = [stringifyUserArguments(i) for i in args]
+        mlog.debug('Debug:', *args_str)
+
+    @noKwargs
+    @noPosargs
+    def func_exception(self, node, args, kwargs):
+        raise RuntimeError('unit test traceback :)')
+
+    @noKwargs
+    @typed_pos_args('expect_error', str)
+    def func_expect_error(self, node: mparser.BaseNode, args: T.Tuple[str], kwargs: TYPE_kwargs) -> ContextManagerObject:
+        class ExpectErrorObject(ContextManagerObject):
+            def __init__(self, msg: str, subproject: str) -> None:
+                super().__init__(subproject)
+                self.msg = msg
+
+            def __exit__(self, exc_type, exc_val, exc_tb):
+                if exc_val is None:
+                    raise InterpreterException('Expecting an error but code block succeeded')
+                if isinstance(exc_val, mesonlib.MesonException):
+                    msg = str(exc_val)
+                    if msg != self.msg:
+                        raise InterpreterException(f'Expecting error {self.msg!r} but got {msg!r}')
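+                    # Returning True from __exit__ suppresses the expected,
+                    # matching exception.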
+                    return True
+        return ExpectErrorObject(args[0], self.subproject)
+
+    def add_languages(self, args: T.List[str], required: bool, for_machine: MachineChoice) -> bool:
+        success = self.add_languages_for(args, required, for_machine)
+        if not self.coredata.is_cross_build():
+            self.coredata.copy_build_options_from_regular_ones()
+        self._redetect_machines()
+        return success
+
+    def should_skip_sanity_check(self, for_machine: MachineChoice) -> bool:
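+        # 'skip_sanity_check' is read from the [properties] section of the
+        # machine file for the host machine.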
+        should = self.environment.properties.host.get('skip_sanity_check', False)
+        if not isinstance(should, bool):
+            raise InterpreterException('Option skip_sanity_check must be a boolean.')
+        if for_machine != MachineChoice.HOST and not should:
+            return False
+        if not self.environment.is_cross_build() and not should:
+            return False
+        return should
+
+    def add_languages_for(self, args: T.List[str], required: bool, for_machine: MachineChoice) -> bool:
+        args = [a.lower() for a in args]
+        langs = set(self.compilers[for_machine].keys())
+        langs.update(args)
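+        # 'langs' is the union of already-configured and newly requested
+        # languages, so implicit requirements (e.g. Vala needing C) are
+        # detected even across separate add_languages() calls.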
+        # We'd really like to add cython's default language here, but we
+        # can't, because the cython compiler hasn't been initialized yet, so
+        # the relevant option is not available. Since we cannot know which
+        # compiler to add by default, and we don't want to add unnecessary
+        # compilers, we add nothing for cython here and instead do it when
+        # the first cython target using a particular language is defined.
+        if 'vala' in langs and 'c' not in langs:
+            FeatureNew.single_use('Adding Vala language without C', '0.59.0', self.subproject, location=self.current_node)
+            args.append('c')
+        if 'nasm' in langs:
+            FeatureNew.single_use('Adding NASM language', '0.64.0', self.subproject, location=self.current_node)
+
+        success = True
+        for lang in sorted(args, key=compilers.sort_clink):
+            if lang in self.compilers[for_machine]:
+                continue
+            machine_name = for_machine.get_lower_case_name()
+            comp = self.coredata.compilers[for_machine].get(lang)
+            if not comp:
+                try:
+                    skip_sanity_check = self.should_skip_sanity_check(for_machine)
+                    if skip_sanity_check:
+                        mlog.log('Cross compiler sanity tests disabled via the cross file.', once=True)
+                    comp = compilers.detect_compiler_for(self.environment, lang, for_machine, skip_sanity_check)
+                    if comp is None:
+                        raise InvalidArguments(f'Tried to use unknown language "{lang}".')
+                except mesonlib.MesonException:
+                    if not required:
+                        mlog.log('Compiler for language',
+                                 mlog.bold(lang), 'for the', machine_name,
+                                 'machine not found.')
+                        success = False
+                        continue
+                    else:
+                        raise
+
+            # Add per-subproject compiler options. They inherit their values from the main project.
+            if self.subproject:
+                options = {}
+                for k in comp.get_options():
+                    v = copy.copy(self.coredata.options[k])
+                    k = k.evolve(subproject=self.subproject)
+                    options[k] = v
+                self.coredata.add_compiler_options(options, lang, for_machine, self.environment)
+
+            if for_machine == MachineChoice.HOST or self.environment.is_cross_build():
+                logger_fun = mlog.log
+            else:
+                logger_fun = mlog.debug
+            logger_fun(comp.get_display_language(), 'compiler for the', machine_name, 'machine:',
+                       mlog.bold(' '.join(comp.get_exelist())), comp.get_version_string())
+            if comp.linker is not None:
+                logger_fun(comp.get_display_language(), 'linker for the', machine_name, 'machine:',
+                           mlog.bold(' '.join(comp.linker.get_exelist())), comp.linker.id, comp.linker.version)
+            self.build.ensure_static_linker(comp)
+            self.compilers[for_machine][lang] = comp
+
+        return success
+
+    def program_from_file_for(self, for_machine: MachineChoice, prognames: T.List[mesonlib.FileOrString]
+                              ) -> T.Optional[ExternalProgram]:
+        for p in prognames:
+            if isinstance(p, mesonlib.File):
+                continue  # Always points to a local (i.e. self-generated) file.
+            if not isinstance(p, str):
+                raise InterpreterException('Executable name must be a string')
+            prog = ExternalProgram.from_bin_list(self.environment, for_machine, p)
+            # if the machine file specified something, it may be a regular
+            # not-found program but we still want to return that
+            if not isinstance(prog, NonExistingExternalProgram):
+                return prog
+        return None
+
+    def program_from_system(self, args: T.List[mesonlib.FileOrString], search_dirs: T.List[str],
+                            extra_info: T.List[mlog.TV_Loggable]) -> T.Optional[ExternalProgram]:
+        # Search for scripts relative to current subdir.
+        # Do not cache found programs because find_program('foobar')
+        # might give different results when run from different source dirs.
+        source_dir = os.path.join(self.environment.get_source_dir(), self.subdir)
+        for exename in args:
+            if isinstance(exename, mesonlib.File):
+                if exename.is_built:
+                    search_dir = os.path.join(self.environment.get_build_dir(),
+                                              exename.subdir)
+                else:
+                    search_dir = os.path.join(self.environment.get_source_dir(),
+                                              exename.subdir)
+                exename = exename.fname
+                extra_search_dirs = []
+            elif isinstance(exename, str):
+                search_dir = source_dir
+                extra_search_dirs = search_dirs
+            else:
+                raise InvalidArguments(f'find_program only accepts strings and files, not {exename!r}')
+            extprog = ExternalProgram(exename, search_dir=search_dir,
+                                      extra_search_dirs=extra_search_dirs,
+                                      silent=True)
+            if extprog.found():
+                extra_info.append(f"({' '.join(extprog.get_command())})")
+                return extprog
+        return None
+
+    def program_from_overrides(self, command_names: T.List[mesonlib.FileOrString],
+                               extra_info: T.List['mlog.TV_Loggable']
+                               ) -> T.Optional[T.Union[ExternalProgram, OverrideProgram, build.Executable]]:
+        for name in command_names:
+            if not isinstance(name, str):
+                continue
+            if name in self.build.find_overrides:
+                exe = self.build.find_overrides[name]
+                extra_info.append(mlog.blue('(overridden)'))
+                return exe
+        return None
+
+    def store_name_lookups(self, command_names: T.List[mesonlib.FileOrString]) -> None:
+        for name in command_names:
+            if isinstance(name, str):
+                self.build.searched_programs.add(name)
+
+    def add_find_program_override(self, name: str, exe: T.Union[build.Executable, ExternalProgram, 'OverrideProgram']) -> None:
+        if name in self.build.searched_programs:
+            raise InterpreterException(f'Tried to override finding of executable "{name}" which has already been found.')
+        if name in self.build.find_overrides:
+            raise InterpreterException(f'Tried to override executable "{name}" which has already been overridden.')
+        self.build.find_overrides[name] = exe
+
+    def notfound_program(self, args: T.List[mesonlib.FileOrString]) -> ExternalProgram:
+        return NonExistingExternalProgram(' '.join(
+            [a if isinstance(a, str) else a.absolute_path(self.environment.source_dir, self.environment.build_dir)
+             for a in args]))
+
+    # TODO: update modules to always pass `for_machine`. It is bad form to
+    # assume the host machine.
+    def find_program_impl(self, args: T.List[mesonlib.FileOrString],
+                          for_machine: MachineChoice = MachineChoice.HOST,
+                          required: bool = True, silent: bool = True,
+                          wanted: T.Union[str, T.List[str]] = '',
+                          search_dirs: T.Optional[T.List[str]] = None,
+                          version_func: T.Optional[T.Callable[[T.Union['ExternalProgram', 'build.Executable', 'OverrideProgram']], str]] = None
+                          ) -> T.Union['ExternalProgram', 'build.Executable', 'OverrideProgram']:
+        args = mesonlib.listify(args)
+
+        extra_info: T.List[mlog.TV_Loggable] = []
+        progobj = self.program_lookup(args, for_machine, required, search_dirs, extra_info)
+        if progobj is None:
+            progobj = self.notfound_program(args)
+
+        if isinstance(progobj, ExternalProgram) and not progobj.found():
+            if not silent:
+                mlog.log('Program', mlog.bold(progobj.get_name()), 'found:', mlog.red('NO'))
+            if required:
+                m = 'Program {!r} not found or not executable'
+                raise InterpreterException(m.format(progobj.get_name()))
+            return progobj
+
+        if wanted:
+            if version_func:
+                version = version_func(progobj)
+            elif isinstance(progobj, build.Executable):
+                if progobj.subproject:
+                    interp = self.subprojects[progobj.subproject].held_object
+                else:
+                    interp = self
+                assert isinstance(interp, Interpreter)
+                version = interp.project_version
+            else:
+                version = progobj.get_version(self)
+            is_found, not_found, _ = mesonlib.version_compare_many(version, wanted)
+            if not is_found:
+                mlog.log('Program', mlog.bold(progobj.name), 'found:', mlog.red('NO'),
+                         'found', mlog.normal_cyan(version), 'but need:',
+                         mlog.bold(', '.join([f"'{e}'" for e in not_found])), *extra_info)
+                if required:
+                    m = 'Invalid version of program, need {!r} {!r} found {!r}.'
+                    raise InterpreterException(m.format(progobj.name, not_found, version))
+                return self.notfound_program(args)
+            extra_info.insert(0, mlog.normal_cyan(version))
+
+        # Only store successful lookups
+        self.store_name_lookups(args)
+        if not silent:
+            mlog.log('Program', mlog.bold(progobj.name), 'found:', mlog.green('YES'), *extra_info)
+        if isinstance(progobj, build.Executable):
+            progobj.was_returned_by_find_program = True
+        return progobj
+
+    def program_lookup(self, args: T.List[mesonlib.FileOrString], for_machine: MachineChoice,
+                       required: bool, search_dirs: T.List[str], extra_info: T.List[mlog.TV_Loggable]
+                       ) -> T.Optional[T.Union[ExternalProgram, build.Executable, OverrideProgram]]:
+        progobj = self.program_from_overrides(args, extra_info)
+        if progobj:
+            return progobj
+
+        if args[0] == 'meson':
+            # Override find_program('meson') to return what we were invoked with
+            return ExternalProgram('meson', self.environment.get_build_command(), silent=True)
+
+        fallback = None
+        wrap_mode = self.coredata.get_option(OptionKey('wrap_mode'))
+        if wrap_mode != WrapMode.nofallback and self.environment.wrap_resolver:
+            fallback = self.environment.wrap_resolver.find_program_provider(args)
+        if fallback and wrap_mode == WrapMode.forcefallback:
+            return self.find_program_fallback(fallback, args, required, extra_info)
+
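+        # Remaining lookup order: binaries from the machine files first, then
+        # the system PATH / search_dirs, then the 'python3' special case, and
+        # finally the wrap fallback (only when the program is required).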
+        progobj = self.program_from_file_for(for_machine, args)
+        if progobj is None:
+            progobj = self.program_from_system(args, search_dirs, extra_info)
+        if progobj is None and args[0].endswith('python3'):
+            prog = ExternalProgram('python3', mesonlib.python_command, silent=True)
+            progobj = prog if prog.found() else None
+        if progobj is None and fallback and required:
+            progobj = self.find_program_fallback(fallback, args, required, extra_info)
+
+        return progobj
+
+    def find_program_fallback(self, fallback: str, args: T.List[mesonlib.FileOrString],
+                              required: bool, extra_info: T.List[mlog.TV_Loggable]
+                              ) -> T.Optional[T.Union[ExternalProgram, build.Executable, OverrideProgram]]:
+        mlog.log('Fallback to subproject', mlog.bold(fallback), 'which provides program',
+                 mlog.bold(' '.join(args)))
+        sp_kwargs: kwtypes.DoSubproject = {
+            'required': required,
+            'default_options': {},
+            'version': [],
+            'cmake_options': [],
+            'options': None,
+        }
+        self.do_subproject(fallback, 'meson', sp_kwargs)
+        return self.program_from_overrides(args, extra_info)
+
+    @typed_pos_args('find_program', varargs=(str, mesonlib.File), min_varargs=1)
+    @typed_kwargs(
+        'find_program',
+        DISABLER_KW.evolve(since='0.49.0'),
+        NATIVE_KW,
+        REQUIRED_KW,
+        KwargInfo('dirs', ContainerTypeInfo(list, str), default=[], listify=True, since='0.53.0'),
+        KwargInfo('version', ContainerTypeInfo(list, str), default=[], listify=True, since='0.52.0'),
+    )
+    @disablerIfNotFound
+    def func_find_program(self, node: mparser.BaseNode, args: T.Tuple[T.List[mesonlib.FileOrString]],
+                          kwargs: 'kwtypes.FindProgram',
+                          ) -> T.Union['build.Executable', ExternalProgram, 'OverrideProgram']:
+        disabled, required, feature = extract_required_kwarg(kwargs, self.subproject)
+        if disabled:
+            mlog.log('Program', mlog.bold(' '.join(args[0])), 'skipped: feature', mlog.bold(feature), 'disabled')
+            return self.notfound_program(args[0])
+
+        search_dirs = extract_search_dirs(kwargs)
+        return self.find_program_impl(args[0], kwargs['native'], required=required,
+                                      silent=False, wanted=kwargs['version'],
+                                      search_dirs=search_dirs)
+
+    def func_find_library(self, node, args, kwargs):
+        raise InvalidCode('find_library() is removed, use meson.get_compiler(\'name\').find_library() instead.\n'
+                          'Look here for documentation: http://mesonbuild.com/Reference-manual.html#compiler-object\n'
+                          'Look here for example: http://mesonbuild.com/howtox.html#add-math-library-lm-portably\n'
+                          )
+
+    # When adding kwargs, please check if they make sense in dependencies.get_dep_identifier()
+    @FeatureNewKwargs('dependency', '0.57.0', ['cmake_package_version'])
+    @FeatureNewKwargs('dependency', '0.56.0', ['allow_fallback'])
+    @FeatureNewKwargs('dependency', '0.54.0', ['components'])
+    @FeatureNewKwargs('dependency', '0.52.0', ['include_type'])
+    @FeatureNewKwargs('dependency', '0.50.0', ['not_found_message', 'cmake_module_path', 'cmake_args'])
+    @FeatureNewKwargs('dependency', '0.49.0', ['disabler'])
+    @FeatureNewKwargs('dependency', '0.40.0', ['method'])
+    @disablerIfNotFound
+    @permittedKwargs(permitted_dependency_kwargs)
+    @typed_pos_args('dependency', varargs=str, min_varargs=1)
+    @typed_kwargs('dependency', DEFAULT_OPTIONS.evolve(since='0.38.0'), allow_unknown=True)
+    def func_dependency(self, node: mparser.BaseNode, args: T.Tuple[T.List[str]], kwargs) -> Dependency:
+        # Replace '' with an empty list of names
+        names = [n for n in args[0] if n]
+        if len(names) > 1:
+            FeatureNew('dependency with more than one name', '0.60.0').use(self.subproject)
+        allow_fallback = kwargs.get('allow_fallback')
+        if allow_fallback is not None and not isinstance(allow_fallback, bool):
+            raise InvalidArguments('"allow_fallback" argument must be boolean')
+        fallback = kwargs.get('fallback')
+        default_options = kwargs.get('default_options')
+        df = DependencyFallbacksHolder(self, names, allow_fallback, default_options)
+        df.set_fallback(fallback)
+        not_found_message = kwargs.get('not_found_message', '')
+        if not isinstance(not_found_message, str):
+            raise InvalidArguments('The not_found_message must be a string.')
+        try:
+            d = df.lookup(kwargs)
+        except Exception:
+            if not_found_message:
+                self.message_impl([not_found_message])
+            raise
+        assert isinstance(d, Dependency)
+        if not d.found() and not_found_message:
+            self.message_impl([not_found_message])
+        # Ensure the correct include type
+        if 'include_type' in kwargs:
+            wanted = kwargs['include_type']
+            if not isinstance(wanted, str):
+                raise InvalidArguments('The `include_type` kwarg must be a string')
+            actual = d.get_include_type()
+            if wanted != actual:
+                mlog.debug(f'Current include type of {args[0]} is {actual}. Converting to requested {wanted}')
+                d = d.generate_system_dependency(wanted)
+        if d.feature_since is not None:
+            version, extra_msg = d.feature_since
+            FeatureNew.single_use(f'dep {d.name!r} custom lookup', version, self.subproject, extra_msg, node)
+        for f in d.featurechecks:
+            f.use(self.subproject, node)
+        return d
+
+    @FeatureNew('disabler', '0.44.0')
+    @noKwargs
+    @noPosargs
+    def func_disabler(self, node, args, kwargs):
+        return Disabler()
+
+    @FeatureNewKwargs('executable', '0.42.0', ['implib'])
+    @FeatureNewKwargs('executable', '0.56.0', ['win_subsystem'])
+    @FeatureDeprecatedKwargs('executable', '0.56.0', ['gui_app'], extra_message="Use 'win_subsystem' instead.")
+    @permittedKwargs(build.known_exe_kwargs)
+    @typed_pos_args('executable', str, varargs=(str, mesonlib.File, build.CustomTarget, build.CustomTargetIndex, build.GeneratedList, build.StructuredSources, build.ExtractedObjects, build.BuildTarget))
+    @typed_kwargs('executable', OVERRIDE_OPTIONS_KW, allow_unknown=True)
+    def func_executable(self, node: mparser.BaseNode,
+                        args: T.Tuple[str, T.List[BuildTargetSource]],
+                        kwargs) -> build.Executable:
+        return self.build_target(node, args, kwargs, build.Executable)
+
+    @permittedKwargs(build.known_stlib_kwargs)
+    @typed_pos_args('static_library', str, varargs=(str, mesonlib.File, build.CustomTarget, build.CustomTargetIndex, build.GeneratedList, build.StructuredSources, build.ExtractedObjects, build.BuildTarget))
+    @typed_kwargs('static_library', OVERRIDE_OPTIONS_KW, allow_unknown=True)
+    def func_static_lib(self, node: mparser.BaseNode,
+                        args: T.Tuple[str, T.List[BuildTargetSource]],
+                        kwargs) -> build.StaticLibrary:
+        return self.build_target(node, args, kwargs, build.StaticLibrary)
+
+    @permittedKwargs(build.known_shlib_kwargs)
+    @typed_pos_args('shared_library', str, varargs=(str, mesonlib.File, build.CustomTarget, build.CustomTargetIndex, build.GeneratedList, build.StructuredSources, build.ExtractedObjects, build.BuildTarget))
+    @typed_kwargs('shared_library', OVERRIDE_OPTIONS_KW, allow_unknown=True)
+    def func_shared_lib(self, node: mparser.BaseNode,
+                        args: T.Tuple[str, T.List[BuildTargetSource]],
+                        kwargs) -> build.SharedLibrary:
+        holder = self.build_target(node, args, kwargs, build.SharedLibrary)
+        holder.shared_library_only = True
+        return holder
+
+    @permittedKwargs(known_library_kwargs)
+    @typed_pos_args('both_libraries', str, varargs=(str, mesonlib.File, build.CustomTarget, build.CustomTargetIndex, build.GeneratedList, build.StructuredSources, build.ExtractedObjects, build.BuildTarget))
+    @typed_kwargs('both_libraries', OVERRIDE_OPTIONS_KW, allow_unknown=True)
+    def func_both_lib(self, node: mparser.BaseNode,
+                      args: T.Tuple[str, T.List[BuildTargetSource]],
+                      kwargs) -> build.BothLibraries:
+        return self.build_both_libraries(node, args, kwargs)
+
+    @FeatureNew('shared_module', '0.37.0')
+    @permittedKwargs(build.known_shmod_kwargs)
+    @typed_pos_args('shared_module', str, varargs=(str, mesonlib.File, build.CustomTarget, build.CustomTargetIndex, build.GeneratedList, build.StructuredSources, build.ExtractedObjects, build.BuildTarget))
+    @typed_kwargs('shared_module', OVERRIDE_OPTIONS_KW, allow_unknown=True)
+    def func_shared_module(self, node: mparser.BaseNode,
+                           args: T.Tuple[str, T.List[BuildTargetSource]],
+                           kwargs) -> build.SharedModule:
+        return self.build_target(node, args, kwargs, build.SharedModule)
+
+    @permittedKwargs(known_library_kwargs)
+    @typed_pos_args('library', str, varargs=(str, mesonlib.File, build.CustomTarget, build.CustomTargetIndex, build.GeneratedList, build.StructuredSources, build.ExtractedObjects, build.BuildTarget))
+    @typed_kwargs('library', OVERRIDE_OPTIONS_KW, allow_unknown=True)
+    def func_library(self, node: mparser.BaseNode,
+                     args: T.Tuple[str, T.List[BuildTargetSource]],
+                     kwargs) -> T.Union[build.StaticLibrary, build.SharedLibrary, build.BothLibraries]:
+        return self.build_library(node, args, kwargs)
+
+    @permittedKwargs(build.known_jar_kwargs)
+    @typed_pos_args('jar', str, varargs=(str, mesonlib.File, build.CustomTarget, build.CustomTargetIndex, build.GeneratedList, build.ExtractedObjects, build.BuildTarget))
+    @typed_kwargs('jar', OVERRIDE_OPTIONS_KW, allow_unknown=True)
+    def func_jar(self, node: mparser.BaseNode,
+                 args: T.Tuple[str, T.List[T.Union[str, mesonlib.File, build.GeneratedTypes]]],
+                 kwargs) -> build.Jar:
+        return self.build_target(node, args, kwargs, build.Jar)
+
+    @FeatureNewKwargs('build_target', '0.40.0', ['link_whole', 'override_options'])
+    @permittedKwargs(known_build_target_kwargs)
+    @typed_pos_args('build_target', str, varargs=(str, mesonlib.File, build.CustomTarget, build.CustomTargetIndex, build.GeneratedList, build.StructuredSources, build.ExtractedObjects, build.BuildTarget))
+    @typed_kwargs('build_target', OVERRIDE_OPTIONS_KW, allow_unknown=True)
+    def func_build_target(self, node: mparser.BaseNode,
+                          args: T.Tuple[str, T.List[BuildTargetSource]],
+                          kwargs) -> T.Union[build.Executable, build.StaticLibrary, build.SharedLibrary,
+                                             build.SharedModule, build.BothLibraries, build.Jar]:
+        if 'target_type' not in kwargs:
+            raise InterpreterException('Missing target_type keyword argument')
+        target_type = kwargs.pop('target_type')
+        if target_type == 'executable':
+            return self.build_target(node, args, kwargs, build.Executable)
+        elif target_type == 'shared_library':
+            return self.build_target(node, args, kwargs, build.SharedLibrary)
+        elif target_type == 'shared_module':
+            FeatureNew.single_use(
+                'build_target(target_type: \'shared_module\')',
+                '0.51.0', self.subproject, location=node)
+            return self.build_target(node, args, kwargs, build.SharedModule)
+        elif target_type == 'static_library':
+            return self.build_target(node, args, kwargs, build.StaticLibrary)
+        elif target_type == 'both_libraries':
+            return self.build_both_libraries(node, args, kwargs)
+        elif target_type == 'library':
+            return self.build_library(node, args, kwargs)
+        elif target_type == 'jar':
+            return self.build_target(node, args, kwargs, build.Jar)
+        else:
+            raise InterpreterException('Unknown target_type.')
+
+    @noPosargs
+    @typed_kwargs(
+        'vcs_tag',
+        CT_INPUT_KW.evolve(required=True),
+        MULTI_OUTPUT_KW,
+        # Cannot use the COMMAND_KW because command is allowed to be empty
+        KwargInfo(
+            'command',
+            ContainerTypeInfo(list, (str, build.BuildTarget, build.CustomTarget, build.CustomTargetIndex, ExternalProgram, mesonlib.File)),
+            listify=True,
+            default=[],
+        ),
+        KwargInfo('fallback', (str, NoneType)),
+        KwargInfo('replace_string', str, default='@VCS_TAG@'),
+    )
+    def func_vcs_tag(self, node: mparser.BaseNode, args: T.List['TYPE_var'], kwargs: 'kwtypes.VcsTag') -> build.CustomTarget:
+        if kwargs['fallback'] is None:
+            FeatureNew.single_use('Optional fallback in vcs_tag', '0.41.0', self.subproject, location=node)
+        fallback = kwargs['fallback'] or self.project_version
+        replace_string = kwargs['replace_string']
+        regex_selector = '(.*)' # default regex selector for custom command: use complete output
+        vcs_cmd = kwargs['command']
+        source_dir = os.path.normpath(os.path.join(self.environment.get_source_dir(), self.subdir))
+        if vcs_cmd:
+            if isinstance(vcs_cmd[0], (str, mesonlib.File)):
+                if isinstance(vcs_cmd[0], mesonlib.File):
+                    FeatureNew.single_use('vcs_tag with file as the first argument', '0.62.0', self.subproject, location=node)
+                maincmd = self.find_program_impl(vcs_cmd[0], required=False)
+                if maincmd.found():
+                    vcs_cmd[0] = maincmd
+            else:
+                FeatureNew.single_use('vcs_tag with custom_tgt, external_program, or exe as the first argument', '0.63.0', self.subproject, location=node)
+        else:
+            vcs = mesonlib.detect_vcs(source_dir)
+            if vcs:
+                mlog.log('Found {} repository at {}'.format(vcs['name'], vcs['wc_dir']))
+                vcs_cmd = vcs['get_rev'].split()
+                regex_selector = vcs['rev_regex']
+            else:
+                vcs_cmd = [' '] # executing this cmd will fail in vcstagger.py and force use of the fallback string
+        # vcstagger.py parameters: infile, outfile, fallback, source_dir, replace_string, regex_selector, command...
+
+        self._validate_custom_target_outputs(len(kwargs['input']) > 1, kwargs['output'], "vcs_tag")
+
+        cmd = self.environment.get_build_command() + \
+            ['--internal',
+             'vcstagger',
+             '@INPUT0@',
+             '@OUTPUT0@',
+             fallback,
+             source_dir,
+             replace_string,
+             regex_selector] + vcs_cmd
+
+        tg = build.CustomTarget(
+            kwargs['output'][0],
+            self.subdir,
+            self.subproject,
+            self.environment,
+            cmd,
+            self.source_strings_to_files(kwargs['input']),
+            kwargs['output'],
+            build_by_default=True,
+            build_always_stale=True,
+        )
+        self.add_target(tg.name, tg)
+        return tg
+
+    @FeatureNew('subdir_done', '0.46.0')
+    @noPosargs
+    @noKwargs
+    def func_subdir_done(self, node: mparser.BaseNode, args: TYPE_var, kwargs: TYPE_kwargs) -> T.NoReturn:
+        raise SubdirDoneRequest()
+
+    @staticmethod
+    def _validate_custom_target_outputs(has_multi_in: bool, outputs: T.Iterable[str], name: str) -> None:
+        """Checks for additional invalid values in a custom_target output.
+
+        This cannot be done with typed_kwargs because it requires the number of
+        inputs.
+        """
+        for out in outputs:
+            if has_multi_in and ('@PLAINNAME@' in out or '@BASENAME@' in out):
+                raise InvalidArguments(f'{name}: output cannot contain "@PLAINNAME@" or "@BASENAME@" '
+                                       'when there is more than one input (we can\'t know which to use)')
+
+    @typed_pos_args('custom_target', optargs=[str])
+    @typed_kwargs(
+        'custom_target',
+        COMMAND_KW,
+        CT_BUILD_ALWAYS,
+        CT_BUILD_ALWAYS_STALE,
+        CT_BUILD_BY_DEFAULT,
+        CT_INPUT_KW,
+        CT_INSTALL_DIR_KW,
+        CT_INSTALL_TAG_KW,
+        MULTI_OUTPUT_KW,
+        DEPENDS_KW,
+        DEPEND_FILES_KW,
+        DEPFILE_KW,
+        ENV_KW.evolve(since='0.57.0'),
+        INSTALL_KW,
+        INSTALL_MODE_KW.evolve(since='0.47.0'),
+        KwargInfo('feed', bool, default=False, since='0.59.0'),
+        KwargInfo('capture', bool, default=False),
+        KwargInfo('console', bool, default=False, since='0.48.0'),
+    )
+    def func_custom_target(self, node: mparser.FunctionNode, args: T.Tuple[str],
+                           kwargs: 'kwtypes.CustomTarget') -> build.CustomTarget:
+        if kwargs['depfile'] and ('@BASENAME@' in kwargs['depfile'] or '@PLAINNAME@' in kwargs['depfile']):
+            FeatureNew.single_use('substitutions in custom_target depfile', '0.47.0', self.subproject, location=node)
+        install_mode = self._warn_kwarg_install_mode_sticky(kwargs['install_mode'])
+
+        # Don't mutate the kwargs
+
+        build_by_default = kwargs['build_by_default']
+        build_always_stale = kwargs['build_always_stale']
+        # Remap build_always to build_by_default and build_always_stale
+        if kwargs['build_always'] is not None and kwargs['build_always_stale'] is not None:
+            raise InterpreterException('CustomTarget: "build_always" and "build_always_stale" are mutually exclusive')
+
+        if build_by_default is None and kwargs['install']:
+            build_by_default = True
+
+        elif kwargs['build_always'] is not None:
+            if build_by_default is None:
+                build_by_default = kwargs['build_always']
+            build_always_stale = kwargs['build_always']
+
+        # These are nullable so that we can know whether they're explicitly
+        # set or not. If they haven't been overwritten, set them to their true
+        # default
+        if build_by_default is None:
+            build_by_default = False
+        if build_always_stale is None:
+            build_always_stale = False
+
+        name = args[0]
+        if name is None:
+            # name will default to first output, but we cannot do that yet because
+            # they could need substitutions (e.g. @BASENAME@) first. CustomTarget()
+            # will take care of setting a proper default but name must be an empty
+            # string in the meantime.
+            FeatureNew.single_use('custom_target() with no name argument', '0.60.0', self.subproject, location=node)
+            name = ''
+        inputs = self.source_strings_to_files(kwargs['input'], strict=False)
+        command = kwargs['command']
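+        # A string command is resolved through find_program() so that
+        # program overrides and machine-file entries apply to custom
+        # targets as well.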
+        if command and isinstance(command[0], str):
+            command[0] = self.find_program_impl([command[0]])
+
+        if len(inputs) > 1 and kwargs['feed']:
+            raise InvalidArguments('custom_target: "feed" keyword argument can only be used with a single input')
+        if len(kwargs['output']) > 1 and kwargs['capture']:
+            raise InvalidArguments('custom_target: "capture" keyword argument can only be used with a single output')
+        if kwargs['capture'] and kwargs['console']:
+            raise InvalidArguments('custom_target: "capture" and "console" keyword arguments are mutually exclusive')
+        for c in command:
+            if kwargs['capture'] and isinstance(c, str) and '@OUTPUT@' in c:
+                raise InvalidArguments('custom_target: "capture" keyword argument cannot be used with "@OUTPUT@"')
+            if kwargs['feed'] and isinstance(c, str) and '@INPUT@' in c:
+                raise InvalidArguments('custom_target: "feed" keyword argument cannot be used with "@INPUT@"')
+        if kwargs['install'] and not kwargs['install_dir']:
+            raise InvalidArguments('custom_target: "install_dir" keyword argument must be set when "install" is true.')
+        if len(kwargs['install_dir']) > 1:
+            FeatureNew.single_use('multiple install_dir for custom_target', '0.40.0', self.subproject, location=node)
+        if len(kwargs['install_tag']) not in {0, 1, len(kwargs['output'])}:
+            raise InvalidArguments('custom_target: install_tag argument must have 0 or 1 outputs, '
+                                   'or the same number of elements as the output keyword argument. '
+                                   f'(there are {len(kwargs["install_tag"])} install_tags, '
+                                   f'and {len(kwargs["output"])} outputs)')
+
+        for t in kwargs['output']:
+            self.validate_forbidden_targets(t)
+        self._validate_custom_target_outputs(len(inputs) > 1, kwargs['output'], "custom_target")
+
+        tg = build.CustomTarget(
+            name,
+            self.subdir,
+            self.subproject,
+            self.environment,
+            command,
+            inputs,
+            kwargs['output'],
+            build_always_stale=build_always_stale,
+            build_by_default=build_by_default,
+            capture=kwargs['capture'],
+            console=kwargs['console'],
+            depend_files=kwargs['depend_files'],
+            depfile=kwargs['depfile'],
+            extra_depends=kwargs['depends'],
+            env=kwargs['env'],
+            feed=kwargs['feed'],
+            install=kwargs['install'],
+            install_dir=kwargs['install_dir'],
+            install_mode=install_mode,
+            install_tag=kwargs['install_tag'],
+            backend=self.backend)
+        self.add_target(tg.name, tg)
+        return tg
+
+    @typed_pos_args('run_target', str)
+    @typed_kwargs(
+        'run_target',
+        COMMAND_KW,
+        DEPENDS_KW,
+        ENV_KW.evolve(since='0.57.0'),
+    )
+    def func_run_target(self, node: mparser.FunctionNode, args: T.Tuple[str],
+                        kwargs: 'kwtypes.RunTarget') -> build.RunTarget:
+        all_args = kwargs['command'].copy()
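+        # Copied so that the program lookup below can replace all_args[0]
+        # without mutating the user-supplied 'command' kwarg.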
+
+        for i in listify(all_args):
+            if isinstance(i, ExternalProgram) and not i.found():
+                raise InterpreterException(f'Tried to use non-existing executable {i.name!r}')
+        if isinstance(all_args[0], str):
+            all_args[0] = self.find_program_impl([all_args[0]])
+        name = args[0]
+        tg = build.RunTarget(name, all_args, kwargs['depends'], self.subdir, self.subproject, self.environment,
+                             kwargs['env'])
+        self.add_target(name, tg)
+        return tg
+
+    @FeatureNew('alias_target', '0.52.0')
+    @typed_pos_args('alias_target', str, varargs=build.Target, min_varargs=1)
+    @noKwargs
+    def func_alias_target(self, node: mparser.BaseNode, args: T.Tuple[str, T.List[build.Target]],
+                          kwargs: 'TYPE_kwargs') -> build.AliasTarget:
+        name, deps = args
+        tg = build.AliasTarget(name, deps, self.subdir, self.subproject, self.environment)
+        self.add_target(name, tg)
+        return tg
+
+    @typed_pos_args('generator', (build.Executable, ExternalProgram))
+    @typed_kwargs(
+        'generator',
+        KwargInfo('arguments', ContainerTypeInfo(list, str, allow_empty=False), required=True, listify=True),
+        KwargInfo('output', ContainerTypeInfo(list, str, allow_empty=False), required=True, listify=True),
+        DEPFILE_KW,
+        DEPENDS_KW,
+        KwargInfo('capture', bool, default=False, since='0.43.0'),
+    )
+    def func_generator(self, node: mparser.FunctionNode,
+                       args: T.Tuple[T.Union[build.Executable, ExternalProgram]],
+                       kwargs: 'kwtypes.FuncGenerator') -> build.Generator:
+        for rule in kwargs['output']:
+            if '@BASENAME@' not in rule and '@PLAINNAME@' not in rule:
+                raise InvalidArguments('Every element of "output" must contain @BASENAME@ or @PLAINNAME@.')
+            if has_path_sep(rule):
+                raise InvalidArguments('"output" must not contain a directory separator.')
+        if len(kwargs['output']) > 1:
+            for o in kwargs['output']:
+                if '@OUTPUT@' in o:
+                    raise InvalidArguments('Tried to use @OUTPUT@ in a rule with more than one output.')
+
+        gen = build.Generator(args[0], **kwargs)
+        self.generators.append(gen)
+        return gen
+
+    @typed_pos_args('benchmark', str, (build.Executable, build.Jar, ExternalProgram, mesonlib.File))
+    @typed_kwargs('benchmark', *TEST_KWS)
+    def func_benchmark(self, node: mparser.BaseNode,
+                       args: T.Tuple[str, T.Union[build.Executable, build.Jar, ExternalProgram, mesonlib.File]],
+                       kwargs: 'kwtypes.FuncBenchmark') -> None:
+        self.add_test(node, args, kwargs, False)
+
+    @typed_pos_args('test', str, (build.Executable, build.Jar, ExternalProgram, mesonlib.File))
+    @typed_kwargs('test', *TEST_KWS, KwargInfo('is_parallel', bool, default=True))
+    def func_test(self, node: mparser.BaseNode,
+                  args: T.Tuple[str, T.Union[build.Executable, build.Jar, ExternalProgram, mesonlib.File]],
+                  kwargs: 'kwtypes.FuncTest') -> None:
+        self.add_test(node, args, kwargs, True)
+
+    def unpack_env_kwarg(self, kwargs: T.Union[build.EnvironmentVariables, T.Dict[str, 'TYPE_var'], T.List['TYPE_var'], str]) -> build.EnvironmentVariables:
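+        # Manual application of ENV_KW validation/conversion, for callers
+        # whose kwargs are not (yet) handled by typed_kwargs.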
+        envlist = kwargs.get('env')
+        if envlist is None:
+            return build.EnvironmentVariables()
+        msg = ENV_KW.validator(envlist)
+        if msg:
+            raise InvalidArguments(f'"env": {msg}')
+        return ENV_KW.convertor(envlist)
+
+    def make_test(self, node: mparser.BaseNode,
+                  args: T.Tuple[str, T.Union[build.Executable, build.Jar, ExternalProgram, mesonlib.File]],
+                  kwargs: 'kwtypes.BaseTest') -> Test:
+        name = args[0]
+        if ':' in name:
+            mlog.deprecation(f'":" is not allowed in test name "{name}", it has been replaced with "_"',
+                             location=node)
+            name = name.replace(':', '_')
+        exe = args[1]
+        if isinstance(exe, ExternalProgram):
+            if not exe.found():
+                raise InvalidArguments('Tried to use not-found external program as test exe')
+        elif isinstance(exe, mesonlib.File):
+            exe = self.find_program_impl([exe])
+
+        env = self.unpack_env_kwarg(kwargs)
+
+        if kwargs['timeout'] <= 0:
+            FeatureNew.single_use('test() timeout <= 0', '0.57.0', self.subproject, location=node)
+
+        prj = self.subproject if self.is_subproject() else self.build.project_name
+
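+        # Prefix every suite name with the (sanitized) project name so tests
+        # can be selected per project; ':' is the suite separator, hence the
+        # replacements in 'prj'.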
+        suite: T.List[str] = []
+        for s in kwargs['suite']:
+            if s:
+                s = ':' + s
+            suite.append(prj.replace(' ', '_').replace(':', '_') + s)
+
+        return Test(name,
+                    prj,
+                    suite,
+                    exe,
+                    kwargs['depends'],
+                    kwargs.get('is_parallel', False),
+                    kwargs['args'],
+                    env,
+                    kwargs['should_fail'],
+                    kwargs['timeout'],
+                    kwargs['workdir'],
+                    kwargs['protocol'],
+                    kwargs['priority'],
+                    kwargs['verbose'])
+
+    def add_test(self, node: mparser.BaseNode, args: T.List, kwargs: T.Dict[str, T.Any], is_base_test: bool):
+        t = self.make_test(node, args, kwargs)
+        if is_base_test:
+            self.build.tests.append(t)
+            mlog.debug('Adding test', mlog.bold(t.name, True))
+        else:
+            self.build.benchmarks.append(t)
+            mlog.debug('Adding benchmark', mlog.bold(t.name, True))
+
+    @typed_pos_args('install_headers', varargs=(str, mesonlib.File))
+    @typed_kwargs(
+        'install_headers',
+        PRESERVE_PATH_KW,
+        KwargInfo('subdir', (str, NoneType)),
+        INSTALL_MODE_KW.evolve(since='0.47.0'),
+        INSTALL_DIR_KW,
+    )
+    def func_install_headers(self, node: mparser.BaseNode,
+                             args: T.Tuple[T.List['mesonlib.FileOrString']],
+                             kwargs: 'kwtypes.FuncInstallHeaders') -> T.List[build.Headers]:
+        install_mode = self._warn_kwarg_install_mode_sticky(kwargs['install_mode'])
+        source_files = self.source_strings_to_files(args[0])
+        install_subdir = kwargs['subdir']
+        if install_subdir is not None:
+            if kwargs['install_dir'] is not None:
+                raise InterpreterException('install_headers: cannot specify both "install_dir" and "subdir". Use only "install_dir".')
+            if os.path.isabs(install_subdir):
+                mlog.deprecation('Subdir keyword must not be an absolute path. This will be a hard error in the next release.')
+        else:
+            install_subdir = ''
+
+        dirs = collections.defaultdict(list)
+        ret_headers = []
+        if kwargs['preserve_path']:
+            for file in source_files:
+                dirname = os.path.dirname(file.fname)
+                dirs[dirname].append(file)
+        else:
+            dirs[''].extend(source_files)
+
+        for childdir in dirs:
+            h = build.Headers(dirs[childdir], os.path.join(install_subdir, childdir), kwargs['install_dir'],
+                              install_mode, self.subproject)
+            ret_headers.append(h)
+            self.build.headers.append(h)
+
+        return ret_headers
+
+    @typed_pos_args('install_man', varargs=(str, mesonlib.File))
+    @typed_kwargs(
+        'install_man',
+        KwargInfo('locale', (str, NoneType), since='0.58.0'),
+        INSTALL_MODE_KW.evolve(since='0.47.0'),
+        INSTALL_DIR_KW,
+    )
+    def func_install_man(self, node: mparser.BaseNode,
+                         args: T.Tuple[T.List['mesonlib.FileOrString']],
+                         kwargs: 'kwtypes.FuncInstallMan') -> build.Man:
+        install_mode = self._warn_kwarg_install_mode_sticky(kwargs['install_mode'])
+        # We just need to narrow this: the input is limited to strings and
+        # files, so source_strings_to_files() will only return Files here.
+        sources = self.source_strings_to_files(args[0])
+        for s in sources:
+            try:
+                num = int(s.rsplit('.', 1)[-1])
+            except (IndexError, ValueError):
+                num = 0
+            if not 1 <= num <= 9:
+                raise InvalidArguments('Man file must have a file extension of a number between 1 and 9')
+
+        m = build.Man(sources, kwargs['install_dir'], install_mode,
+                      self.subproject, kwargs['locale'])
+        self.build.man.append(m)
+
+        return m
+
+    @FeatureNew('install_emptydir', '0.60.0')
+    @typed_kwargs(
+        'install_emptydir',
+        INSTALL_MODE_KW,
+        KwargInfo('install_tag', (str, NoneType), since='0.62.0')
+    )
+    def func_install_emptydir(self, node: mparser.BaseNode, args: T.Tuple[str], kwargs) -> build.EmptyDir:
+        d = build.EmptyDir(args[0], kwargs['install_mode'], self.subproject, kwargs['install_tag'])
+        self.build.emptydir.append(d)
+
+        return d
+
+    @FeatureNew('install_symlink', '0.61.0')
+    @typed_pos_args('symlink_name', str)
+    @typed_kwargs(
+        'install_symlink',
+        KwargInfo('pointing_to', str, required=True),
+        KwargInfo('install_dir', str, required=True),
+        INSTALL_TAG_KW,
+    )
+    def func_install_symlink(self, node: mparser.BaseNode,
+                             args: T.Tuple[str],
+                             kwargs) -> build.SymlinkData:
+        name = args[0] # Validation while creating the SymlinkData object
+        target = kwargs['pointing_to']
+        l = build.SymlinkData(target, name, kwargs['install_dir'],
+                              self.subproject, kwargs['install_tag'])
+        self.build.symlinks.append(l)
+        return l
+
+    @FeatureNew('structured_sources', '0.62.0')
+    @typed_pos_args('structured_sources', object, optargs=[dict])
+    @noKwargs
+    @noArgsFlattening
+    def func_structured_sources(
+            self, node: mparser.BaseNode,
+            args: T.Tuple[object, T.Optional[T.Dict[str, object]]],
+            kwargs: 'TYPE_kwargs') -> build.StructuredSources:
+        valid_types = (str, mesonlib.File, build.GeneratedList, build.CustomTarget, build.CustomTargetIndex)
+        sources: T.Dict[str, T.List[T.Union[mesonlib.File, 'build.GeneratedTypes']]] = collections.defaultdict(list)
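+        # Positional sources are stored under the empty-string key, i.e. at
+        # the root of the structured-source tree.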
+
+        for arg in mesonlib.listify(args[0]):
+            if not isinstance(arg, valid_types):
+                raise InvalidArguments(f'structured_sources: type "{type(arg)}" is not valid')
+            if isinstance(arg, str):
+                arg = mesonlib.File.from_source_file(self.environment.source_dir, self.subdir, arg)
+            sources[''].append(arg)
+        if args[1]:
+            if '' in args[1]:
+                raise InvalidArguments('structured_sources: keys to dictionary argument may not be an empty string.')
+            for k, v in args[1].items():
+                for arg in mesonlib.listify(v):
+                    if not isinstance(arg, valid_types):
+                        raise InvalidArguments(f'structured_sources: type "{type(arg)}" is not valid')
+                    if isinstance(arg, str):
+                        arg = mesonlib.File.from_source_file(self.environment.source_dir, self.subdir, arg)
+                    sources[k].append(arg)
+        return build.StructuredSources(sources)
+
+    @typed_pos_args('subdir', str)
+    @typed_kwargs(
+        'subdir',
+        KwargInfo(
+            'if_found',
+            ContainerTypeInfo(list, object),
+            validator=lambda a: 'Objects must have a found() method' if not all(hasattr(x, 'found') for x in a) else None,
+            since='0.44.0',
+            default=[],
+            listify=True,
+        ),
+    )
+    def func_subdir(self, node: mparser.BaseNode, args: T.Tuple[str], kwargs: 'kwtypes.Subdir') -> None:
+        mesonlib.check_direntry_issues(args)
+        if '..' in args[0]:
+            raise InvalidArguments('Subdir contains ..')
+        if self.subdir == '' and args[0] == self.subproject_dir:
+            raise InvalidArguments('Must not go into subprojects dir with subdir(), use subproject() instead.')
+        if self.subdir == '' and args[0].startswith('meson-'):
+            raise InvalidArguments('The "meson-" prefix is reserved and cannot be used for top-level subdir().')
+        if args[0] == '':
+            raise InvalidArguments("The argument given to subdir() is the empty string ''. This is prohibited.")
+        for i in kwargs['if_found']:
+            if not i.found():
+                return
+
+        prev_subdir = self.subdir
+        subdir = os.path.join(prev_subdir, args[0])
+        if os.path.isabs(subdir):
+            raise InvalidArguments('Subdir argument must be a relative path.')
+        absdir = os.path.join(self.environment.get_source_dir(), subdir)
+        symlinkless_dir = os.path.realpath(absdir)
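+        # Resolve symlinks so a directory reached through different paths is
+        # still recognized as already visited.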
+        build_file = os.path.join(symlinkless_dir, 'meson.build')
+        if build_file in self.processed_buildfiles:
+            raise InvalidArguments(f'Tried to enter directory "{subdir}", which has already been visited.')
+        self.processed_buildfiles.add(build_file)
+        self.subdir = subdir
+        os.makedirs(os.path.join(self.environment.build_dir, subdir), exist_ok=True)
+        buildfilename = os.path.join(self.subdir, environment.build_filename)
+        self.build_def_files.add(buildfilename)
+        absname = os.path.join(self.environment.get_source_dir(), buildfilename)
+        if not os.path.isfile(absname):
+            self.subdir = prev_subdir
+            raise InterpreterException(f"Nonexistent build file '{buildfilename!s}'")
+        with open(absname, encoding='utf-8') as f:
+            code = f.read()
+        assert isinstance(code, str)
+        try:
+            codeblock = mparser.Parser(code, absname).parse()
+        except mesonlib.MesonException as me:
+            me.file = absname
+            raise me
+        try:
+            self.evaluate_codeblock(codeblock)
+        except SubdirDoneRequest:
+            pass
+        self.subdir = prev_subdir
+
+    def _get_kwarg_install_mode(self, kwargs: T.Dict[str, T.Any]) -> T.Optional[FileMode]:
+        if kwargs.get('install_mode', None) is None:
+            return None
+        if isinstance(kwargs['install_mode'], FileMode):
+            return kwargs['install_mode']
+        install_mode: T.List[str] = []
+        mode = mesonlib.typeslistify(kwargs.get('install_mode', []), (str, int))
+        for m in mode:
+            # We skip any arguments that are set to `false`
+            if m is False:
+                m = None
+            install_mode.append(m)
+        if len(install_mode) > 3:
+            raise InvalidArguments('Keyword argument install_mode takes at '
+                                   'most 3 arguments.')
+        if len(install_mode) > 0 and install_mode[0] is not None and \
+           not isinstance(install_mode[0], str):
+            raise InvalidArguments('Keyword argument install_mode requires the '
+                                   'permissions arg to be a string or false')
+        return FileMode(*install_mode)
+
+    # The sticky bit on a file is either ignored outright on basically any OS
+    # nowadays, silently dropped (Solaris), or triggers an "illegal operation"
+    # error (FreeBSD). It was likely added "because it exists", but should
+    # never be used. In theory it is useful for directories, but we never
+    # apply modes to directories other than in install_emptydir.
+    def _warn_kwarg_install_mode_sticky(self, mode: FileMode) -> FileMode:
+        if mode.perms > 0 and mode.perms & stat.S_ISVTX:
+            mlog.deprecation('install_mode with the sticky bit on a file does not do anything and will '
+                             'be ignored since Meson 0.64.0', location=self.current_node)
+            perms = stat.filemode(mode.perms - stat.S_ISVTX)[1:]
+            return FileMode(perms, mode.owner, mode.group)
+        else:
+            return mode
+
+    @typed_pos_args('install_data', varargs=(str, mesonlib.File))
+    @typed_kwargs(
+        'install_data',
+        KwargInfo('sources', ContainerTypeInfo(list, (str, mesonlib.File)), listify=True, default=[]),
+        KwargInfo('rename', ContainerTypeInfo(list, str), default=[], listify=True, since='0.46.0'),
+        INSTALL_MODE_KW.evolve(since='0.38.0'),
+        INSTALL_TAG_KW.evolve(since='0.60.0'),
+        INSTALL_DIR_KW,
+        PRESERVE_PATH_KW.evolve(since='0.64.0'),
+    )
+    def func_install_data(self, node: mparser.BaseNode,
+                          args: T.Tuple[T.List['mesonlib.FileOrString']],
+                          kwargs: 'kwtypes.FuncInstallData') -> build.Data:
+        sources = self.source_strings_to_files(args[0] + kwargs['sources'])
+        rename = kwargs['rename'] or None
+        if rename:
+            if len(rename) != len(sources):
+                raise InvalidArguments(
+                    '"rename" and "sources" argument lists must be the same length if "rename" is given. '
+                    f'Rename has {len(rename)} elements and sources has {len(sources)}.')
+
+        install_mode = self._warn_kwarg_install_mode_sticky(kwargs['install_mode'])
+        return self.install_data_impl(sources, kwargs['install_dir'], install_mode,
+                                      rename, kwargs['install_tag'],
+                                      preserve_path=kwargs['preserve_path'])
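+
+    # For illustration (not upstream code), a typical meson.build call handled
+    # by func_install_data above:
+    #   install_data('foo.dat', 'bar.dat',
+    #                install_dir: get_option('datadir') / 'myprog',
+    #                rename: ['foo-renamed.dat', 'bar-renamed.dat'])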
+
+    def install_data_impl(self, sources: T.List[mesonlib.File], install_dir: T.Optional[str],
+                          install_mode: FileMode, rename: T.Optional[T.List[str]],
+                          tag: T.Optional[str],
+                          install_dir_name: T.Optional[str] = None,
+                          install_data_type: T.Optional[str] = None,
+                          preserve_path: bool = False) -> T.List[build.Data]:
+        """Just the implementation with no validation."""
+        idir = install_dir or ''
+        idir_name = install_dir_name or idir or '{datadir}'
+        if isinstance(idir_name, P_OBJ.OptionString):
+            idir_name = idir_name.optname
+        dirs = collections.defaultdict(list)
+        ret_data = []
+        if preserve_path:
+            for file in sources:
+                dirname = os.path.dirname(file.fname)
+                dirs[dirname].append(file)
+        else:
+            dirs[''].extend(sources)
+
+        for childdir, files in dirs.items():
+            d = build.Data(files, os.path.join(idir, childdir), os.path.join(idir_name, childdir),
+                           install_mode, self.subproject, rename, tag, install_data_type)
+            ret_data.append(d)
+
+        self.build.data.extend(ret_data)
+        return ret_data
+
+    @typed_pos_args('install_subdir', str)
+    @typed_kwargs(
+        'install_subdir',
+        KwargInfo('install_dir', str, required=True),
+        KwargInfo('strip_directory', bool, default=False),
+        KwargInfo('exclude_files', ContainerTypeInfo(list, str),
+                  default=[], listify=True, since='0.42.0',
+                  validator=lambda x: 'cannot be absolute' if any(os.path.isabs(d) for d in x) else None),
+        KwargInfo('exclude_directories', ContainerTypeInfo(list, str),
+                  default=[], listify=True, since='0.42.0',
+                  validator=lambda x: 'cannot be absolute' if any(os.path.isabs(d) for d in x) else None),
+        INSTALL_MODE_KW.evolve(since='0.38.0'),
+        INSTALL_TAG_KW.evolve(since='0.60.0'),
+    )
+    def func_install_subdir(self, node: mparser.BaseNode, args: T.Tuple[str],
+                            kwargs: 'kwtypes.FuncInstallSubdir') -> build.InstallDir:
+        exclude = (set(kwargs['exclude_files']), set(kwargs['exclude_directories']))
+
+        srcdir = os.path.join(self.environment.source_dir, self.subdir, args[0])
+        if not os.path.isdir(srcdir) or not any(os.listdir(srcdir)):
+            FeatureNew.single_use('install_subdir with empty directory', '0.47.0', self.subproject, location=node)
+            FeatureDeprecated.single_use('install_subdir with empty directory', '0.60.0', self.subproject,
+                                         'It worked by accident and is buggy. Use install_emptydir instead.', node)
+        install_mode = self._warn_kwarg_install_mode_sticky(kwargs['install_mode'])
+
+        idir_name = kwargs['install_dir']
+        if isinstance(idir_name, P_OBJ.OptionString):
+            idir_name = idir_name.optname
+
+        idir = build.InstallDir(
+            self.subdir,
+            args[0],
+            kwargs['install_dir'],
+            idir_name,
+            install_mode,
+            exclude,
+            kwargs['strip_directory'],
+            self.subproject,
+            install_tag=kwargs['install_tag'])
+        self.build.install_dirs.append(idir)
+        return idir
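+
+    # For illustration (not upstream code), a typical meson.build call handled
+    # by func_install_subdir above:
+    #   install_subdir('data', install_dir: get_option('datadir'),
+    #                  exclude_files: ['README.md'])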
+
+    @noPosargs
+    @typed_kwargs(
+        'configure_file',
+        DEPFILE_KW.evolve(since='0.52.0'),
+        INSTALL_MODE_KW.evolve(since='0.47.0'),
+        INSTALL_TAG_KW.evolve(since='0.60.0'),
+        KwargInfo('capture', bool, default=False, since='0.41.0'),
+        KwargInfo(
+            'command',
+            (ContainerTypeInfo(list, (build.Executable, ExternalProgram, compilers.Compiler, mesonlib.File, str), allow_empty=False), NoneType),
+            listify=True,
+        ),
+        KwargInfo(
+            'configuration',
+            (ContainerTypeInfo(dict, (str, int, bool)), build.ConfigurationData, NoneType),
+        ),
+        KwargInfo(
+            'copy', bool, default=False, since='0.47.0',
+            deprecated='0.64.0', deprecated_message='Use fs.copyfile instead',
+        ),
+        KwargInfo('encoding', str, default='utf-8', since='0.47.0'),
+        KwargInfo('format', str, default='meson', since='0.46.0',
+                  validator=in_set_validator({'meson', 'cmake', 'cmake@'})),
+        KwargInfo(
+            'input',
+            ContainerTypeInfo(list, (mesonlib.File, str)),
+            listify=True,
+            default=[],
+        ),
+        # Cannot use shared implementation until None backwards compat is dropped
+        KwargInfo('install', (bool, NoneType), since='0.50.0'),
+        KwargInfo('install_dir', (str, bool), default='',
+                  validator=lambda x: 'must be `false` if boolean' if x is True else None),
+        OUTPUT_KW,
+        KwargInfo('output_format', str, default='c', since='0.47.0',
+                  validator=in_set_validator({'c', 'nasm'})),
+    )
+    def func_configure_file(self, node: mparser.BaseNode, args: T.List[TYPE_var],
+                            kwargs: kwtypes.ConfigureFile):
+        actions = sorted(x for x in ['configuration', 'command', 'copy']
+                         if kwargs[x] not in [None, False])
+        num_actions = len(actions)
+        if num_actions == 0:
+            raise InterpreterException('Must specify an action with one of these '
+                                       'keyword arguments: \'configuration\', '
+                                       '\'command\', or \'copy\'.')
+        elif num_actions == 2:
+            raise InterpreterException('Must not specify both {!r} and {!r} '
+                                       'keyword arguments since they are '
+                                       'mutually exclusive.'.format(*actions))
+        elif num_actions == 3:
+            raise InterpreterException('Must specify only one of {!r}, {!r}, and '
+                                       '{!r} keyword arguments since they are '
+                                       'mutually exclusive.'.format(*actions))
+
+        if kwargs['capture'] and not kwargs['command']:
+            raise InvalidArguments('configure_file: "capture" keyword requires "command" keyword.')
+
+        install_mode = self._warn_kwarg_install_mode_sticky(kwargs['install_mode'])
+
+        fmt = kwargs['format']
+        output_format = kwargs['output_format']
+        depfile = kwargs['depfile']
+
+        # Validate input
+        inputs = self.source_strings_to_files(kwargs['input'])
+        inputs_abs = []
+        for f in inputs:
+            if isinstance(f, mesonlib.File):
+                inputs_abs.append(f.absolute_path(self.environment.source_dir,
+                                                  self.environment.build_dir))
+                self.add_build_def_file(f)
+            else:
+                raise InterpreterException('Inputs can only be strings or file objects')
+
+        # Validate output
+        output = kwargs['output']
+        if inputs_abs:
+            values = mesonlib.get_filenames_templates_dict(inputs_abs, None)
+            outputs = mesonlib.substitute_values([output], values)
+            output = outputs[0]
+            if depfile:
+                depfile = mesonlib.substitute_values([depfile], values)[0]
+        ofile_rpath = os.path.join(self.subdir, output)
+        if ofile_rpath in self.configure_file_outputs:
+            mesonbuildfile = os.path.join(self.subdir, 'meson.build')
+            current_call = f"{mesonbuildfile}:{self.current_lineno}"
+            first_call = "{}:{}".format(mesonbuildfile, self.configure_file_outputs[ofile_rpath])
+            mlog.warning('Output file', mlog.bold(ofile_rpath, True), 'for configure_file() at', current_call, 'overwrites configure_file() output at', first_call)
+        else:
+            self.configure_file_outputs[ofile_rpath] = self.current_lineno
+        (ofile_path, ofile_fname) = os.path.split(os.path.join(self.subdir, output))
+        ofile_abs = os.path.join(self.environment.build_dir, ofile_path, ofile_fname)
+
+        # Perform the appropriate action
+        if kwargs['configuration'] is not None:
+            conf = kwargs['configuration']
+            if isinstance(conf, dict):
+                FeatureNew.single_use('configure_file.configuration dictionary', '0.49.0', self.subproject, location=node)
+                for k, v in conf.items():
+                    if not isinstance(v, (str, int, bool)):
+                        raise InvalidArguments(
+                            f'"configuration_data": initial value dictionary key "{k!r}"" must be "str | int | bool", not "{v!r}"')
+                conf = build.ConfigurationData(conf)
+            mlog.log('Configuring', mlog.bold(output), 'using configuration')
+            if len(inputs) > 1:
+                raise InterpreterException('At most one input file can be given in configuration mode')
+            if inputs:
+                os.makedirs(os.path.join(self.environment.build_dir, self.subdir), exist_ok=True)
+                file_encoding = kwargs['encoding']
+                missing_variables, confdata_useless = \
+                    mesonlib.do_conf_file(inputs_abs[0], ofile_abs, conf,
+                                          fmt, file_encoding, self.subproject)
+                if missing_variables:
+                    var_list = ", ".join(repr(m) for m in sorted(missing_variables))
+                    mlog.warning(
+                        f"The variable(s) {var_list} in the input file '{inputs[0]}' are not "
+                        "present in the given configuration data.", location=node)
+                if confdata_useless:
+                    ifbase = os.path.basename(inputs_abs[0])
+                    tv = FeatureNew.get_target_version(self.subproject)
+                    if FeatureNew.check_version(tv, '0.47.0'):
+                        mlog.warning('Got an empty configuration_data() object and found no '
+                                     f'substitutions in the input file {ifbase!r}. If you want to '
+                                     'copy a file to the build dir, use the \'copy:\' keyword '
+                                     'argument added in 0.47.0', location=node)
+            else:
+                mesonlib.dump_conf_header(ofile_abs, conf, output_format)
+            conf.used = True
+        elif kwargs['command'] is not None:
+            if len(inputs) > 1:
+                FeatureNew.single_use('multiple inputs in configure_file()', '0.52.0', self.subproject, location=node)
+            # We use absolute paths for input and output here because the cwd
+            # that the command is run from is 'unspecified', so it could change.
+            # Currently it's builddir/subdir for in_builddir else srcdir/subdir.
+            values = mesonlib.get_filenames_templates_dict(inputs_abs, [ofile_abs])
+            if depfile:
+                depfile = os.path.join(self.environment.get_scratch_dir(), depfile)
+                values['@DEPFILE@'] = depfile
+            # Substitute @INPUT@, @OUTPUT@, etc here.
+            _cmd = mesonlib.substitute_values(kwargs['command'], values)
+            mlog.log('Configuring', mlog.bold(output), 'with command')
+            cmd, *args = _cmd
+            res = self.run_command_impl(node, (cmd, args),
+                                        {'capture': True, 'check': True, 'env': build.EnvironmentVariables()},
+                                        True)
+            if kwargs['capture']:
+                dst_tmp = ofile_abs + '~'
+                file_encoding = kwargs['encoding']
+                with open(dst_tmp, 'w', encoding=file_encoding) as f:
+                    f.writelines(res.stdout)
+                if inputs_abs:
+                    shutil.copymode(inputs_abs[0], dst_tmp)
+                mesonlib.replace_if_different(ofile_abs, dst_tmp)
+            if depfile:
+                mlog.log('Reading depfile:', mlog.bold(depfile))
+                with open(depfile, encoding='utf-8') as f:
+                    df = DepFile(f.readlines())
+                    deps = df.get_all_dependencies(ofile_fname)
+                    for dep in deps:
+                        self.add_build_def_file(dep)
+
+        elif kwargs['copy']:
+            if len(inputs_abs) != 1:
+                raise InterpreterException('Exactly one input file must be given in copy mode')
+            os.makedirs(os.path.join(self.environment.build_dir, self.subdir), exist_ok=True)
+            shutil.copy2(inputs_abs[0], ofile_abs)
+
+        # Install the file if requested. We check for the empty string for
+        # backwards compatibility: that was the behaviour before 0.45.0,
+        # so preserve it.
+        idir = kwargs['install_dir']
+        if idir is False:
+            idir = ''
+            FeatureDeprecated.single_use('configure_file install_dir: false', '0.50.0',
+                                         self.subproject, 'Use the `install:` kwarg instead', location=node)
+        install = kwargs['install'] if kwargs['install'] is not None else idir != ''
+        if install:
+            if not idir:
+                raise InterpreterException(
+                    '"install_dir" must be specified when "install" in a configure_file is true')
+            idir_name = idir
+            if isinstance(idir_name, P_OBJ.OptionString):
+                idir_name = idir_name.optname
+            cfile = mesonlib.File.from_built_file(ofile_path, ofile_fname)
+            install_tag = kwargs['install_tag']
+            self.build.data.append(build.Data([cfile], idir, idir_name, install_mode, self.subproject,
+                                              install_tag=install_tag, data_type='configure'))
+        return mesonlib.File.from_built_file(self.subdir, output)
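+
+    # For illustration (not upstream code), the three mutually exclusive
+    # configure_file() modes handled above, as written in a meson.build file:
+    #   configure_file(input: 'config.h.in', output: 'config.h', configuration: conf)
+    #   configure_file(input: 'in.txt', output: 'out.txt', command: [prog, '@INPUT@'], capture: true)
+    #   configure_file(input: 'data.txt', output: 'data.txt', copy: true)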
+
+    def extract_incdirs(self, kwargs, key: str = 'include_directories'):
+        prospectives = extract_as_list(kwargs, key)
+        if key == 'include_directories':
+            for i in prospectives:
+                if isinstance(i, str):
+                    FeatureNew.single_use('include_directories kwarg of type string', '0.50.0', self.subproject,
+                                          f'Use include_directories({i!r}) instead', location=self.current_node)
+                    break
+
+        result = []
+        for p in prospectives:
+            if isinstance(p, build.IncludeDirs):
+                result.append(p)
+            elif isinstance(p, str):
+                result.append(self.build_incdir_object([p]))
+            else:
+                raise InterpreterException('Include directory objects can only be created from strings or include directories.')
+        return result
+
+    @typed_pos_args('include_directories', varargs=str)
+    @typed_kwargs('include_directories', KwargInfo('is_system', bool, default=False))
+    def func_include_directories(self, node: mparser.BaseNode, args: T.Tuple[T.List[str]],
+                                 kwargs: 'kwtypes.FuncIncludeDirectories') -> build.IncludeDirs:
+        return self.build_incdir_object(args[0], kwargs['is_system'])
+
+    def build_incdir_object(self, incdir_strings: T.List[str], is_system: bool = False) -> build.IncludeDirs:
+        if not isinstance(is_system, bool):
+            raise InvalidArguments('is_system must be boolean.')
+        src_root = self.environment.get_source_dir()
+        build_root = self.environment.get_build_dir()
+        absbase_src = os.path.join(src_root, self.subdir)
+        absbase_build = os.path.join(build_root, self.subdir)
+
+        for a in incdir_strings:
+            if a.startswith(src_root):
+                raise InvalidArguments(textwrap.dedent('''\
+                    Tried to form an absolute path to a dir in the source tree.
+                    You should not do that; use relative paths instead for
+                    directories that are part of your project.
+
+                    To get the include path of any directory relative to the current dir, do
+
+                    incdir = include_directories(dirname)
+
+                    After this, incdir will contain both the current source dir and the
+                    corresponding build dir. It can then be used in any subdirectory, and
+                    Meson will take care of all the busywork to make paths work.
+
+                    Dirname can even be '.' to mark the current directory, though you should
+                    remember that the current source and build directories are always
+                    put in the include directories by default, so you only need
+                    include_directories('.') if you intend to use the result in a
+                    different subdirectory.
+
+                    Note that this error message can also be triggered by
+                    external dependencies being installed within your source
+                    tree - it's not recommended to do this.
+                    '''))
+            else:
+                try:
+                    self.validate_within_subproject(self.subdir, a)
+                except InterpreterException:
+                    mlog.warning('include_directories sandbox violation!', location=self.current_node)
+                    print(textwrap.dedent(f'''\
+                        The project is trying to access the directory {a!r} which belongs to a different
+                        subproject. This is a problem as it hardcodes the relative paths of these two projects.
+                        This makes it impossible to compile the project in any other directory layout and also
+                        prevents the subproject from changing its own directory layout.
+
+                        Instead of poking directly at the internals the subproject should be executed and
+                        it should set a variable that the caller can then use. Something like:
+
+                        # In subproject
+                        some_dep = declare_dependency(include_directories: include_directories('include'))
+
+                        # In subproject wrap file
+                        [provide]
+                        some = some_dep
+
+                        # In parent project
+                        some_dep = dependency('some')
+                        executable(..., dependencies: [some_dep])
+
+                        This warning will become a hard error in a future Meson release.
+                        '''))
+            absdir_src = os.path.join(absbase_src, a)
+            absdir_build = os.path.join(absbase_build, a)
+            if not os.path.isdir(absdir_src) and not os.path.isdir(absdir_build):
+                raise InvalidArguments(f'Include dir {a} does not exist.')
+        i = build.IncludeDirs(self.subdir, incdir_strings, is_system)
+        return i
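+
+    # For illustration: include_directories('include') yields an object that
+    # expands to both <srcdir>/<current subdir>/include and the corresponding
+    # build-dir path, which is why both absdir_src and absdir_build are
+    # accepted by the existence check above.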
+
+    @typed_pos_args('add_test_setup', str)
+    @typed_kwargs(
+        'add_test_setup',
+        KwargInfo('exe_wrapper', ContainerTypeInfo(list, (str, ExternalProgram)), listify=True, default=[]),
+        KwargInfo('gdb', bool, default=False),
+        KwargInfo('timeout_multiplier', int, default=1),
+        KwargInfo('exclude_suites', ContainerTypeInfo(list, str), listify=True, default=[], since='0.57.0'),
+        KwargInfo('is_default', bool, default=False, since='0.49.0'),
+        ENV_KW,
+    )
+    def func_add_test_setup(self, node: mparser.BaseNode, args: T.Tuple[str], kwargs: 'kwtypes.AddTestSetup') -> None:
+        setup_name = args[0]
+        if re.fullmatch('([_a-zA-Z][_0-9a-zA-Z]*:)?[_a-zA-Z][_0-9a-zA-Z]*', setup_name) is None:
+            raise InterpreterException('Setup name may only contain alphanumeric characters or underscores.')
+        if ":" not in setup_name:
+            setup_name = f'{(self.subproject if self.subproject else self.build.project_name)}:{setup_name}'
+
+        exe_wrapper: T.List[str] = []
+        for i in kwargs['exe_wrapper']:
+            if isinstance(i, str):
+                exe_wrapper.append(i)
+            else:
+                if not i.found():
+                    raise InterpreterException('Tried to use non-found executable.')
+                exe_wrapper += i.get_command()
+
+        timeout_multiplier = kwargs['timeout_multiplier']
+        if timeout_multiplier <= 0:
+            FeatureNew('add_test_setup() timeout_multiplier <= 0', '0.57.0').use(self.subproject)
+
+        if kwargs['is_default']:
+            if self.build.test_setup_default_name is not None:
+                raise InterpreterException(f'{self.build.test_setup_default_name!r} is already set as default. '
+                                           'is_default can be set to true only once')
+            self.build.test_setup_default_name = setup_name
+        self.build.test_setups[setup_name] = build.TestSetup(exe_wrapper, kwargs['gdb'], timeout_multiplier, kwargs['env'],
+                                                             kwargs['exclude_suites'])
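+
+    # For illustration (not upstream code), a typical meson.build call handled
+    # by func_add_test_setup above:
+    #   add_test_setup('valgrind',
+    #                  exe_wrapper: ['valgrind', '--error-exitcode=1'],
+    #                  timeout_multiplier: 3)
+    # The setup is then selected with `meson test --setup=valgrind`.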
+
+    @typed_pos_args('add_global_arguments', varargs=str)
+    @typed_kwargs('add_global_arguments', NATIVE_KW, LANGUAGE_KW)
+    def func_add_global_arguments(self, node: mparser.FunctionNode, args: T.Tuple[T.List[str]], kwargs: 'kwtypes.FuncAddProjectArgs') -> None:
+        self._add_global_arguments(node, self.build.global_args[kwargs['native']], args[0], kwargs)
+
+    @typed_pos_args('add_global_link_arguments', varargs=str)
+    @typed_kwargs('add_global_link_arguments', NATIVE_KW, LANGUAGE_KW)
+    def func_add_global_link_arguments(self, node: mparser.FunctionNode, args: T.Tuple[T.List[str]], kwargs: 'kwtypes.FuncAddProjectArgs') -> None:
+        self._add_global_arguments(node, self.build.global_link_args[kwargs['native']], args[0], kwargs)
+
+    @typed_pos_args('add_project_arguments', varargs=str)
+    @typed_kwargs('add_project_arguments', NATIVE_KW, LANGUAGE_KW)
+    def func_add_project_arguments(self, node: mparser.FunctionNode, args: T.Tuple[T.List[str]], kwargs: 'kwtypes.FuncAddProjectArgs') -> None:
+        self._add_project_arguments(node, self.build.projects_args[kwargs['native']], args[0], kwargs)
+
+    @typed_pos_args('add_project_link_arguments', varargs=str)
+    @typed_kwargs('add_project_link_arguments', NATIVE_KW, LANGUAGE_KW)
+    def func_add_project_link_arguments(self, node: mparser.FunctionNode, args: T.Tuple[T.List[str]], kwargs: 'kwtypes.FuncAddProjectArgs') -> None:
+        self._add_project_arguments(node, self.build.projects_link_args[kwargs['native']], args[0], kwargs)
+
+    @FeatureNew('add_project_dependencies', '0.63.0')
+    @typed_pos_args('add_project_dependencies', varargs=dependencies.Dependency)
+    @typed_kwargs('add_project_dependencies', NATIVE_KW, LANGUAGE_KW)
+    def func_add_project_dependencies(self, node: mparser.FunctionNode, args: T.Tuple[T.List[dependencies.Dependency]], kwargs: 'kwtypes.FuncAddProjectArgs') -> None:
+        for_machine = kwargs['native']
+        for lang in kwargs['language']:
+            if lang not in self.compilers[for_machine]:
+                raise InvalidCode(f'add_project_dependencies() called before add_language() for language "{lang}"')
+
+        for d in dependencies.get_leaf_external_dependencies(args[0]):
+            compile_args = list(d.get_compile_args())
+            system_incdir = d.get_include_type() == 'system'
+            for i in d.get_include_dirs():
+                for lang in kwargs['language']:
+                    comp = self.coredata.compilers[for_machine][lang]
+                    for idir in i.to_string_list(self.environment.get_source_dir(), self.environment.get_build_dir()):
+                        compile_args.extend(comp.get_include_args(idir, system_incdir))
+
+            self._add_project_arguments(node, self.build.projects_args[for_machine], compile_args, kwargs)
+            self._add_project_arguments(node, self.build.projects_link_args[for_machine], d.get_link_args(), kwargs)
+
+    def _warn_about_builtin_args(self, args: T.List[str]) -> None:
+        # -Wpedantic is deliberately not included, since some people want to use it but not use -Wextra
+        # see e.g.
+        # https://github.com/mesonbuild/meson/issues/3275#issuecomment-641354956
+        # https://github.com/mesonbuild/meson/issues/3742
+        warnargs = ('/W1', '/W2', '/W3', '/W4', '/Wall', '-Wall', '-Wextra')
+        optargs = ('-O0', '-O2', '-O3', '-Os', '-Oz', '/O1', '/O2', '/Os')
+        for arg in args:
+            if arg in warnargs:
+                mlog.warning(f'Consider using the built-in warning_level option instead of using "{arg}".',
+                             location=self.current_node)
+            elif arg in optargs:
+                mlog.warning(f'Consider using the built-in optimization level instead of using "{arg}".',
+                             location=self.current_node)
+            elif arg == '-Werror':
+                mlog.warning(f'Consider using the built-in werror option instead of using "{arg}".',
+                             location=self.current_node)
+            elif arg == '-g':
+                mlog.warning(f'Consider using the built-in debug option instead of using "{arg}".',
+                             location=self.current_node)
+            elif arg.startswith('-fsanitize'):
+                mlog.warning(f'Consider using the built-in option for sanitizers instead of using "{arg}".',
+                             location=self.current_node)
+            elif arg.startswith('-std=') or arg.startswith('/std:'):
+                mlog.warning(f'Consider using the built-in option for language standard version instead of using "{arg}".',
+                             location=self.current_node)
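+
+    # For illustration: the built-in alternatives suggested above are set at
+    # configure time rather than per-target, e.g.
+    #   meson setup builddir -Dwarning_level=3 -Doptimization=2 -Db_sanitize=address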
+
+    def _add_global_arguments(self, node: mparser.FunctionNode, argsdict: T.Dict[str, T.List[str]],
+                              args: T.List[str], kwargs: 'kwtypes.FuncAddProjectArgs') -> None:
+        if self.is_subproject():
+            msg = f'Function \'{node.func_name}\' cannot be used in subprojects because ' \
+                  'there is no way to make that reliable.\nPlease only call ' \
+                  'this if is_subproject() returns false. Alternatively, ' \
+                  'define a variable that\ncontains your language-specific ' \
+                  'arguments and add it to the appropriate *_args kwarg ' \
+                  'in each target.'
+            raise InvalidCode(msg)
+        frozen = self.project_args_frozen or self.global_args_frozen
+        self._add_arguments(node, argsdict, frozen, args, kwargs)
+
+    def _add_project_arguments(self, node: mparser.FunctionNode, argsdict: T.Dict[str, T.Dict[str, T.List[str]]],
+                               args: T.List[str], kwargs: 'kwtypes.FuncAddProjectArgs') -> None:
+        if self.subproject not in argsdict:
+            argsdict[self.subproject] = {}
+        self._add_arguments(node, argsdict[self.subproject],
+                            self.project_args_frozen, args, kwargs)
+
+    def _add_arguments(self, node: mparser.FunctionNode, argsdict: T.Dict[str, T.List[str]],
+                       args_frozen: bool, args: T.List[str], kwargs: 'kwtypes.FuncAddProjectArgs') -> None:
+        if args_frozen:
+            msg = f'Tried to use \'{node.func_name}\' after a build target has been declared.\n' \
+                  'This is not permitted. Please declare all arguments before your targets.'
+            raise InvalidCode(msg)
+
+        self._warn_about_builtin_args(args)
+
+        for lang in kwargs['language']:
+            argsdict[lang] = argsdict.get(lang, []) + args
+
+    @noArgsFlattening
+    @typed_pos_args('environment', optargs=[(str, list, dict)])
+    @typed_kwargs('environment', ENV_METHOD_KW, ENV_SEPARATOR_KW.evolve(since='0.62.0'))
+    def func_environment(self, node: mparser.FunctionNode, args: T.Tuple[T.Union[None, str, T.List['TYPE_var'], T.Dict[str, 'TYPE_var']]],
+                         kwargs: 'TYPE_kwargs') -> build.EnvironmentVariables:
+        init = args[0]
+        if init is not None:
+            FeatureNew.single_use('environment positional arguments', '0.52.0', self.subproject, location=node)
+            msg = ENV_KW.validator(init)
+            if msg:
+                raise InvalidArguments(f'"environment": {msg}')
+            if isinstance(init, dict) and any(isinstance(i, list) for i in init.values()):
+                FeatureNew.single_use('List of strings in dictionary value', '0.62.0', self.subproject, location=node)
+            return env_convertor_with_method(init, kwargs['method'], kwargs['separator'])
+        return build.EnvironmentVariables()
+
+    @typed_pos_args('join_paths', varargs=str, min_varargs=1)
+    @noKwargs
+    def func_join_paths(self, node: mparser.BaseNode, args: T.Tuple[T.List[str]], kwargs: 'TYPE_kwargs') -> str:
+        parts = args[0]
+        other = os.path.join('', *parts[1:]).replace('\\', '/')
+        ret = os.path.join(*parts).replace('\\', '/')
+        if isinstance(parts[0], P_OBJ.DependencyVariableString) and '..' not in other:
+            return P_OBJ.DependencyVariableString(ret)
+        elif isinstance(parts[0], P_OBJ.OptionString):
+            name = os.path.join(parts[0].optname, other)
+            return P_OBJ.OptionString(ret, name)
+        else:
+            return ret
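+
+    # For illustration: join_paths('foo', 'bar') returns 'foo/bar' (always with
+    # forward slashes); as with os.path.join, an absolute component resets the
+    # result, so join_paths('foo', '/bar') returns '/bar'.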
+
+    def run(self) -> None:
+        super().run()
+        mlog.log('Build targets in project:', mlog.bold(str(len(self.build.targets))))
+        FeatureNew.report(self.subproject)
+        FeatureDeprecated.report(self.subproject)
+        FeatureBroken.report(self.subproject)
+        if not self.is_subproject():
+            self.print_extra_warnings()
+            self._print_summary()
+
+    def print_extra_warnings(self) -> None:
+        # TODO cross compilation
+        for c in self.coredata.compilers.host.values():
+            if c.get_id() == 'clang':
+                self.check_clang_asan_lundef()
+                break
+
+    def check_clang_asan_lundef(self) -> None:
+        if OptionKey('b_lundef') not in self.coredata.options:
+            return
+        if OptionKey('b_sanitize') not in self.coredata.options:
+            return
+        if (self.coredata.options[OptionKey('b_lundef')].value and
+                self.coredata.options[OptionKey('b_sanitize')].value != 'none'):
+            value = self.coredata.options[OptionKey('b_sanitize')].value
+            mlog.warning(textwrap.dedent(f'''\
+                    Trying to use {value} sanitizer on Clang with b_lundef.
+                    This will probably not work.
+                    Try setting b_lundef to false instead.'''),
+                location=self.current_node)  # noqa: E128
+
+    # Check that the indicated file is within the same subproject
+    # as we currently are. This is to stop people doing
+    # nasty things like:
+    #
+    # f = files('../../master_src/file.c')
+    #
+    # Note that this is validated only when the file
+    # object is generated. The result can be used in a different
+    # subproject than it is defined in (due to e.g. a
+    # declare_dependency).
+    def validate_within_subproject(self, subdir, fname):
+        srcdir = Path(self.environment.source_dir)
+        builddir = Path(self.environment.build_dir)
+        if isinstance(fname, P_OBJ.DependencyVariableString):
+            def validate_installable_file(fpath: Path) -> bool:
+                installablefiles: T.Set[Path] = set()
+                for d in self.build.data:
+                    for s in d.sources:
+                        installablefiles.add(Path(s.absolute_path(srcdir, builddir)))
+                installabledirs = [str(Path(srcdir, s.source_subdir)) for s in self.build.install_dirs]
+                if fpath in installablefiles:
+                    return True
+                for d in installabledirs:
+                    if str(fpath).startswith(d):
+                        return True
+                return False
+
+            norm = Path(fname)
+            # variables built from a dep.get_variable are allowed to refer to
+            # subproject files, as long as they are scheduled to be installed.
+            if validate_installable_file(norm):
+                return
+        norm = Path(os.path.abspath(Path(srcdir, subdir, fname)))
+        if os.path.isdir(norm):
+            inputtype = 'directory'
+        else:
+            inputtype = 'file'
+        if InterpreterRuleRelaxation.ALLOW_BUILD_DIR_FILE_REFERENCES in self.relaxations and builddir in norm.parents:
+            return
+        if srcdir not in norm.parents:
+            # Grabbing files outside the source tree is ok.
+            # This is for vendor stuff like:
+            #
+            # /opt/vendorsdk/src/file_with_license_restrictions.c
+            return
+        project_root = Path(srcdir, self.root_subdir)
+        subproject_dir = project_root / self.subproject_dir
+        if norm == project_root:
+            return
+        if project_root not in norm.parents:
+            raise InterpreterException(f'Sandbox violation: Tried to grab {inputtype} {norm.name} outside current (sub)project.')
+        if subproject_dir == norm or subproject_dir in norm.parents:
+            raise InterpreterException(f'Sandbox violation: Tried to grab {inputtype} {norm.name} from a nested subproject.')
+
+    @T.overload
+    def source_strings_to_files(self, sources: T.List['mesonlib.FileOrString'], strict: bool = True) -> T.List['mesonlib.File']: ...
+
+    @T.overload
+    def source_strings_to_files(self, sources: T.List['mesonlib.FileOrString'], strict: bool = False) -> T.List['mesonlib.FileOrString']: ... # noqa: F811
+
+    @T.overload
+    def source_strings_to_files(self, sources: T.List[T.Union[mesonlib.FileOrString, build.GeneratedTypes]]) -> T.List[T.Union[mesonlib.File, build.GeneratedTypes]]: ... # noqa: F811
+
+    @T.overload
+    def source_strings_to_files(self, sources: T.List['SourceInputs'], strict: bool = True) -> T.List['SourceOutputs']: ... # noqa: F811
+
+    def source_strings_to_files(self, sources: T.List['SourceInputs'], strict: bool = True) -> T.List['SourceOutputs']: # noqa: F811
+        """Lower inputs to a list of Targets and Files, replacing any strings.
+
+        :param sources: A raw (Meson DSL) list of inputs (targets, files, and
+            strings)
+        :raises InterpreterException: if any of the inputs are of an invalid type
+        :return: A list of Targets and Files
+        """
+        mesonlib.check_direntry_issues(sources)
+        if not isinstance(sources, list):
+            sources = [sources]
+        results: T.List['SourceOutputs'] = []
+        for s in sources:
+            if isinstance(s, str):
+                if not strict and s.startswith(self.environment.get_build_dir()):
+                    results.append(s)
+                    mlog.warning(f'Source item {s!r} cannot be converted to File object, because it is a generated file. '
+                                 'This will become a hard error in the future.', location=self.current_node)
+                else:
+                    self.validate_within_subproject(self.subdir, s)
+                    results.append(mesonlib.File.from_source_file(self.environment.source_dir, self.subdir, s))
+            elif isinstance(s, mesonlib.File):
+                results.append(s)
+            elif isinstance(s, (build.GeneratedList, build.BuildTarget,
+                                build.CustomTargetIndex, build.CustomTarget,
+                                build.ExtractedObjects, build.StructuredSources)):
+                results.append(s)
+            else:
+                raise InterpreterException(f'Source item is {s!r} instead of '
+                                           'string or File-type object')
+        return results
+
+    @staticmethod
+    def validate_forbidden_targets(name: str) -> None:
+        if name.startswith('meson-internal__'):
+            raise InvalidArguments("Target names starting with 'meson-internal__' are reserved "
+                                   "for Meson's internal use. Please rename.")
+        if name.startswith('meson-') and '.' not in name:
+            raise InvalidArguments("Target names starting with 'meson-' and without a file extension "
+                                   "are reserved for Meson's internal use. Please rename.")
+        if name in coredata.FORBIDDEN_TARGET_NAMES:
+            raise InvalidArguments(f"Target name '{name}' is reserved for Meson's "
+                                   "internal use. Please rename.")
+
+    def add_target(self, name: str, tobj: build.Target) -> None:
+        if self.backend.name == 'none':
+            raise InterpreterException('Install-only backend cannot generate target rules, try using `--backend=ninja`.')
+        if name == '':
+            raise InterpreterException('Target name must not be empty.')
+        if name.strip() == '':
+            raise InterpreterException('Target name must not consist only of whitespace.')
+        if has_path_sep(name):
+            pathseg = os.path.join(self.subdir, os.path.split(name)[0])
+            if os.path.exists(os.path.join(self.source_root, pathseg)):
+                raise InvalidArguments(textwrap.dedent(f'''\
+                    Target "{name}" has a path segment pointing to directory "{pathseg}". This is an error.
+                    To define a target that builds in that directory you must define it
+                    in the meson.build file in that directory.
+            '''))
+        self.validate_forbidden_targets(name)
+        # To permit an executable and a shared library to have the
+        # same name, such as "foo.exe" and "libfoo.a".
+        idname = tobj.get_id()
+        if idname in self.build.targets:
+            raise InvalidCode(f'Tried to create target "{name}", but a target of that name already exists.')
+
+        if isinstance(tobj, build.BuildTarget):
+            self.add_languages(tobj.missing_languages, True, tobj.for_machine)
+            tobj.process_compilers_late()
+            self.add_stdlib_info(tobj)
+
+        self.build.targets[idname] = tobj
+        if idname not in self.coredata.target_guids:
+            self.coredata.target_guids[idname] = str(uuid.uuid4()).upper()
+
+    @FeatureNew('both_libraries', '0.46.0')
+    def build_both_libraries(self, node, args, kwargs):
+        shared_lib = self.build_target(node, args, kwargs, build.SharedLibrary)
+        static_lib = self.build_target(node, args, kwargs, build.StaticLibrary)
+
+        if self.backend.name == 'xcode':
+            # Xcode is a bit special in that you can't (at least for the moment)
+            # form a library only from object file inputs. The simple but inefficient
+            # solution is to use the sources directly. This will lead to them being
+            # built twice. This is unfortunate and slow, but at least it works.
+            # Feel free to submit patches to get this fixed if it is an
+            # issue for you.
+            reuse_object_files = False
+        else:
+            reuse_object_files = static_lib.pic
+
+        if reuse_object_files:
+            # Replace sources with objects from the shared library to avoid
+            # building them twice. We post-process the static library instead of
+            # removing sources from args because sources could also come from
+            # any InternalDependency, see BuildTarget.add_deps().
+            static_lib.objects.append(build.ExtractedObjects(shared_lib, shared_lib.sources, shared_lib.generated, []))
+            static_lib.sources = []
+            static_lib.generated = []
+            # Compilers with no corresponding sources confuse the backend.
+            # Keep only compilers used for linking.
+            static_lib.compilers = {k: v for k, v in static_lib.compilers.items() if k in compilers.clink_langs}
+
+        return build.BothLibraries(shared_lib, static_lib)
+
+    def build_library(self, node, args, kwargs):
+        default_library = self.coredata.get_option(OptionKey('default_library', subproject=self.subproject))
+        if default_library == 'shared':
+            return self.build_target(node, args, kwargs, build.SharedLibrary)
+        elif default_library == 'static':
+            return self.build_target(node, args, kwargs, build.StaticLibrary)
+        elif default_library == 'both':
+            return self.build_both_libraries(node, args, kwargs)
+        else:
+            raise InterpreterException(f'Unknown default_library value: {default_library}.')
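+
+    # For illustration: the branch taken above for a library() call is chosen
+    # at configure time, e.g.
+    #   meson setup builddir -Ddefault_library=both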
+
+    def build_target(self, node: mparser.BaseNode, args, kwargs, targetclass):
+        @FeatureNewKwargs('build target', '1.2.0', ['rust_dependency_map'])
+        @FeatureNewKwargs('build target', '0.42.0', ['rust_crate_type', 'build_rpath', 'implicit_include_directories'])
+        @FeatureNewKwargs('build target', '0.41.0', ['rust_args'])
+        @FeatureNewKwargs('build target', '0.38.0', ['build_by_default'])
+        @FeatureNewKwargs('build target', '0.48.0', ['gnu_symbol_visibility'])
+        def build_target_decorator_caller(self, node, args, kwargs):
+            return True
+
+        build_target_decorator_caller(self, node, args, kwargs)
+
+        name, sources = args
+        for_machine = self.machine_from_native_kwarg(kwargs)
+        if 'sources' in kwargs:
+            sources += listify(kwargs['sources'])
+        if any(isinstance(s, build.BuildTarget) for s in sources):
+            FeatureBroken.single_use('passing references to built targets as a source file', '1.1.0', self.subproject,
+                                     'Consider using `link_with` or `link_whole` if you meant to link, or dropping them as otherwise they are ignored.',
+                                     node)
+        if any(isinstance(s, build.ExtractedObjects) for s in sources):
+            FeatureBroken.single_use('passing object files as sources', '1.1.0', self.subproject,
+                                     'Pass these to the `objects` keyword instead, they are ignored when passed as sources.',
+                                     node)
+        # Go ahead and drop these here, since they're only allowed through for
+        # backwards compatibility anyway
+        sources = [s for s in sources
+                   if not isinstance(s, (build.BuildTarget, build.ExtractedObjects))]
+        sources = self.source_strings_to_files(sources)
+        objs = extract_as_list(kwargs, 'objects')
+        kwargs['dependencies'] = extract_as_list(kwargs, 'dependencies')
+        kwargs['install_mode'] = self._get_kwarg_install_mode(kwargs)
+        if 'extra_files' in kwargs:
+            ef = extract_as_list(kwargs, 'extra_files')
+            kwargs['extra_files'] = self.source_strings_to_files(ef)
+        self.check_sources_exist(os.path.join(self.source_root, self.subdir), sources)
+        if targetclass not in {build.Executable, build.SharedLibrary, build.SharedModule, build.StaticLibrary, build.Jar}:
+            mlog.debug('Unknown target type:', str(targetclass))
+            raise RuntimeError('Unreachable code')
+        self.kwarg_strings_to_includedirs(kwargs)
+
+        # Filter out kwargs from other target types. For example 'soversion'
+        # passed to library() when default_library == 'static'.
+        kwargs = {k: v for k, v in kwargs.items() if k in targetclass.known_kwargs}
+
+        srcs: T.List['SourceInputs'] = []
+        struct: T.Optional[build.StructuredSources] = build.StructuredSources()
+        for s in sources:
+            if isinstance(s, build.StructuredSources):
+                struct = struct + s
+            else:
+                srcs.append(s)
+
+        if not struct:
+            struct = None
+        else:
+            # Validate that we won't end up with two outputs with the same name.
+            # i.e, don't allow:
+            # [structured_sources('foo/bar.rs'), structured_sources('bar/bar.rs')]
+            for v in struct.sources.values():
+                outputs: T.Set[str] = set()
+                for f in v:
+                    o: T.List[str]
+                    if isinstance(f, str):
+                        o = [os.path.basename(f)]
+                    elif isinstance(f, mesonlib.File):
+                        o = [f.fname]
+                    else:
+                        o = f.get_outputs()
+                    conflicts = outputs.intersection(o)
+                    if conflicts:
+                        raise InvalidArguments.from_node(
+                            f"Conflicting sources in structured sources: {', '.join(sorted(conflicts))}",
+                            node=node)
+                    outputs.update(o)
+
+        kwargs['include_directories'] = self.extract_incdirs(kwargs)
+        target = targetclass(name, self.subdir, self.subproject, for_machine, srcs, struct, objs,
+                             self.environment, self.compilers[for_machine], kwargs)
+        target.project_version = self.project_version
+
+        self.add_target(name, target)
+        self.project_args_frozen = True
+        return target
+
+    def kwarg_strings_to_includedirs(self, kwargs):
+        if 'd_import_dirs' in kwargs:
+            items = mesonlib.extract_as_list(kwargs, 'd_import_dirs')
+            cleaned_items = []
+            for i in items:
+                if isinstance(i, str):
+                    # Backwards compatibility: this was permitted, so we must
+                    # support it for a few releases so that people can
+                    # transition to "correct" path declarations.
+                    if os.path.normpath(i).startswith(self.environment.get_source_dir()):
+                        mlog.warning('''Building a path to the source dir is not supported. Use a relative path instead.
+This will become a hard error in the future.''', location=self.current_node)
+                        i = os.path.relpath(i, os.path.join(self.environment.get_source_dir(), self.subdir))
+                        i = self.build_incdir_object([i])
+                cleaned_items.append(i)
+            kwargs['d_import_dirs'] = cleaned_items
+
+    def add_stdlib_info(self, target):
+        for l in target.compilers.keys():
+            dep = self.build.stdlibs[target.for_machine].get(l, None)
+            if dep:
+                target.add_deps(dep)
+
+    def check_sources_exist(self, subdir, sources):
+        for s in sources:
+            if not isinstance(s, str):
+                continue # This means a generated source, and those always exist.
+            fname = os.path.join(subdir, s)
+            if not os.path.isfile(fname):
+                raise InterpreterException(f'Tried to add non-existing source file {s}.')
+
+    # Only permit object extraction from the same subproject
+    def validate_extraction(self, buildtarget: mesonlib.HoldableObject) -> None:
+        if self.subproject != buildtarget.subproject:
+            raise InterpreterException('Tried to extract objects from a different subproject.')
+
+    def is_subproject(self) -> bool:
+        return self.subproject != ''
+
+    @typed_pos_args('set_variable', str, object)
+    @noKwargs
+    @noArgsFlattening
+    @noSecondLevelHolderResolving
+    def func_set_variable(self, node: mparser.BaseNode, args: T.Tuple[str, object], kwargs: 'TYPE_kwargs') -> None:
+        varname, value = args
+        self.set_variable(varname, value, holderify=True)
+
+    @typed_pos_args('get_variable', (str, Disabler), optargs=[object])
+    @noKwargs
+    @noArgsFlattening
+    @unholder_return
+    def func_get_variable(self, node: mparser.BaseNode, args: T.Tuple[T.Union[str, Disabler], T.Optional[object]],
+                          kwargs: 'TYPE_kwargs') -> 'TYPE_var':
+        varname, fallback = args
+        if isinstance(varname, Disabler):
+            return varname
+
+        try:
+            return self.variables[varname]
+        except KeyError:
+            if fallback is not None:
+                return self._holderify(fallback)
+        raise InterpreterException(f'Tried to get unknown variable "{varname}".')
+
+    @typed_pos_args('is_variable', str)
+    @noKwargs
+    def func_is_variable(self, node: mparser.BaseNode, args: T.Tuple[str], kwargs: 'TYPE_kwargs') -> bool:
+        return args[0] in self.variables
+
+    @FeatureNew('unset_variable', '0.60.0')
+    @typed_pos_args('unset_variable', str)
+    @noKwargs
+    def func_unset_variable(self, node: mparser.BaseNode, args: T.Tuple[str], kwargs: 'TYPE_kwargs') -> None:
+        varname = args[0]
+        try:
+            del self.variables[varname]
+        except KeyError:
+            raise InterpreterException(f'Tried to unset unknown variable "{varname}".')
+
+    @staticmethod
+    def machine_from_native_kwarg(kwargs: T.Dict[str, T.Any]) -> MachineChoice:
+        native = kwargs.get('native', False)
+        if not isinstance(native, bool):
+            raise InvalidArguments('Argument to "native" must be a boolean.')
+        return MachineChoice.BUILD if native else MachineChoice.HOST
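+
+    # For illustration: `native: true` in the DSL selects the build machine
+    # (the machine Meson itself runs on); the default, `native: false`,
+    # selects the host machine (the machine the built binaries run on).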
+
+    @FeatureNew('is_disabler', '0.52.0')
+    @typed_pos_args('is_disabler', object)
+    @noKwargs
+    def func_is_disabler(self, node: mparser.BaseNode, args: T.Tuple[object], kwargs: 'TYPE_kwargs') -> bool:
+        return isinstance(args[0], Disabler)
+
+    @noKwargs
+    @FeatureNew('range', '0.58.0')
+    @typed_pos_args('range', int, optargs=[int, int])
+    def func_range(self, node, args: T.Tuple[int, T.Optional[int], T.Optional[int]], kwargs: T.Dict[str, T.Any]) -> P_OBJ.RangeHolder:
+        start, stop, step = args
+        # Just like Python's range, we allow range(stop), range(start, stop), or
+        # range(start, stop, step)
+        if stop is None:
+            stop = start
+            start = 0
+        if step is None:
+            step = 1
+        # This is more strict than Python's range()
+        if start < 0:
+            raise InterpreterException('start cannot be negative')
+        if stop < start:
+            raise InterpreterException('stop cannot be less than start')
+        if step < 1:
+            raise InterpreterException('step must be >=1')
+        return P_OBJ.RangeHolder(start, stop, step, subproject=self.subproject)
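+
+    # For illustration (not upstream code), typical meson.build usage:
+    #   foreach i : range(5)         # 0, 1, 2, 3, 4
+    #   foreach i : range(2, 10, 2)  # 2, 4, 6, 8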
diff --git a/vendored-meson/meson/mesonbuild/interpreter/interpreterobjects.py b/vendored-meson/meson/mesonbuild/interpreter/interpreterobjects.py
new file mode 100644
index 000000000000..fac3b0e55ad4
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/interpreter/interpreterobjects.py
@@ -0,0 +1,1054 @@
+from __future__ import annotations
+import os
+import shlex
+import subprocess
+import copy
+import textwrap
+
+from pathlib import Path, PurePath
+
+from .. import mesonlib
+from .. import coredata
+from .. import build
+from .. import mlog
+
+from ..modules import ModuleReturnValue, ModuleObject, ModuleState, ExtensionModule
+from ..backend.backends import TestProtocol
+from ..interpreterbase import (
+                               ContainerTypeInfo, KwargInfo, MesonOperator,
+                               MesonInterpreterObject, ObjectHolder, MutableInterpreterObject,
+                               FeatureNew, FeatureDeprecated,
+                               typed_pos_args, typed_kwargs, typed_operator,
+                               noArgsFlattening, noPosargs, noKwargs, unholder_return,
+                               flatten, resolve_second_level_holders, InterpreterException, InvalidArguments, InvalidCode)
+from ..interpreter.type_checking import NoneType, ENV_SEPARATOR_KW
+from ..dependencies import Dependency, ExternalLibrary, InternalDependency
+from ..programs import ExternalProgram
+from ..mesonlib import HoldableObject, OptionKey, listify, Popen_safe
+
+import typing as T
+
+if T.TYPE_CHECKING:
+    from . import kwargs
+    from ..cmake.interpreter import CMakeInterpreter
+    from ..envconfig import MachineInfo
+    from ..interpreterbase import FeatureCheckBase, InterpreterObject, SubProject, TYPE_var, TYPE_kwargs, TYPE_nvar, TYPE_nkwargs
+    from .interpreter import Interpreter
+
+    from typing_extensions import TypedDict
+
+    class EnvironmentSeparatorKW(TypedDict):
+
+        separator: str
+
+_ERROR_MSG_KW: KwargInfo[T.Optional[str]] = KwargInfo('error_message', (str, NoneType))
+
+
+def extract_required_kwarg(kwargs: 'kwargs.ExtractRequired',
+                           subproject: 'SubProject',
+                           feature_check: T.Optional[FeatureCheckBase] = None,
+                           default: bool = True) -> T.Tuple[bool, bool, T.Optional[str]]:
+    val = kwargs.get('required', default)
+    disabled = False
+    required = False
+    feature: T.Optional[str] = None
+    if isinstance(val, coredata.UserFeatureOption):
+        if not feature_check:
+            feature_check = FeatureNew('User option "feature"', '0.47.0')
+        feature_check.use(subproject)
+        feature = val.name
+        if val.is_disabled():
+            disabled = True
+        elif val.is_enabled():
+            required = True
+    elif isinstance(val, bool):
+        required = val
+    else:
+        raise InterpreterException('required keyword argument must be boolean or a feature option')
+
+    # Keep boolean value in kwargs to simplify other places where this kwarg is
+    # checked.
+    # TODO: this should be removed, and those callers should learn about FeatureOptions
+    kwargs['required'] = required
+
+    return disabled, required, feature
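+
+# For illustration (not upstream code), the kwarg handled above as written in
+# a meson.build file:
+#   dependency('foo', required: get_option('with_foo'))
+# where 'with_foo' is a feature option: 'disabled' skips the lookup entirely,
+# 'enabled' turns a failed lookup into a hard error, and 'auto' does neither.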
+
+def extract_search_dirs(kwargs: 'kwargs.ExtractSearchDirs') -> T.List[str]:
+    search_dirs_str = mesonlib.stringlistify(kwargs.get('dirs', []))
+    search_dirs = [Path(d).expanduser() for d in search_dirs_str]
+    for d in search_dirs:
+        if mesonlib.is_windows() and d.root.startswith('\\'):
+            # A Unix path starting with `/` is not absolute on Windows.
+            # Discard it without failing, for end-user ease of
+            # cross-platform directory arrays.
+            continue
+        if not d.is_absolute():
+            raise InvalidCode(f'Search directory {d} is not an absolute path.')
+    return [str(s) for s in search_dirs]
+
+class FeatureOptionHolder(ObjectHolder[coredata.UserFeatureOption]):
+    def __init__(self, option: coredata.UserFeatureOption, interpreter: 'Interpreter'):
+        super().__init__(option, interpreter)
+        if option and option.is_auto():
+            # TODO: we need to cast here because options is not a TypedDict
+            auto = T.cast('coredata.UserFeatureOption', self.env.coredata.options[OptionKey('auto_features')])
+            self.held_object = copy.copy(auto)
+            self.held_object.name = option.name
+        self.methods.update({'enabled': self.enabled_method,
+                             'disabled': self.disabled_method,
+                             'allowed': self.allowed_method,
+                             'auto': self.auto_method,
+                             'require': self.require_method,
+                             'disable_auto_if': self.disable_auto_if_method,
+                             'enable_auto_if': self.enable_auto_if_method,
+                             'disable_if': self.disable_if_method,
+                             'enable_if': self.enable_if_method,
+                             })
+
+    @property
+    def value(self) -> str:
+        return 'disabled' if not self.held_object else self.held_object.value
+
+    def as_disabled(self) -> coredata.UserFeatureOption:
+        disabled = copy.deepcopy(self.held_object)
+        disabled.value = 'disabled'
+        return disabled
+
+    def as_enabled(self) -> coredata.UserFeatureOption:
+        enabled = copy.deepcopy(self.held_object)
+        enabled.value = 'enabled'
+        return enabled
+
+    @noPosargs
+    @noKwargs
+    def enabled_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> bool:
+        return self.value == 'enabled'
+
+    @noPosargs
+    @noKwargs
+    def disabled_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> bool:
+        return self.value == 'disabled'
+
+    @noPosargs
+    @noKwargs
+    @FeatureNew('feature_option.allowed()', '0.59.0')
+    def allowed_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> bool:
+        return self.value != 'disabled'
+
+    @noPosargs
+    @noKwargs
+    def auto_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> bool:
+        return self.value == 'auto'
+
+    def _disable_if(self, condition: bool, message: T.Optional[str]) -> coredata.UserFeatureOption:
+        if not condition:
+            return copy.deepcopy(self.held_object)
+
+        if self.value == 'enabled':
+            err_msg = f'Feature {self.held_object.name} cannot be enabled'
+            if message:
+                err_msg += f': {message}'
+            raise InterpreterException(err_msg)
+        return self.as_disabled()
+
+    @FeatureNew('feature_option.require()', '0.59.0')
+    @typed_pos_args('feature_option.require', bool)
+    @typed_kwargs(
+        'feature_option.require',
+        _ERROR_MSG_KW,
+    )
+    def require_method(self, args: T.Tuple[bool], kwargs: 'kwargs.FeatureOptionRequire') -> coredata.UserFeatureOption:
+        return self._disable_if(not args[0], kwargs['error_message'])
+
+    @FeatureNew('feature_option.disable_if()', '1.1.0')
+    @typed_pos_args('feature_option.disable_if', bool)
+    @typed_kwargs(
+        'feature_option.disable_if',
+        _ERROR_MSG_KW,
+    )
+    def disable_if_method(self, args: T.Tuple[bool], kwargs: 'kwargs.FeatureOptionRequire') -> coredata.UserFeatureOption:
+        return self._disable_if(args[0], kwargs['error_message'])
+
+    @FeatureNew('feature_option.enable_if()', '1.1.0')
+    @typed_pos_args('feature_option.enable_if', bool)
+    @typed_kwargs(
+        'feature_option.enable_if',
+        _ERROR_MSG_KW,
+    )
+    def enable_if_method(self, args: T.Tuple[bool], kwargs: 'kwargs.FeatureOptionRequire') -> coredata.UserFeatureOption:
+        if not args[0]:
+            return copy.deepcopy(self.held_object)
+
+        if self.value == 'disabled':
+            err_msg = f'Feature {self.held_object.name} cannot be disabled'
+            if kwargs['error_message']:
+                err_msg += f': {kwargs["error_message"]}'
+            raise InterpreterException(err_msg)
+        return self.as_enabled()
+
+    @FeatureNew('feature_option.disable_auto_if()', '0.59.0')
+    @noKwargs
+    @typed_pos_args('feature_option.disable_auto_if', bool)
+    def disable_auto_if_method(self, args: T.Tuple[bool], kwargs: TYPE_kwargs) -> coredata.UserFeatureOption:
+        return copy.deepcopy(self.held_object) if self.value != 'auto' or not args[0] else self.as_disabled()
+
+    @FeatureNew('feature_option.enable_auto_if()', '1.1.0')
+    @noKwargs
+    @typed_pos_args('feature_option.enable_auto_if', bool)
+    def enable_auto_if_method(self, args: T.Tuple[bool], kwargs: TYPE_kwargs) -> coredata.UserFeatureOption:
+        return self.as_enabled() if self.value == 'auto' and args[0] else copy.deepcopy(self.held_object)
+
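+# Illustrative meson.build usage of the feature-option methods above (a
+# sketch, assuming a project option named 'docs' of type 'feature'):
+#
+#   docs = get_option('docs').require(
+#     find_program('sphinx-build', required: false).found(),
+#     error_message: 'sphinx-build is needed to build the documentation')
+#   if docs.allowed()
+#     subdir('docs')
+#   endif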
+
+class RunProcess(MesonInterpreterObject):
+
+    def __init__(self,
+                 cmd: ExternalProgram,
+                 args: T.List[str],
+                 env: build.EnvironmentVariables,
+                 source_dir: str,
+                 build_dir: str,
+                 subdir: str,
+                 mesonintrospect: T.List[str],
+                 in_builddir: bool = False,
+                 check: bool = False,
+                 capture: bool = True) -> None:
+        super().__init__()
+        if not isinstance(cmd, ExternalProgram):
+            raise AssertionError('BUG: RunProcess must be passed an ExternalProgram')
+        self.capture = capture
+        self.returncode, self.stdout, self.stderr = self.run_command(cmd, args, env, source_dir, build_dir, subdir, mesonintrospect, in_builddir, check)
+        self.methods.update({'returncode': self.returncode_method,
+                             'stdout': self.stdout_method,
+                             'stderr': self.stderr_method,
+                             })
+
+    def run_command(self,
+                    cmd: ExternalProgram,
+                    args: T.List[str],
+                    env: build.EnvironmentVariables,
+                    source_dir: str,
+                    build_dir: str,
+                    subdir: str,
+                    mesonintrospect: T.List[str],
+                    in_builddir: bool,
+                    check: bool = False) -> T.Tuple[int, str, str]:
+        command_array = cmd.get_command() + args
+        menv = {'MESON_SOURCE_ROOT': source_dir,
+                'MESON_BUILD_ROOT': build_dir,
+                'MESON_SUBDIR': subdir,
+                'MESONINTROSPECT': ' '.join([shlex.quote(x) for x in mesonintrospect]),
+                }
+        if in_builddir:
+            cwd = os.path.join(build_dir, subdir)
+        else:
+            cwd = os.path.join(source_dir, subdir)
+        child_env = os.environ.copy()
+        child_env.update(menv)
+        child_env = env.get_env(child_env)
+        stdout = subprocess.PIPE if self.capture else subprocess.DEVNULL
+        mlog.debug('Running command:', mesonlib.join_args(command_array))
+        try:
+            p, o, e = Popen_safe(command_array, stdout=stdout, env=child_env, cwd=cwd)
+            if self.capture:
+                mlog.debug('--- stdout ---')
+                mlog.debug(o)
+            else:
+                o = ''
+                mlog.debug('--- stdout disabled ---')
+            mlog.debug('--- stderr ---')
+            mlog.debug(e)
+            mlog.debug('')
+
+            if check and p.returncode != 0:
+                raise InterpreterException('Command `{}` failed with status {}.'.format(mesonlib.join_args(command_array), p.returncode))
+
+            return p.returncode, o, e
+        except FileNotFoundError:
+            raise InterpreterException('Could not execute command `%s`.' % mesonlib.join_args(command_array))
+
+    @noPosargs
+    @noKwargs
+    def returncode_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> int:
+        return self.returncode
+
+    @noPosargs
+    @noKwargs
+    def stdout_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
+        return self.stdout
+
+    @noPosargs
+    @noKwargs
+    def stderr_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
+        return self.stderr
+
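+# RunProcess backs run_command(); illustrative meson.build usage (a sketch):
+#
+#   res = run_command('git', 'rev-parse', '--short', 'HEAD', check: false)
+#   if res.returncode() == 0
+#     revision = res.stdout().strip()
+#   endif
+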
+class EnvironmentVariablesHolder(ObjectHolder[build.EnvironmentVariables], MutableInterpreterObject):
+
+    def __init__(self, obj: build.EnvironmentVariables, interpreter: 'Interpreter'):
+        super().__init__(obj, interpreter)
+        self.methods.update({'set': self.set_method,
+                             'append': self.append_method,
+                             'prepend': self.prepend_method,
+                             })
+
+    def __repr__(self) -> str:
+        repr_str = "<{0}: {1}>"
+        return repr_str.format(self.__class__.__name__, self.held_object.envvars)
+
+    def __deepcopy__(self, memo: T.Dict[str, object]) -> 'EnvironmentVariablesHolder':
+        # Avoid trying to copy the interpreter
+        return EnvironmentVariablesHolder(copy.deepcopy(self.held_object), self.interpreter)
+
+    def warn_if_has_name(self, name: str) -> None:
+        # Multiple append/prepend operations were not supported until 0.58.0.
+        if self.held_object.has_name(name):
+            m = f'Overriding previous value of environment variable {name!r} with a new one'
+            FeatureNew(m, '0.58.0').use(self.subproject, self.current_node)
+
+    @typed_pos_args('environment.set', str, varargs=str, min_varargs=1)
+    @typed_kwargs('environment.set', ENV_SEPARATOR_KW)
+    def set_method(self, args: T.Tuple[str, T.List[str]], kwargs: 'EnvironmentSeparatorKW') -> None:
+        name, values = args
+        self.held_object.set(name, values, kwargs['separator'])
+
+    @typed_pos_args('environment.append', str, varargs=str, min_varargs=1)
+    @typed_kwargs('environment.append', ENV_SEPARATOR_KW)
+    def append_method(self, args: T.Tuple[str, T.List[str]], kwargs: 'EnvironmentSeparatorKW') -> None:
+        name, values = args
+        self.warn_if_has_name(name)
+        self.held_object.append(name, values, kwargs['separator'])
+
+    @typed_pos_args('environment.prepend', str, varargs=str, min_varargs=1)
+    @typed_kwargs('environment.prepend', ENV_SEPARATOR_KW)
+    def prepend_method(self, args: T.Tuple[str, T.List[str]], kwargs: 'EnvironmentSeparatorKW') -> None:
+        name, values = args
+        self.warn_if_has_name(name)
+        self.held_object.prepend(name, values, kwargs['separator'])
+
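+# Illustrative meson.build usage (a sketch; `exe` stands for an executable
+# target defined elsewhere):
+#
+#   env = environment()
+#   env.set('TESTDIR', meson.current_source_dir())
+#   env.append('PATH', meson.current_build_dir(), separator: ':')
+#   test('integration', exe, env: env)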
+
+_CONF_DATA_SET_KWS: KwargInfo[T.Optional[str]] = KwargInfo('description', (str, NoneType))
+
+
+class ConfigurationDataHolder(ObjectHolder[build.ConfigurationData], MutableInterpreterObject):
+
+    def __init__(self, obj: build.ConfigurationData, interpreter: 'Interpreter'):
+        super().__init__(obj, interpreter)
+        self.methods.update({'set': self.set_method,
+                             'set10': self.set10_method,
+                             'set_quoted': self.set_quoted_method,
+                             'has': self.has_method,
+                             'get': self.get_method,
+                             'keys': self.keys_method,
+                             'get_unquoted': self.get_unquoted_method,
+                             'merge_from': self.merge_from_method,
+                             })
+
+    def __deepcopy__(self, memo: T.Dict) -> 'ConfigurationDataHolder':
+        return ConfigurationDataHolder(copy.deepcopy(self.held_object), self.interpreter)
+
+    def is_used(self) -> bool:
+        return self.held_object.used
+
+    def __check_used(self) -> None:
+        if self.is_used():
+            raise InterpreterException("Can not set values on configuration object that has been used.")
+
+    @typed_pos_args('configuration_data.set', str, (str, int, bool))
+    @typed_kwargs('configuration_data.set', _CONF_DATA_SET_KWS)
+    def set_method(self, args: T.Tuple[str, T.Union[str, int, bool]], kwargs: 'kwargs.ConfigurationDataSet') -> None:
+        self.__check_used()
+        self.held_object.values[args[0]] = (args[1], kwargs['description'])
+
+    @typed_pos_args('configuration_data.set_quoted', str, str)
+    @typed_kwargs('configuration_data.set_quoted', _CONF_DATA_SET_KWS)
+    def set_quoted_method(self, args: T.Tuple[str, str], kwargs: 'kwargs.ConfigurationDataSet') -> None:
+        self.__check_used()
+        escaped_val = '\\"'.join(args[1].split('"'))
+        self.held_object.values[args[0]] = (f'"{escaped_val}"', kwargs['description'])
+
+    @typed_pos_args('configuration_data.set10', str, (int, bool))
+    @typed_kwargs('configuration_data.set10', _CONF_DATA_SET_KWS)
+    def set10_method(self, args: T.Tuple[str, T.Union[int, bool]], kwargs: 'kwargs.ConfigurationDataSet') -> None:
+        self.__check_used()
+        # bool is a subclass of int, so we need to check for bool explicitly.
+        # We already have typed_pos_args checking that this is either a bool or
+        # an int.
+        if not isinstance(args[1], bool):
+            mlog.deprecation('configuration_data.set10 with number. the `set10` '
+                             'method should only be used with booleans',
+                             location=self.interpreter.current_node)
+            if args[1] < 0:
+                mlog.warning('Passing a number that is less than 0 may not have the intended result, '
+                             'as meson will treat all non-zero values as true.',
+                             location=self.interpreter.current_node)
+        self.held_object.values[args[0]] = (int(args[1]), kwargs['description'])
+
+    @typed_pos_args('configuration_data.has', (str, int, bool))
+    @noKwargs
+    def has_method(self, args: T.Tuple[T.Union[str, int, bool]], kwargs: TYPE_kwargs) -> bool:
+        return args[0] in self.held_object.values
+
+    @FeatureNew('configuration_data.get()', '0.38.0')
+    @typed_pos_args('configuration_data.get', str, optargs=[(str, int, bool)])
+    @noKwargs
+    def get_method(self, args: T.Tuple[str, T.Optional[T.Union[str, int, bool]]],
+                   kwargs: TYPE_kwargs) -> T.Union[str, int, bool]:
+        name = args[0]
+        if name in self.held_object:
+            return self.held_object.get(name)[0]
+        elif args[1] is not None:
+            return args[1]
+        raise InterpreterException(f'Entry {name} not in configuration data.')
+
+    @FeatureNew('configuration_data.get_unquoted()', '0.44.0')
+    @typed_pos_args('configuration_data.get_unquoted', str, optargs=[(str, int, bool)])
+    @noKwargs
+    def get_unquoted_method(self, args: T.Tuple[str, T.Optional[T.Union[str, int, bool]]],
+                            kwargs: TYPE_kwargs) -> T.Union[str, int, bool]:
+        name = args[0]
+        if name in self.held_object:
+            val = self.held_object.get(name)[0]
+        elif args[1] is not None:
+            val = args[1]
+        else:
+            raise InterpreterException(f'Entry {name} not in configuration data.')
+        if isinstance(val, str) and val[0] == '"' and val[-1] == '"':
+            return val[1:-1]
+        return val
+
+    def get(self, name: str) -> T.Tuple[T.Union[str, int, bool], T.Optional[str]]:
+        return self.held_object.values[name]
+
+    @FeatureNew('configuration_data.keys()', '0.57.0')
+    @noPosargs
+    @noKwargs
+    def keys_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> T.List[str]:
+        return sorted(self.keys())
+
+    def keys(self) -> T.List[str]:
+        return list(self.held_object.values.keys())
+
+    @typed_pos_args('configuration_data.merge_from', build.ConfigurationData)
+    @noKwargs
+    def merge_from_method(self, args: T.Tuple[build.ConfigurationData], kwargs: TYPE_kwargs) -> None:
+        from_object = args[0]
+        self.held_object.values.update(from_object.values)
+
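+# Illustrative meson.build usage (a sketch; `zlib` stands for a previously
+# found dependency object):
+#
+#   conf = configuration_data()
+#   conf.set_quoted('PACKAGE_VERSION', meson.project_version())
+#   conf.set10('HAVE_ZLIB', zlib.found())
+#   configure_file(output: 'config.h', configuration: conf)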
+
+_PARTIAL_DEP_KWARGS = [
+    KwargInfo('compile_args', bool, default=False),
+    KwargInfo('link_args',    bool, default=False),
+    KwargInfo('links',        bool, default=False),
+    KwargInfo('includes',     bool, default=False),
+    KwargInfo('sources',      bool, default=False),
+]
+
+class DependencyHolder(ObjectHolder[Dependency]):
+    def __init__(self, dep: Dependency, interpreter: 'Interpreter'):
+        super().__init__(dep, interpreter)
+        self.methods.update({'found': self.found_method,
+                             'type_name': self.type_name_method,
+                             'version': self.version_method,
+                             'name': self.name_method,
+                             'get_pkgconfig_variable': self.pkgconfig_method,
+                             'get_configtool_variable': self.configtool_method,
+                             'get_variable': self.variable_method,
+                             'partial_dependency': self.partial_dependency_method,
+                             'include_type': self.include_type_method,
+                             'as_system': self.as_system_method,
+                             'as_link_whole': self.as_link_whole_method,
+                             })
+
+    def found(self) -> bool:
+        return self.found_method([], {})
+
+    @noPosargs
+    @noKwargs
+    def type_name_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
+        return self.held_object.type_name
+
+    @noPosargs
+    @noKwargs
+    def found_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> bool:
+        if self.held_object.type_name == 'internal':
+            return True
+        return self.held_object.found()
+
+    @noPosargs
+    @noKwargs
+    def version_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
+        return self.held_object.get_version()
+
+    @noPosargs
+    @noKwargs
+    def name_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
+        return self.held_object.get_name()
+
+    @FeatureDeprecated('dependency.get_pkgconfig_variable', '0.56.0',
+                       'use dependency.get_variable(pkgconfig : ...) instead')
+    @typed_pos_args('dependency.get_pkgconfig_variable', str)
+    @typed_kwargs(
+        'dependency.get_pkgconfig_variable',
+        KwargInfo('default', (str, NoneType)),
+        KwargInfo(
+            'define_variable',
+            ContainerTypeInfo(list, str, pairs=True),
+            default=[],
+            listify=True,
+            validator=lambda x: 'must be of length 2 or empty' if len(x) not in {0, 2} else None,
+        ),
+    )
+    def pkgconfig_method(self, args: T.Tuple[str], kwargs: 'kwargs.DependencyPkgConfigVar') -> str:
+        return self.held_object.get_pkgconfig_variable(args[0], **kwargs)
+
+    @FeatureNew('dependency.get_configtool_variable', '0.44.0')
+    @FeatureDeprecated('dependency.get_configtool_variable', '0.56.0',
+                       'use dependency.get_variable(configtool : ...) instead')
+    @noKwargs
+    @typed_pos_args('dependency.get_configtool_variable', str)
+    def configtool_method(self, args: T.Tuple[str], kwargs: TYPE_kwargs) -> str:
+        return self.held_object.get_configtool_variable(args[0])
+
+    @FeatureNew('dependency.partial_dependency', '0.46.0')
+    @noPosargs
+    @typed_kwargs('dependency.partial_dependency', *_PARTIAL_DEP_KWARGS)
+    def partial_dependency_method(self, args: T.List[TYPE_nvar], kwargs: 'kwargs.DependencyMethodPartialDependency') -> Dependency:
+        pdep = self.held_object.get_partial_dependency(**kwargs)
+        return pdep
+
+    @FeatureNew('dependency.get_variable', '0.51.0')
+    @typed_pos_args('dependency.get_variable', optargs=[str])
+    @typed_kwargs(
+        'dependency.get_variable',
+        KwargInfo('cmake', (str, NoneType)),
+        KwargInfo('pkgconfig', (str, NoneType)),
+        KwargInfo('configtool', (str, NoneType)),
+        KwargInfo('internal', (str, NoneType), since='0.54.0'),
+        KwargInfo('default_value', (str, NoneType)),
+        KwargInfo('pkgconfig_define', ContainerTypeInfo(list, str, pairs=True), default=[], listify=True),
+    )
+    def variable_method(self, args: T.Tuple[T.Optional[str]], kwargs: 'kwargs.DependencyGetVariable') -> str:
+        default_varname = args[0]
+        if default_varname is not None:
+            FeatureNew('Positional argument to dependency.get_variable()', '0.58.0').use(self.subproject, self.current_node)
+        return self.held_object.get_variable(
+            cmake=kwargs['cmake'] or default_varname,
+            pkgconfig=kwargs['pkgconfig'] or default_varname,
+            configtool=kwargs['configtool'] or default_varname,
+            internal=kwargs['internal'] or default_varname,
+            default_value=kwargs['default_value'],
+            pkgconfig_define=kwargs['pkgconfig_define'],
+        )
+
+    @FeatureNew('dependency.include_type', '0.52.0')
+    @noPosargs
+    @noKwargs
+    def include_type_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
+        return self.held_object.get_include_type()
+
+    @FeatureNew('dependency.as_system', '0.52.0')
+    @noKwargs
+    @typed_pos_args('dependency.as_system', optargs=[str])
+    def as_system_method(self, args: T.Tuple[T.Optional[str]], kwargs: TYPE_kwargs) -> Dependency:
+        return self.held_object.generate_system_dependency(args[0] or 'system')
+
+    @FeatureNew('dependency.as_link_whole', '0.56.0')
+    @noKwargs
+    @noPosargs
+    def as_link_whole_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> Dependency:
+        if not isinstance(self.held_object, InternalDependency):
+            raise InterpreterException('as_link_whole method is only supported on declare_dependency() objects')
+        new_dep = self.held_object.generate_link_whole_dependency()
+        return new_dep
+
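+# Illustrative meson.build usage of the dependency methods above (a sketch):
+#
+#   zlib = dependency('zlib', required: false)
+#   if zlib.found()
+#     prefix = zlib.get_variable(pkgconfig: 'prefix', default_value: '/usr')
+#   endif
+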
+class ExternalProgramHolder(ObjectHolder[ExternalProgram]):
+    def __init__(self, ep: ExternalProgram, interpreter: 'Interpreter') -> None:
+        super().__init__(ep, interpreter)
+        self.methods.update({'found': self.found_method,
+                             'path': self.path_method,
+                             'version': self.version_method,
+                             'full_path': self.full_path_method})
+
+    @noPosargs
+    @noKwargs
+    def found_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> bool:
+        return self.found()
+
+    @noPosargs
+    @noKwargs
+    @FeatureDeprecated('ExternalProgram.path', '0.55.0',
+                       'use ExternalProgram.full_path() instead')
+    def path_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
+        return self._full_path()
+
+    @noPosargs
+    @noKwargs
+    @FeatureNew('ExternalProgram.full_path', '0.55.0')
+    def full_path_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
+        return self._full_path()
+
+    def _full_path(self) -> str:
+        if not self.found():
+            raise InterpreterException('Unable to get the path of a not-found external program')
+        path = self.held_object.get_path()
+        assert path is not None
+        return path
+
+    @noPosargs
+    @noKwargs
+    @FeatureNew('ExternalProgram.version', '0.62.0')
+    def version_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
+        if not self.found():
+            raise InterpreterException('Unable to get the version of a not-found external program')
+        try:
+            return self.held_object.get_version(self.interpreter)
+        except mesonlib.MesonException:
+            return 'unknown'
+
+    def found(self) -> bool:
+        return self.held_object.found()
+
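+# Illustrative meson.build usage (a sketch):
+#
+#   py = find_program('python3', required: false)
+#   if py.found()
+#     message('using @0@'.format(py.full_path()))
+#   endif
+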
+class ExternalLibraryHolder(ObjectHolder[ExternalLibrary]):
+    def __init__(self, el: ExternalLibrary, interpreter: 'Interpreter'):
+        super().__init__(el, interpreter)
+        self.methods.update({'found': self.found_method,
+                             'type_name': self.type_name_method,
+                             'partial_dependency': self.partial_dependency_method,
+                             })
+
+    @noPosargs
+    @noKwargs
+    def type_name_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
+        return self.held_object.type_name
+
+    @noPosargs
+    @noKwargs
+    def found_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> bool:
+        return self.held_object.found()
+
+    @FeatureNew('dependency.partial_dependency', '0.46.0')
+    @noPosargs
+    @typed_kwargs('dependency.partial_dependency', *_PARTIAL_DEP_KWARGS)
+    def partial_dependency_method(self, args: T.List[TYPE_nvar], kwargs: 'kwargs.DependencyMethodPartialDependency') -> Dependency:
+        pdep = self.held_object.get_partial_dependency(**kwargs)
+        return pdep
+
+# A machine that's statically known from the cross file
+class MachineHolder(ObjectHolder['MachineInfo']):
+    def __init__(self, machine_info: 'MachineInfo', interpreter: 'Interpreter'):
+        super().__init__(machine_info, interpreter)
+        self.methods.update({'system': self.system_method,
+                             'cpu': self.cpu_method,
+                             'cpu_family': self.cpu_family_method,
+                             'endian': self.endian_method,
+                             'kernel': self.kernel_method,
+                             'subsystem': self.subsystem_method,
+                             })
+
+    @noPosargs
+    @noKwargs
+    def cpu_family_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
+        return self.held_object.cpu_family
+
+    @noPosargs
+    @noKwargs
+    def cpu_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
+        return self.held_object.cpu
+
+    @noPosargs
+    @noKwargs
+    def system_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
+        return self.held_object.system
+
+    @noPosargs
+    @noKwargs
+    def endian_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
+        return self.held_object.endian
+
+    @noPosargs
+    @noKwargs
+    def kernel_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
+        if self.held_object.kernel is not None:
+            return self.held_object.kernel
+        raise InterpreterException('Kernel not defined or could not be autodetected.')
+
+    @noPosargs
+    @noKwargs
+    def subsystem_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
+        if self.held_object.subsystem is not None:
+            return self.held_object.subsystem
+        raise InterpreterException('Subsystem not defined or could not be autodetected.')
+
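+# The same methods are exposed on build_machine, host_machine and
+# target_machine; illustrative meson.build usage (a sketch):
+#
+#   if host_machine.system() == 'windows' and host_machine.cpu_family() == 'x86_64'
+#     add_project_arguments('-DWIN64', language: 'c')
+#   endif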
+
+class IncludeDirsHolder(ObjectHolder[build.IncludeDirs]):
+    pass
+
+class FileHolder(ObjectHolder[mesonlib.File]):
+    pass
+
+class HeadersHolder(ObjectHolder[build.Headers]):
+    pass
+
+class DataHolder(ObjectHolder[build.Data]):
+    pass
+
+class SymlinkDataHolder(ObjectHolder[build.SymlinkData]):
+    pass
+
+class InstallDirHolder(ObjectHolder[build.InstallDir]):
+    pass
+
+class ManHolder(ObjectHolder[build.Man]):
+    pass
+
+class EmptyDirHolder(ObjectHolder[build.EmptyDir]):
+    pass
+
+class GeneratedObjectsHolder(ObjectHolder[build.ExtractedObjects]):
+    pass
+
+class Test(MesonInterpreterObject):
+    def __init__(self, name: str, project: str, suite: T.List[str],
+                 exe: T.Union[ExternalProgram, build.Executable, build.CustomTarget],
+                 depends: T.List[T.Union[build.CustomTarget, build.BuildTarget]],
+                 is_parallel: bool,
+                 cmd_args: T.List[T.Union[str, mesonlib.File, build.Target]],
+                 env: build.EnvironmentVariables,
+                 should_fail: bool, timeout: int, workdir: T.Optional[str], protocol: str,
+                 priority: int, verbose: bool):
+        super().__init__()
+        self.name = name
+        self.suite = listify(suite)
+        self.project_name = project
+        self.exe = exe
+        self.depends = depends
+        self.is_parallel = is_parallel
+        self.cmd_args = cmd_args
+        self.env = env
+        self.should_fail = should_fail
+        self.timeout = timeout
+        self.workdir = workdir
+        self.protocol = TestProtocol.from_str(protocol)
+        self.priority = priority
+        self.verbose = verbose
+
+    def get_exe(self) -> T.Union[ExternalProgram, build.Executable, build.CustomTarget]:
+        return self.exe
+
+    def get_name(self) -> str:
+        return self.name
+
+class NullSubprojectInterpreter(HoldableObject):
+    pass
+
+# TODO: This should really be an `ObjectHolder`, but the additional stuff in this
+#       class prevents this. Thus, this class should be split into a pure
+#       `ObjectHolder` and a class specifically for storing in `Interpreter`.
+class SubprojectHolder(MesonInterpreterObject):
+
+    def __init__(self, subinterpreter: T.Union['Interpreter', NullSubprojectInterpreter],
+                 subdir: str,
+                 warnings: int = 0,
+                 disabled_feature: T.Optional[str] = None,
+                 exception: T.Optional[Exception] = None,
+                 callstack: T.Optional[T.List[str]] = None) -> None:
+        super().__init__()
+        self.held_object = subinterpreter
+        self.warnings = warnings
+        self.disabled_feature = disabled_feature
+        self.exception = exception
+        self.subdir = PurePath(subdir).as_posix()
+        self.cm_interpreter: T.Optional[CMakeInterpreter] = None
+        self.callstack = callstack
+        self.methods.update({'get_variable': self.get_variable_method,
+                             'found': self.found_method,
+                             })
+
+    @noPosargs
+    @noKwargs
+    def found_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> bool:
+        return self.found()
+
+    def found(self) -> bool:
+        return not isinstance(self.held_object, NullSubprojectInterpreter)
+
+    @noKwargs
+    @noArgsFlattening
+    @unholder_return
+    def get_variable_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> T.Union[TYPE_var, InterpreterObject]:
+        if len(args) < 1 or len(args) > 2:
+            raise InterpreterException('Get_variable takes one or two arguments.')
+        if isinstance(self.held_object, NullSubprojectInterpreter):  # == not self.found()
+            raise InterpreterException(f'Subproject "{self.subdir}" is disabled; can\'t call get_variable on it.')
+        varname = args[0]
+        if not isinstance(varname, str):
+            raise InterpreterException('Get_variable first argument must be a string.')
+        try:
+            return self.held_object.variables[varname]
+        except KeyError:
+            pass
+
+        if len(args) == 2:
+            return self.held_object._holderify(args[1])
+
+        raise InvalidArguments(f'Requested variable "{varname}" not found.')
+
+class ModuleObjectHolder(ObjectHolder[ModuleObject]):
+    def method_call(self, method_name: str, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> TYPE_var:
+        modobj = self.held_object
+        method = modobj.methods.get(method_name)
+        if not method:
+            raise InvalidCode(f'Unknown method {method_name!r} in object.')
+        if not getattr(method, 'no-args-flattening', False):
+            args = flatten(args)
+        if not getattr(method, 'no-second-level-holder-flattening', False):
+            args, kwargs = resolve_second_level_holders(args, kwargs)
+        state = ModuleState(self.interpreter)
+        # Many modules call e.g. self.interpreter.find_program_impl(), so we
+        # have to ensure they use the current interpreter, not the one that
+        # first imported the module; otherwise they would pick up outdated
+        # overrides.
+        if isinstance(modobj, ExtensionModule):
+            modobj.interpreter = self.interpreter
+        ret = method(state, args, kwargs)
+        if isinstance(ret, ModuleReturnValue):
+            self.interpreter.process_new_values(ret.new_objects)
+            ret = ret.return_value
+        return ret
+
+class MutableModuleObjectHolder(ModuleObjectHolder, MutableInterpreterObject):
+    def __deepcopy__(self, memo: T.Dict[int, T.Any]) -> 'MutableModuleObjectHolder':
+        # Deepcopy only held object, not interpreter
+        modobj = copy.deepcopy(self.held_object, memo)
+        return MutableModuleObjectHolder(modobj, self.interpreter)
+
+
+_BuildTarget = T.TypeVar('_BuildTarget', bound=T.Union[build.BuildTarget, build.BothLibraries])
+
+class BuildTargetHolder(ObjectHolder[_BuildTarget]):
+    def __init__(self, target: _BuildTarget, interp: 'Interpreter'):
+        super().__init__(target, interp)
+        self.methods.update({'extract_objects': self.extract_objects_method,
+                             'extract_all_objects': self.extract_all_objects_method,
+                             'name': self.name_method,
+                             'get_id': self.get_id_method,
+                             'outdir': self.outdir_method,
+                             'full_path': self.full_path_method,
+                             'path': self.path_method,
+                             'found': self.found_method,
+                             'private_dir_include': self.private_dir_include_method,
+                             })
+
+    def __repr__(self) -> str:
+        r = '<{} {}: {}>'
+        h = self.held_object
+        assert isinstance(h, build.BuildTarget)
+        return r.format(self.__class__.__name__, h.get_id(), h.filename)
+
+    @property
+    def _target_object(self) -> build.BuildTarget:
+        if isinstance(self.held_object, build.BothLibraries):
+            return self.held_object.get_default_object()
+        assert isinstance(self.held_object, build.BuildTarget)
+        return self.held_object
+
+    def is_cross(self) -> bool:
+        return not self._target_object.environment.machines.matches_build_machine(self._target_object.for_machine)
+
+    @noPosargs
+    @noKwargs
+    def found_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> bool:
+        if not (isinstance(self.held_object, build.Executable) and self.held_object.was_returned_by_find_program):
+            FeatureNew.single_use('BuildTarget.found', '0.59.0', subproject=self.held_object.subproject)
+        return True
+
+    @noPosargs
+    @noKwargs
+    def private_dir_include_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> build.IncludeDirs:
+        return build.IncludeDirs('', [], False, [self.interpreter.backend.get_target_private_dir(self._target_object)])
+
+    @noPosargs
+    @noKwargs
+    def full_path_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
+        return self.interpreter.backend.get_target_filename_abs(self._target_object)
+
+    @noPosargs
+    @noKwargs
+    @FeatureDeprecated('BuildTarget.path', '0.55.0', 'Use BuildTarget.full_path instead')
+    def path_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
+        return self.interpreter.backend.get_target_filename_abs(self._target_object)
+
+    @noPosargs
+    @noKwargs
+    def outdir_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
+        return self.interpreter.backend.get_target_dir(self._target_object)
+
+    @noKwargs
+    @typed_pos_args('extract_objects', varargs=(mesonlib.File, str, build.CustomTarget, build.CustomTargetIndex, build.GeneratedList))
+    def extract_objects_method(self, args: T.Tuple[T.List[T.Union[mesonlib.FileOrString, 'build.GeneratedTypes']]], kwargs: TYPE_nkwargs) -> build.ExtractedObjects:
+        return self._target_object.extract_objects(args[0])
+
+    @noPosargs
+    @typed_kwargs(
+        'extract_all_objects',
+        KwargInfo(
+            'recursive', bool, default=False, since='0.46.0',
+            not_set_warning=textwrap.dedent('''\
+                extract_all_objects called without setting recursive
+                keyword argument. Meson currently defaults to
+                non-recursive to maintain backward compatibility but
+                the default will be changed in the future.
+            ''')
+        )
+    )
+    def extract_all_objects_method(self, args: T.List[TYPE_nvar], kwargs: 'kwargs.BuildTargeMethodExtractAllObjects') -> build.ExtractedObjects:
+        return self._target_object.extract_all_objects(kwargs['recursive'])
+
+    @noPosargs
+    @noKwargs
+    @FeatureDeprecated('BuildTarget.get_id', '1.2.0',
+                       'This was never formally documented and does not seem to have a real world use. ' +
+                       'See https://github.com/mesonbuild/meson/pull/6061')
+    def get_id_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
+        return self._target_object.get_id()
+
+    @FeatureNew('name', '0.54.0')
+    @noPosargs
+    @noKwargs
+    def name_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
+        return self._target_object.name
+
+class ExecutableHolder(BuildTargetHolder[build.Executable]):
+    pass
+
+class StaticLibraryHolder(BuildTargetHolder[build.StaticLibrary]):
+    pass
+
+class SharedLibraryHolder(BuildTargetHolder[build.SharedLibrary]):
+    pass
+
+class BothLibrariesHolder(BuildTargetHolder[build.BothLibraries]):
+    def __init__(self, libs: build.BothLibraries, interp: 'Interpreter'):
+        # FIXME: This build target always represents the shared library, but
+        # that should be configurable.
+        super().__init__(libs, interp)
+        self.methods.update({'get_shared_lib': self.get_shared_lib_method,
+                             'get_static_lib': self.get_static_lib_method,
+                             })
+
+    def __repr__(self) -> str:
+        r = '<{} {}: {}, {}: {}>'
+        h1 = self.held_object.shared
+        h2 = self.held_object.static
+        return r.format(self.__class__.__name__, h1.get_id(), h1.filename, h2.get_id(), h2.filename)
+
+    @noPosargs
+    @noKwargs
+    def get_shared_lib_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> build.SharedLibrary:
+        return self.held_object.shared
+
+    @noPosargs
+    @noKwargs
+    def get_static_lib_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> build.StaticLibrary:
+        return self.held_object.static
+
+class SharedModuleHolder(BuildTargetHolder[build.SharedModule]):
+    pass
+
+class JarHolder(BuildTargetHolder[build.Jar]):
+    pass
+
+class CustomTargetIndexHolder(ObjectHolder[build.CustomTargetIndex]):
+    def __init__(self, target: build.CustomTargetIndex, interp: 'Interpreter'):
+        super().__init__(target, interp)
+        self.methods.update({'full_path': self.full_path_method,
+                             })
+
+    @FeatureNew('custom_target[i].full_path', '0.54.0')
+    @noPosargs
+    @noKwargs
+    def full_path_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
+        assert self.interpreter.backend is not None
+        return self.interpreter.backend.get_target_filename_abs(self.held_object)
+
+class CustomTargetHolder(ObjectHolder[build.CustomTarget]):
+    def __init__(self, target: 'build.CustomTarget', interp: 'Interpreter'):
+        super().__init__(target, interp)
+        self.methods.update({'full_path': self.full_path_method,
+                             'to_list': self.to_list_method,
+                             })
+
+        self.operators.update({
+            MesonOperator.INDEX: self.op_index,
+        })
+
+    def __repr__(self) -> str:
+        r = '<{} {}: {}>'
+        h = self.held_object
+        return r.format(self.__class__.__name__, h.get_id(), h.command)
+
+    @noPosargs
+    @noKwargs
+    def full_path_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
+        return self.interpreter.backend.get_target_filename_abs(self.held_object)
+
+    @FeatureNew('custom_target.to_list', '0.54.0')
+    @noPosargs
+    @noKwargs
+    def to_list_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> T.List[build.CustomTargetIndex]:
+        return list(self.held_object)
+
+    @noKwargs
+    @typed_operator(MesonOperator.INDEX, int)
+    def op_index(self, other: int) -> build.CustomTargetIndex:
+        try:
+            return self.held_object[other]
+        except IndexError:
+            raise InvalidArguments(f'Index {other} out of bounds of custom target {self.held_object.name} output of size {len(self.held_object)}.')
+
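+# Illustrative meson.build usage of custom target indexing (a sketch, assuming
+# `gen` is a custom_target created with output: ['gen.h', 'gen.c']):
+#
+#   lib = static_library('mylib', ['main.c', gen[1]])  # compile only gen.c
+#   message(gen[0].full_path())                        # absolute path of gen.h
+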
+class RunTargetHolder(ObjectHolder[build.RunTarget]):
+    pass
+
+class AliasTargetHolder(ObjectHolder[build.AliasTarget]):
+    pass
+
+class GeneratedListHolder(ObjectHolder[build.GeneratedList]):
+    pass
+
+class GeneratorHolder(ObjectHolder[build.Generator]):
+    def __init__(self, gen: build.Generator, interpreter: 'Interpreter'):
+        super().__init__(gen, interpreter)
+        self.methods.update({'process': self.process_method})
+
+    @typed_pos_args('generator.process', min_varargs=1, varargs=(str, mesonlib.File, build.CustomTarget, build.CustomTargetIndex, build.GeneratedList))
+    @typed_kwargs(
+        'generator.process',
+        KwargInfo('preserve_path_from', (str, NoneType), since='0.45.0'),
+        KwargInfo('extra_args', ContainerTypeInfo(list, str), listify=True, default=[]),
+    )
+    def process_method(self,
+                       args: T.Tuple[T.List[T.Union[str, mesonlib.File, 'build.GeneratedTypes']]],
+                       kwargs: 'kwargs.GeneratorProcess') -> build.GeneratedList:
+        preserve_path_from = kwargs['preserve_path_from']
+        if preserve_path_from is not None:
+            preserve_path_from = os.path.normpath(preserve_path_from)
+            if not os.path.isabs(preserve_path_from):
+                # This is a bit of a hack. Fix properly before merging.
+                raise InvalidArguments('Preserve_path_from must be an absolute path for now. Sorry.')
+
+        if any(isinstance(a, (build.CustomTarget, build.CustomTargetIndex, build.GeneratedList)) for a in args[0]):
+            FeatureNew.single_use(
+                'Calling generator.process with CustomTarget or Index of CustomTarget.',
+                '0.57.0', self.interpreter.subproject)
+
+        gl = self.held_object.process_files(args[0], self.interpreter,
+                                            preserve_path_from, extra_args=kwargs['extra_args'])
+
+        return gl
+
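+# Illustrative meson.build usage (a sketch, assuming flex was located with
+# find_program()):
+#
+#   lgen = generator(flex,
+#     output: '@PLAINNAME@.yy.c',
+#     arguments: ['-o', '@OUTPUT@', '@INPUT@'])
+#   lsrc = lgen.process('lexer.l', preserve_path_from: meson.current_source_dir())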
+
+class StructuredSourcesHolder(ObjectHolder[build.StructuredSources]):
+
+    def __init__(self, sources: build.StructuredSources, interp: 'Interpreter'):
+        super().__init__(sources, interp)
diff --git a/vendored-meson/meson/mesonbuild/interpreter/kwargs.py b/vendored-meson/meson/mesonbuild/interpreter/kwargs.py
new file mode 100644
index 000000000000..cf476cefbb1a
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/interpreter/kwargs.py
@@ -0,0 +1,310 @@
+# SPDX-License-Identifier: Apache-2.0
+# Copyright © 2021 The Meson Developers
+# Copyright © 2021 Intel Corporation
+from __future__ import annotations
+
+"""Keyword Argument type annotations."""
+
+import typing as T
+
+from typing_extensions import TypedDict, Literal, Protocol
+
+from .. import build
+from .. import coredata
+from ..compilers import Compiler
+from ..mesonlib import MachineChoice, File, FileMode, FileOrString, OptionKey
+from ..modules.cmake import CMakeSubprojectOptions
+from ..programs import ExternalProgram
+
+
+class FuncAddProjectArgs(TypedDict):
+
+    """Keyword Arguments for the add_*_arguments family of arguments.
+
+    including `add_global_arguments`, `add_project_arguments`, and their
+    link variants
+
+    Because of the use of a converter function, we get the native keyword as
+    a MachineChoice instance already.
+    """
+
+    native: MachineChoice
+    language: T.List[str]
+
+
+class BaseTest(TypedDict):
+
+    """Shared base for the Rust module."""
+
+    args: T.List[T.Union[str, File, build.Target]]
+    should_fail: bool
+    timeout: int
+    workdir: T.Optional[str]
+    depends: T.List[T.Union[build.CustomTarget, build.BuildTarget]]
+    priority: int
+    env: build.EnvironmentVariables
+    suite: T.List[str]
+
+
+class FuncBenchmark(BaseTest):
+
+    """Keyword Arguments shared between `test` and `benchmark`."""
+
+    protocol: Literal['exitcode', 'tap', 'gtest', 'rust']
+
+
+class FuncTest(FuncBenchmark):
+
+    """Keyword Arguments for `test`
+
+    `test` only adds the `is_parallel` argument over benchmark, so inheritance
+    is helpful here.
+    """
+
+    is_parallel: bool
+
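+# These TypedDicts only describe shapes; a sketch of how they pair with the
+# typed_kwargs decorator (names illustrative, not part of this file):
+#
+#   @typed_kwargs('test', KwargInfo('is_parallel', bool, default=True), ...)
+#   def func_test(self, node, args, kwargs: 'kwargs.FuncTest') -> None:
+#       ...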
+
+class ExtractRequired(TypedDict):
+
+    """Keyword Arguments consumed by the `extract_required_kwargs` function.
+
+    Any function that uses the `required` keyword argument, which accepts
+    either a boolean or a feature option, should inherit its arguments from
+    this class.
+    """
+
+    required: T.Union[bool, coredata.UserFeatureOption]
+
+
+class ExtractSearchDirs(TypedDict):
+
+    """Keyword arguments consumed by the `extract_search_dirs` function.
+
+    See the note in `ExtractRequired`.
+    """
+
+    dirs: T.List[str]
+
+
+class FuncGenerator(TypedDict):
+
+    """Keyword rguments for the generator function."""
+
+    arguments: T.List[str]
+    output: T.List[str]
+    depfile: T.Optional[str]
+    capture: bool
+    depends: T.List[T.Union[build.BuildTarget, build.CustomTarget]]
+
+
+class GeneratorProcess(TypedDict):
+
+    """Keyword Arguments for generator.process."""
+
+    preserve_path_from: T.Optional[str]
+    extra_args: T.List[str]
+
+class DependencyMethodPartialDependency(TypedDict):
+
+    """ Keyword Arguments for the dep.partial_dependency methods """
+
+    compile_args: bool
+    link_args: bool
+    links: bool
+    includes: bool
+    sources: bool
+
+class BuildTargeMethodExtractAllObjects(TypedDict):
+    recursive: bool
+
+class FuncInstallSubdir(TypedDict):
+
+    install_dir: str
+    strip_directory: bool
+    exclude_files: T.List[str]
+    exclude_directories: T.List[str]
+    install_mode: FileMode
+
+
+class FuncInstallData(TypedDict):
+
+    install_dir: str
+    sources: T.List[FileOrString]
+    rename: T.List[str]
+    install_mode: FileMode
+
+
+class FuncInstallHeaders(TypedDict):
+
+    install_dir: T.Optional[str]
+    install_mode: FileMode
+    subdir: T.Optional[str]
+
+
+class FuncInstallMan(TypedDict):
+
+    install_dir: T.Optional[str]
+    install_mode: FileMode
+    locale: T.Optional[str]
+
+
+class FuncImportModule(ExtractRequired):
+
+    disabler: bool
+
+
+class FuncIncludeDirectories(TypedDict):
+
+    is_system: bool
+
+class FuncAddLanguages(ExtractRequired):
+
+    native: T.Optional[bool]
+
+class RunTarget(TypedDict):
+
+    command: T.List[T.Union[str, build.BuildTarget, build.CustomTarget, ExternalProgram, File]]
+    depends: T.List[T.Union[build.BuildTarget, build.CustomTarget]]
+    env: build.EnvironmentVariables
+
+
+class CustomTarget(TypedDict):
+
+    build_always: bool
+    build_always_stale: T.Optional[bool]
+    build_by_default: T.Optional[bool]
+    capture: bool
+    command: T.List[T.Union[str, build.BuildTarget, build.CustomTarget,
+                            build.CustomTargetIndex, ExternalProgram, File]]
+    console: bool
+    depend_files: T.List[FileOrString]
+    depends: T.List[T.Union[build.BuildTarget, build.CustomTarget]]
+    depfile: T.Optional[str]
+    env: build.EnvironmentVariables
+    feed: bool
+    input: T.List[T.Union[str, build.BuildTarget, build.CustomTarget, build.CustomTargetIndex,
+                          build.ExtractedObjects, build.GeneratedList, ExternalProgram, File]]
+    install: bool
+    install_dir: T.List[T.Union[str, T.Literal[False]]]
+    install_mode: FileMode
+    install_tag: T.List[T.Optional[str]]
+    output: T.List[str]
+
+class AddTestSetup(TypedDict):
+
+    exe_wrapper: T.List[T.Union[str, ExternalProgram]]
+    gdb: bool
+    timeout_multiplier: int
+    is_default: bool
+    exclude_suites: T.List[str]
+    env: build.EnvironmentVariables
+
+
+class Project(TypedDict):
+
+    version: T.Optional[FileOrString]
+    meson_version: T.Optional[str]
+    default_options: T.Dict[OptionKey, T.Union[str, int, bool, T.List[str]]]
+    license: T.List[str]
+    subproject_dir: str
+
+
+class _FoundProto(Protocol):
+
+    """Protocol for subdir arguments.
+
+    This allows us to define any object that has a found(self) -> bool method
+    """
+
+    def found(self) -> bool: ...
+
+
+class Subdir(TypedDict):
+
+    if_found: T.List[_FoundProto]
+
+
+class Summary(TypedDict):
+
+    section: str
+    bool_yn: bool
+    list_sep: T.Optional[str]
+
+
+class FindProgram(ExtractRequired, ExtractSearchDirs):
+
+    native: MachineChoice
+    version: T.List[str]
+
+
+class RunCommand(TypedDict):
+
+    check: bool
+    capture: T.Optional[bool]
+    env: build.EnvironmentVariables
+
+
+class FeatureOptionRequire(TypedDict):
+
+    error_message: T.Optional[str]
+
+
+class DependencyPkgConfigVar(TypedDict):
+
+    default: T.Optional[str]
+    define_variable: T.List[str]
+
+
+class DependencyGetVariable(TypedDict):
+
+    cmake: T.Optional[str]
+    pkgconfig: T.Optional[str]
+    configtool: T.Optional[str]
+    internal: T.Optional[str]
+    default_value: T.Optional[str]
+    pkgconfig_define: T.List[str]
+
+
+class ConfigurationDataSet(TypedDict):
+
+    description: T.Optional[str]
+
+class VcsTag(TypedDict):
+
+    command: T.List[T.Union[str, build.BuildTarget, build.CustomTarget,
+                            build.CustomTargetIndex, ExternalProgram, File]]
+    fallback: T.Optional[str]
+    input: T.List[T.Union[str, build.BuildTarget, build.CustomTarget, build.CustomTargetIndex,
+                          build.ExtractedObjects, build.GeneratedList, ExternalProgram, File]]
+    output: T.List[str]
+    replace_string: str
+
+
+class ConfigureFile(TypedDict):
+
+    output: str
+    capture: bool
+    format: T.Literal['meson', 'cmake', 'cmake@']
+    output_format: T.Literal['c', 'nasm']
+    depfile: T.Optional[str]
+    install: T.Optional[bool]
+    install_dir: T.Union[str, T.Literal[False]]
+    install_mode: FileMode
+    install_tag: T.Optional[str]
+    encoding: str
+    command: T.Optional[T.List[T.Union[build.Executable, ExternalProgram, Compiler, File, str]]]
+    input: T.List[FileOrString]
+    configuration: T.Optional[T.Union[T.Dict[str, T.Union[str, int, bool]], build.ConfigurationData]]
+
+
+class Subproject(ExtractRequired):
+
+    default_options: T.Dict[OptionKey, T.Union[str, int, bool, T.List[str]]]
+    version: T.List[str]
+
+
+class DoSubproject(ExtractRequired):
+
+    default_options: T.Dict[OptionKey, T.Union[str, int, bool, T.List[str]]]
+    version: T.List[str]
+    cmake_options: T.List[str]
+    options: T.Optional[CMakeSubprojectOptions]
diff --git a/vendored-meson/meson/mesonbuild/interpreter/mesonmain.py b/vendored-meson/meson/mesonbuild/interpreter/mesonmain.py
new file mode 100644
index 000000000000..73df5edd19c7
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/interpreter/mesonmain.py
@@ -0,0 +1,477 @@
+# SPDX-License-Identifier: Apache-2.0
+# Copyright 2012-2021 The Meson development team
+# Copyright © 2021 Intel Corporation
+from __future__ import annotations
+
+import os
+import typing as T
+
+from .. import mesonlib
+from .. import dependencies
+from .. import build
+from .. import mlog, coredata
+
+from ..mesonlib import MachineChoice, OptionKey
+from ..programs import OverrideProgram, ExternalProgram
+from ..interpreter.type_checking import ENV_KW, ENV_METHOD_KW, ENV_SEPARATOR_KW, env_convertor_with_method
+from ..interpreterbase import (MesonInterpreterObject, FeatureNew, FeatureDeprecated,
+                               typed_pos_args,  noArgsFlattening, noPosargs, noKwargs,
+                               typed_kwargs, KwargInfo, InterpreterException)
+from .primitives import MesonVersionString
+from .type_checking import NATIVE_KW, NoneType
+
+if T.TYPE_CHECKING:
+    from typing_extensions import Literal
+    from ..backend.backends import ExecutableSerialisation
+    from ..compilers import Compiler
+    from ..interpreterbase import TYPE_kwargs, TYPE_var
+    from .interpreter import Interpreter
+
+    from typing_extensions import TypedDict
+
+    class FuncOverrideDependency(TypedDict):
+
+        native: mesonlib.MachineChoice
+        static: T.Optional[bool]
+
+    class AddInstallScriptKW(TypedDict):
+
+        skip_if_destdir: bool
+        install_tag: str
+        dry_run: bool
+
+    class NativeKW(TypedDict):
+
+        native: mesonlib.MachineChoice
+
+    class AddDevenvKW(TypedDict):
+        method: Literal['set', 'prepend', 'append']
+        separator: str
+
+
+class MesonMain(MesonInterpreterObject):
+    def __init__(self, build: 'build.Build', interpreter: 'Interpreter'):
+        super().__init__(subproject=interpreter.subproject)
+        self.build = build
+        self.interpreter = interpreter
+        self.methods.update({'add_devenv': self.add_devenv_method,
+                             'add_dist_script': self.add_dist_script_method,
+                             'add_install_script': self.add_install_script_method,
+                             'add_postconf_script': self.add_postconf_script_method,
+                             'backend': self.backend_method,
+                             'build_options': self.build_options_method,
+                             'build_root': self.build_root_method,
+                             'can_run_host_binaries': self.can_run_host_binaries_method,
+                             'current_source_dir': self.current_source_dir_method,
+                             'current_build_dir': self.current_build_dir_method,
+                             'get_compiler': self.get_compiler_method,
+                             'get_cross_property': self.get_cross_property_method,
+                             'get_external_property': self.get_external_property_method,
+                             'global_build_root': self.global_build_root_method,
+                             'global_source_root': self.global_source_root_method,
+                             'has_exe_wrapper': self.has_exe_wrapper_method,
+                             'has_external_property': self.has_external_property_method,
+                             'install_dependency_manifest': self.install_dependency_manifest_method,
+                             'is_cross_build': self.is_cross_build_method,
+                             'is_subproject': self.is_subproject_method,
+                             'is_unity': self.is_unity_method,
+                             'override_dependency': self.override_dependency_method,
+                             'override_find_program': self.override_find_program_method,
+                             'project_build_root': self.project_build_root_method,
+                             'project_license': self.project_license_method,
+                             'project_license_files': self.project_license_files_method,
+                             'project_name': self.project_name_method,
+                             'project_source_root': self.project_source_root_method,
+                             'project_version': self.project_version_method,
+                             'source_root': self.source_root_method,
+                             'version': self.version_method,
+                             })
+
+    def _find_source_script(
+            self, name: str, prog: T.Union[str, mesonlib.File, build.Executable, ExternalProgram],
+            args: T.List[str]) -> 'ExecutableSerialisation':
+        largs: T.List[T.Union[str, build.Executable, ExternalProgram]] = []
+
+        if isinstance(prog, (build.Executable, ExternalProgram)):
+            FeatureNew.single_use(f'Passing executable/found program object to script parameter of {name}',
+                                  '0.55.0', self.subproject, location=self.current_node)
+            largs.append(prog)
+        else:
+            if isinstance(prog, mesonlib.File):
+                FeatureNew.single_use(f'Passing file object to script parameter of {name}',
+                                      '0.57.0', self.subproject, location=self.current_node)
+            found = self.interpreter.find_program_impl([prog])
+            largs.append(found)
+
+        largs.extend(args)
+        es = self.interpreter.backend.get_executable_serialisation(largs, verbose=True)
+        es.subproject = self.interpreter.subproject
+        return es
+
+    def _process_script_args(
+            self, name: str, args: T.Sequence[T.Union[
+                str, mesonlib.File, build.BuildTarget, build.CustomTarget,
+                build.CustomTargetIndex,
+                ExternalProgram,
+            ]]) -> T.List[str]:
+        script_args: T.List[str] = []
+        new = False
+        for a in args:
+            if isinstance(a, str):
+                script_args.append(a)
+            elif isinstance(a, mesonlib.File):
+                new = True
+                script_args.append(a.rel_to_builddir(self.interpreter.environment.source_dir))
+            elif isinstance(a, (build.BuildTarget, build.CustomTarget, build.CustomTargetIndex)):
+                new = True
+                script_args.extend([os.path.join(a.get_subdir(), o) for o in a.get_outputs()])
+
+                # This feels really hacky, but I'm not sure how else to fix
+                # this without completely rewriting install script handling.
+                # This is complicated by the fact that the install target
+                # depends on all.
+                if isinstance(a, build.CustomTargetIndex):
+                    a.target.build_by_default = True
+                else:
+                    a.build_by_default = True
+            else:
+                script_args.extend(a.command)
+                new = True
+
+        if new:
+            FeatureNew.single_use(
+                f'Calling "{name}" with File, CustomTarget, Index of CustomTarget, '
+                'Executable, or ExternalProgram',
+                '0.55.0', self.interpreter.subproject, location=self.current_node)
+        return script_args
+
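+    # Illustrative meson.build usage of the script-registration methods below
+    # (a sketch, assuming post_install.py exists in the source tree):
+    #
+    #   meson.add_install_script('post_install.py', get_option('prefix'),
+    #     skip_if_destdir: true)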
+    @typed_pos_args(
+        'meson.add_install_script',
+        (str, mesonlib.File, build.Executable, ExternalProgram),
+        varargs=(str, mesonlib.File, build.BuildTarget, build.CustomTarget, build.CustomTargetIndex, ExternalProgram)
+    )
+    @typed_kwargs(
+        'meson.add_install_script',
+        KwargInfo('skip_if_destdir', bool, default=False, since='0.57.0'),
+        KwargInfo('install_tag', (str, NoneType), since='0.60.0'),
+        KwargInfo('dry_run', bool, default=False, since='1.1.0'),
+    )
+    def add_install_script_method(
+            self,
+            args: T.Tuple[T.Union[str, mesonlib.File, build.Executable, ExternalProgram],
+                          T.List[T.Union[str, mesonlib.File, build.BuildTarget, build.CustomTarget, build.CustomTargetIndex, ExternalProgram]]],
+            kwargs: 'AddInstallScriptKW') -> None:
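+        # Meson DSL example (script name illustrative):
+        #   meson.add_install_script('postinstall.py', meson.project_name(),
+        #                            install_tag: 'runtime', skip_if_destdir: true)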
+        script_args = self._process_script_args('add_install_script', args[1])
+        script = self._find_source_script('add_install_script', args[0], script_args)
+        script.skip_if_destdir = kwargs['skip_if_destdir']
+        script.tag = kwargs['install_tag']
+        script.dry_run = kwargs['dry_run']
+        self.build.install_scripts.append(script)
+
+    @typed_pos_args(
+        'meson.add_postconf_script',
+        (str, mesonlib.File, ExternalProgram),
+        varargs=(str, mesonlib.File, ExternalProgram)
+    )
+    @noKwargs
+    def add_postconf_script_method(
+            self,
+            args: T.Tuple[T.Union[str, mesonlib.File, ExternalProgram],
+                          T.List[T.Union[str, mesonlib.File, ExternalProgram]]],
+            kwargs: 'TYPE_kwargs') -> None:
+        script_args = self._process_script_args('add_postconf_script', args[1])
+        script = self._find_source_script('add_postconf_script', args[0], script_args)
+        self.build.postconf_scripts.append(script)
+
+    @typed_pos_args(
+        'meson.add_dist_script',
+        (str, mesonlib.File, ExternalProgram),
+        varargs=(str, mesonlib.File, ExternalProgram)
+    )
+    @noKwargs
+    def add_dist_script_method(
+            self,
+            args: T.Tuple[T.Union[str, mesonlib.File, ExternalProgram],
+                          T.List[T.Union[str, mesonlib.File, ExternalProgram]]],
+            kwargs: 'TYPE_kwargs') -> None:
+        if args[1]:
+            FeatureNew.single_use('Calling "add_dist_script" with multiple arguments',
+                                  '0.49.0', self.interpreter.subproject, location=self.current_node)
+        if self.interpreter.subproject != '':
+            FeatureNew.single_use('Calling "add_dist_script" in a subproject',
+                                  '0.58.0', self.interpreter.subproject, location=self.current_node)
+        script_args = self._process_script_args('add_dist_script', args[1])
+        script = self._find_source_script('add_dist_script', args[0], script_args)
+        self.build.dist_scripts.append(script)
+
+    @noPosargs
+    @noKwargs
+    def current_source_dir_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> str:
+        src = self.interpreter.environment.source_dir
+        sub = self.interpreter.subdir
+        if sub == '':
+            return src
+        return os.path.join(src, sub)
+
+    @noPosargs
+    @noKwargs
+    def current_build_dir_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> str:
+        src = self.interpreter.environment.build_dir
+        sub = self.interpreter.subdir
+        if sub == '':
+            return src
+        return os.path.join(src, sub)
+
+    @noPosargs
+    @noKwargs
+    def backend_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> str:
+        return self.interpreter.backend.name
+
+    @noPosargs
+    @noKwargs
+    @FeatureDeprecated('meson.source_root', '0.56.0', 'use meson.project_source_root() or meson.global_source_root() instead.')
+    def source_root_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> str:
+        return self.interpreter.environment.source_dir
+
+    @noPosargs
+    @noKwargs
+    @FeatureDeprecated('meson.build_root', '0.56.0', 'use meson.project_build_root() or meson.global_build_root() instead.')
+    def build_root_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> str:
+        return self.interpreter.environment.build_dir
+
+    @noPosargs
+    @noKwargs
+    @FeatureNew('meson.project_source_root', '0.56.0')
+    def project_source_root_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> str:
+        src = self.interpreter.environment.source_dir
+        sub = self.interpreter.root_subdir
+        if sub == '':
+            return src
+        return os.path.join(src, sub)
+
+    @noPosargs
+    @noKwargs
+    @FeatureNew('meson.project_build_root', '0.56.0')
+    def project_build_root_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> str:
+        src = self.interpreter.environment.build_dir
+        sub = self.interpreter.root_subdir
+        if sub == '':
+            return src
+        return os.path.join(src, sub)
+
+    @noPosargs
+    @noKwargs
+    @FeatureNew('meson.global_source_root', '0.58.0')
+    def global_source_root_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> str:
+        return self.interpreter.environment.source_dir
+
+    @noPosargs
+    @noKwargs
+    @FeatureNew('meson.global_build_root', '0.58.0')
+    def global_build_root_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> str:
+        return self.interpreter.environment.build_dir
+
+    @noPosargs
+    @noKwargs
+    @FeatureDeprecated('meson.has_exe_wrapper', '0.55.0', 'use meson.can_run_host_binaries instead.')
+    def has_exe_wrapper_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> bool:
+        return self._can_run_host_binaries_impl()
+
+    @noPosargs
+    @noKwargs
+    @FeatureNew('meson.can_run_host_binaries', '0.55.0')
+    def can_run_host_binaries_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> bool:
+        return self._can_run_host_binaries_impl()
+
+    def _can_run_host_binaries_impl(self) -> bool:
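+        # Host binaries are runnable unless this is a cross build that needs an
+        # exe wrapper (e.g. an emulator) and no wrapper has been configured.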
+        return not (
+            self.build.environment.is_cross_build() and
+            self.build.environment.need_exe_wrapper() and
+            self.build.environment.exe_wrapper is None
+        )
+
+    @noPosargs
+    @noKwargs
+    def is_cross_build_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> bool:
+        return self.build.environment.is_cross_build()
+
+    @typed_pos_args('meson.get_compiler', str)
+    @typed_kwargs('meson.get_compiler', NATIVE_KW)
+    def get_compiler_method(self, args: T.Tuple[str], kwargs: 'NativeKW') -> 'Compiler':
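+        # e.g. meson.get_compiler('c') for the host compiler, or
+        # meson.get_compiler('c', native: true) for the build machine's compiler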
+        cname = args[0]
+        for_machine = kwargs['native']
+        clist = self.interpreter.coredata.compilers[for_machine]
+        try:
+            return clist[cname]
+        except KeyError:
+            raise InterpreterException(f'Tried to access compiler for language "{cname}", not specified for {for_machine.get_lower_case_name()} machine.')
+
+    @noPosargs
+    @noKwargs
+    def is_unity_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> bool:
+        optval = self.interpreter.environment.coredata.get_option(OptionKey('unity'))
+        return optval == 'on' or (optval == 'subprojects' and self.interpreter.is_subproject())
+
+    @noPosargs
+    @noKwargs
+    def is_subproject_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> bool:
+        return self.interpreter.is_subproject()
+
+    @typed_pos_args('meson.install_dependency_manifest', str)
+    @noKwargs
+    def install_dependency_manifest_method(self, args: T.Tuple[str], kwargs: 'TYPE_kwargs') -> None:
+        self.build.dep_manifest_name = args[0]
+
+    @FeatureNew('meson.override_find_program', '0.46.0')
+    @typed_pos_args('meson.override_find_program', str, (mesonlib.File, ExternalProgram, build.Executable))
+    @noKwargs
+    def override_find_program_method(self, args: T.Tuple[str, T.Union[mesonlib.File, ExternalProgram, build.Executable]], kwargs: 'TYPE_kwargs') -> None:
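+        # e.g. meson.override_find_program('glib-mkenums', mkenums_prog) makes a
+        # later find_program('glib-mkenums') return the given program instead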
+        name, exe = args
+        if isinstance(exe, mesonlib.File):
+            abspath = exe.absolute_path(self.interpreter.environment.source_dir,
+                                        self.interpreter.environment.build_dir)
+            if not os.path.exists(abspath):
+                raise InterpreterException(f'Tried to override {name} with a file that does not exist.')
+            exe = OverrideProgram(name, [abspath])
+        self.interpreter.add_find_program_override(name, exe)
+
+    @typed_kwargs(
+        'meson.override_dependency',
+        NATIVE_KW,
+        KwargInfo('static', (bool, NoneType), since='0.60.0'),
+    )
+    @typed_pos_args('meson.override_dependency', str, dependencies.Dependency)
+    @FeatureNew('meson.override_dependency', '0.54.0')
+    def override_dependency_method(self, args: T.Tuple[str, dependencies.Dependency], kwargs: 'FuncOverrideDependency') -> None:
+        name, dep = args
+        if not name:
+            raise InterpreterException('First argument must be a string and cannot be empty')
+
+        optkey = OptionKey('default_library', subproject=self.interpreter.subproject)
+        default_library = self.interpreter.coredata.get_option(optkey)
+        assert isinstance(default_library, str), 'for mypy'
+        static = kwargs['static']
+        if static is None:
+            # We don't know if dep represents a static or shared library; it
+            # could be a mix of both. We assume it follows the value of the
+            # default_library option.
+            self._override_dependency_impl(name, dep, kwargs, static=None)
+            if default_library == 'static':
+                self._override_dependency_impl(name, dep, kwargs, static=True)
+            elif default_library == 'shared':
+                self._override_dependency_impl(name, dep, kwargs, static=False)
+            else:
+                self._override_dependency_impl(name, dep, kwargs, static=True)
+                self._override_dependency_impl(name, dep, kwargs, static=False)
+        else:
+            # dependency('foo') without specifying static kwarg should find this
+            # override regardless of the static value here. But do not raise an error
+            # if it has already been overridden, which would happen when overriding
+            # static and shared separately:
+            # meson.override_dependency('foo', shared_dep, static: false)
+            # meson.override_dependency('foo', static_dep, static: true)
+            # In that case dependency('foo') would return the first override.
+            self._override_dependency_impl(name, dep, kwargs, static=None, permissive=True)
+            self._override_dependency_impl(name, dep, kwargs, static=static)
+
+    def _override_dependency_impl(self, name: str, dep: dependencies.Dependency, kwargs: 'FuncOverrideDependency',
+                                  static: T.Optional[bool], permissive: bool = False) -> None:
+        # We need the cast here as get_dep_identifier works on such a dict,
+        # which FuncOverrideDependency is, but mypy can't figure that out
+        nkwargs = T.cast('T.Dict[str, T.Any]', kwargs.copy())
+        if static is None:
+            del nkwargs['static']
+        else:
+            nkwargs['static'] = static
+        identifier = dependencies.get_dep_identifier(name, nkwargs)
+        for_machine = kwargs['native']
+        override = self.build.dependency_overrides[for_machine].get(identifier)
+        if override:
+            if permissive:
+                return
+            m = 'Tried to override dependency {!r} which has already been resolved or overridden at {}'
+            location = mlog.get_error_location_string(override.node.filename, override.node.lineno)
+            raise InterpreterException(m.format(name, location))
+        self.build.dependency_overrides[for_machine][identifier] = \
+            build.DependencyOverride(dep, self.interpreter.current_node)
+
+    @noPosargs
+    @noKwargs
+    def project_version_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> str:
+        return self.build.dep_manifest[self.interpreter.active_projectname].version
+
+    @FeatureNew('meson.project_license()', '0.45.0')
+    @noPosargs
+    @noKwargs
+    def project_license_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> T.List[str]:
+        return self.build.dep_manifest[self.interpreter.active_projectname].license
+
+    @FeatureNew('meson.project_license_files()', '1.1.0')
+    @noPosargs
+    @noKwargs
+    def project_license_files_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> T.List[mesonlib.File]:
+        return [l[1] for l in self.build.dep_manifest[self.interpreter.active_projectname].license_files]
+
+    @noPosargs
+    @noKwargs
+    def version_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> MesonVersionString:
+        return MesonVersionString(self.interpreter.coredata.version)
+
+    @noPosargs
+    @noKwargs
+    def project_name_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> str:
+        return self.interpreter.active_projectname
+
+    def __get_external_property_impl(self, propname: str, fallback: T.Optional[object], machine: MachineChoice) -> object:
+        """Shared implementation for get_cross_property and get_external_property."""
+        try:
+            return self.interpreter.environment.properties[machine][propname]
+        except KeyError:
+            if fallback is not None:
+                return fallback
+            raise InterpreterException(f'Unknown property for {machine.get_lower_case_name()} machine: {propname}')
+
+    @noArgsFlattening
+    @FeatureDeprecated('meson.get_cross_property', '0.58.0', 'Use meson.get_external_property() instead')
+    @typed_pos_args('meson.get_cross_property', str, optargs=[object])
+    @noKwargs
+    def get_cross_property_method(self, args: T.Tuple[str, T.Optional[object]], kwargs: 'TYPE_kwargs') -> object:
+        propname, fallback = args
+        return self.__get_external_property_impl(propname, fallback, MachineChoice.HOST)
+
+    @noArgsFlattening
+    @FeatureNew('meson.get_external_property', '0.54.0')
+    @typed_pos_args('meson.get_external_property', str, optargs=[object])
+    @typed_kwargs('meson.get_external_property', NATIVE_KW)
+    def get_external_property_method(self, args: T.Tuple[str, T.Optional[object]], kwargs: 'NativeKW') -> object:
+        propname, fallback = args
+        return self.__get_external_property_impl(propname, fallback, kwargs['native'])
+
+    @FeatureNew('meson.has_external_property', '0.58.0')
+    @typed_pos_args('meson.has_external_property', str)
+    @typed_kwargs('meson.has_external_property', NATIVE_KW)
+    def has_external_property_method(self, args: T.Tuple[str], kwargs: 'NativeKW') -> bool:
+        prop_name = args[0]
+        return prop_name in self.interpreter.environment.properties[kwargs['native']]
+
+    @FeatureNew('add_devenv', '0.58.0')
+    @typed_kwargs('environment', ENV_METHOD_KW, ENV_SEPARATOR_KW.evolve(since='0.62.0'))
+    @typed_pos_args('add_devenv', (str, list, dict, build.EnvironmentVariables))
+    def add_devenv_method(self, args: T.Tuple[T.Union[str, list, dict, build.EnvironmentVariables]],
+                          kwargs: 'AddDevenvKW') -> None:
+        env = args[0]
+        msg = ENV_KW.validator(env)
+        if msg:
+            raise build.InvalidArguments(f'"add_devenv": {msg}')
+        converted = env_convertor_with_method(env, kwargs['method'], kwargs['separator'])
+        assert isinstance(converted, build.EnvironmentVariables)
+        self.build.devenv.append(converted)
+
+    @noPosargs
+    @noKwargs
+    @FeatureNew('meson.build_options', '1.1.0')
+    def build_options_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> str:
+        options = self.interpreter.user_defined_options
+        if options is None:
+            return ''
+        return coredata.format_cmd_line_options(options)
diff --git a/vendored-meson/meson/mesonbuild/interpreter/primitives/__init__.py b/vendored-meson/meson/mesonbuild/interpreter/primitives/__init__.py
new file mode 100644
index 000000000000..aebef414f9fc
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/interpreter/primitives/__init__.py
@@ -0,0 +1,29 @@
+# Copyright 2021 The Meson development team
+# SPDX-License-Identifier: Apache-2.0
+
+__all__ = [
+    'ArrayHolder',
+    'BooleanHolder',
+    'DictHolder',
+    'IntegerHolder',
+    'RangeHolder',
+    'StringHolder',
+    'MesonVersionString',
+    'MesonVersionStringHolder',
+    'DependencyVariableString',
+    'DependencyVariableStringHolder',
+    'OptionString',
+    'OptionStringHolder',
+]
+
+from .array import ArrayHolder
+from .boolean import BooleanHolder
+from .dict import DictHolder
+from .integer import IntegerHolder
+from .range import RangeHolder
+from .string import (
+    StringHolder,
+    MesonVersionString, MesonVersionStringHolder,
+    DependencyVariableString, DependencyVariableStringHolder,
+    OptionString, OptionStringHolder,
+)
diff --git a/vendored-meson/meson/mesonbuild/interpreter/primitives/array.py b/vendored-meson/meson/mesonbuild/interpreter/primitives/array.py
new file mode 100644
index 000000000000..eeea112e2cf5
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/interpreter/primitives/array.py
@@ -0,0 +1,108 @@
+# Copyright 2021 The Meson development team
+# SPDX-License-Identifier: Apache-2.0
+from __future__ import annotations
+
+import typing as T
+
+from ...interpreterbase import (
+    ObjectHolder,
+    IterableObject,
+    MesonOperator,
+    typed_operator,
+    noKwargs,
+    noPosargs,
+    noArgsFlattening,
+    typed_pos_args,
+    FeatureNew,
+
+    TYPE_var,
+
+    InvalidArguments,
+)
+from ...mparser import PlusAssignmentNode
+
+if T.TYPE_CHECKING:
+    # Object holders need the actual interpreter
+    from ...interpreter import Interpreter
+    from ...interpreterbase import TYPE_kwargs
+
+class ArrayHolder(ObjectHolder[T.List[TYPE_var]], IterableObject):
+    def __init__(self, obj: T.List[TYPE_var], interpreter: 'Interpreter') -> None:
+        super().__init__(obj, interpreter)
+        self.methods.update({
+            'contains': self.contains_method,
+            'length': self.length_method,
+            'get': self.get_method,
+        })
+
+        self.trivial_operators.update({
+            MesonOperator.EQUALS: (list, lambda x: self.held_object == x),
+            MesonOperator.NOT_EQUALS: (list, lambda x: self.held_object != x),
+            MesonOperator.IN: (object, lambda x: x in self.held_object),
+            MesonOperator.NOT_IN: (object, lambda x: x not in self.held_object),
+        })
+
+        # Use actual methods for functions that require additional checks
+        self.operators.update({
+            MesonOperator.PLUS: self.op_plus,
+            MesonOperator.INDEX: self.op_index,
+        })
+
+    def display_name(self) -> str:
+        return 'array'
+
+    def iter_tuple_size(self) -> None:
+        return None
+
+    def iter_self(self) -> T.Iterator[TYPE_var]:
+        return iter(self.held_object)
+
+    def size(self) -> int:
+        return len(self.held_object)
+
+    @noArgsFlattening
+    @noKwargs
+    @typed_pos_args('array.contains', object)
+    def contains_method(self, args: T.Tuple[object], kwargs: TYPE_kwargs) -> bool:
+        def check_contains(el: T.List[TYPE_var]) -> bool:
+            for element in el:
+                if isinstance(element, list):
+                    found = check_contains(element)
+                    if found:
+                        return True
+                if element == args[0]:
+                    return True
+            return False
+        return check_contains(self.held_object)
+
+    @noKwargs
+    @noPosargs
+    def length_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> int:
+        return len(self.held_object)
+
+    @noArgsFlattening
+    @noKwargs
+    @typed_pos_args('array.get', int, optargs=[object])
+    def get_method(self, args: T.Tuple[int, T.Optional[TYPE_var]], kwargs: TYPE_kwargs) -> TYPE_var:
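+        # For example, in the Meson DSL:
+        #   ['a', 'b'].get(-1)      # == 'b' (negative indices count from the end)
+        #   ['a', 'b'].get(5, 'x')  # == 'x' (out of range, fallback returned)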
+        index = args[0]
+        if index < -len(self.held_object) or index >= len(self.held_object):
+            if args[1] is None:
+                raise InvalidArguments(f'Array index {index} is out of bounds for array of size {len(self.held_object)}.')
+            return args[1]
+        return self.held_object[index]
+
+    @typed_operator(MesonOperator.PLUS, object)
+    def op_plus(self, other: TYPE_var) -> T.List[TYPE_var]:
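+        # e.g. ['a'] + 'b' yields ['a', 'b']; appending a non-list right-hand
+        # side with plain `+` (not `+=`) is a 0.60.0 feature, hence the check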
+        if not isinstance(other, list):
+            if not isinstance(self.current_node, PlusAssignmentNode):
+                FeatureNew.single_use('list.<plus>', '0.60.0', self.subproject, 'The right hand operand was not a list.',
+                                      location=self.current_node)
+            other = [other]
+        return self.held_object + other
+
+    @typed_operator(MesonOperator.INDEX, int)
+    def op_index(self, other: int) -> TYPE_var:
+        try:
+            return self.held_object[other]
+        except IndexError:
+            raise InvalidArguments(f'Index {other} out of bounds of array of size {len(self.held_object)}.')
diff --git a/vendored-meson/meson/mesonbuild/interpreter/primitives/boolean.py b/vendored-meson/meson/mesonbuild/interpreter/primitives/boolean.py
new file mode 100644
index 000000000000..4b49caf458a0
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/interpreter/primitives/boolean.py
@@ -0,0 +1,52 @@
+# Copyright 2021 The Meson development team
+# SPDX-License-Identifier: Apache-2.0
+from __future__ import annotations
+
+from ...interpreterbase import (
+    ObjectHolder,
+    MesonOperator,
+    typed_pos_args,
+    noKwargs,
+    noPosargs,
+
+    InvalidArguments
+)
+
+import typing as T
+
+if T.TYPE_CHECKING:
+    # Object holders need the actual interpreter
+    from ...interpreter import Interpreter
+    from ...interpreterbase import TYPE_var, TYPE_kwargs
+
+class BooleanHolder(ObjectHolder[bool]):
+    def __init__(self, obj: bool, interpreter: 'Interpreter') -> None:
+        super().__init__(obj, interpreter)
+        self.methods.update({
+            'to_int': self.to_int_method,
+            'to_string': self.to_string_method,
+        })
+
+        self.trivial_operators.update({
+            MesonOperator.BOOL: (None, lambda x: self.held_object),
+            MesonOperator.NOT: (None, lambda x: not self.held_object),
+            MesonOperator.EQUALS: (bool, lambda x: self.held_object == x),
+            MesonOperator.NOT_EQUALS: (bool, lambda x: self.held_object != x),
+        })
+
+    def display_name(self) -> str:
+        return 'bool'
+
+    @noKwargs
+    @noPosargs
+    def to_int_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> int:
+        return 1 if self.held_object else 0
+
+    @noKwargs
+    @typed_pos_args('bool.to_string', optargs=[str, str])
+    def to_string_method(self, args: T.Tuple[T.Optional[str], T.Optional[str]], kwargs: TYPE_kwargs) -> str:
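+        # For example: true.to_string() == 'true', false.to_string('yes', 'no') == 'no'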
+        true_str = args[0] or 'true'
+        false_str = args[1] or 'false'
+        if any(x is not None for x in args) and not all(x is not None for x in args):
+            raise InvalidArguments('bool.to_string() must have either no arguments or exactly two string arguments that signify what values to return for true and false.')
+        return true_str if self.held_object else false_str
diff --git a/vendored-meson/meson/mesonbuild/interpreter/primitives/dict.py b/vendored-meson/meson/mesonbuild/interpreter/primitives/dict.py
new file mode 100644
index 000000000000..ac7c99b8a8a3
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/interpreter/primitives/dict.py
@@ -0,0 +1,88 @@
+# Copyright 2021 The Meson development team
+# SPDX-License-Identifier: Apache-2.0
+from __future__ import annotations
+
+import typing as T
+
+from ...interpreterbase import (
+    ObjectHolder,
+    IterableObject,
+    MesonOperator,
+    typed_operator,
+    noKwargs,
+    noPosargs,
+    noArgsFlattening,
+    typed_pos_args,
+
+    TYPE_var,
+
+    InvalidArguments,
+)
+
+if T.TYPE_CHECKING:
+    # Object holders need the actual interpreter
+    from ...interpreter import Interpreter
+    from ...interpreterbase import TYPE_kwargs
+
+class DictHolder(ObjectHolder[T.Dict[str, TYPE_var]], IterableObject):
+    def __init__(self, obj: T.Dict[str, TYPE_var], interpreter: 'Interpreter') -> None:
+        super().__init__(obj, interpreter)
+        self.methods.update({
+            'has_key': self.has_key_method,
+            'keys': self.keys_method,
+            'get': self.get_method,
+        })
+
+        self.trivial_operators.update({
+            # Arithmetic
+            MesonOperator.PLUS: (dict, lambda x: {**self.held_object, **x}),
+
+            # Comparison
+            MesonOperator.EQUALS: (dict, lambda x: self.held_object == x),
+            MesonOperator.NOT_EQUALS: (dict, lambda x: self.held_object != x),
+            MesonOperator.IN: (str, lambda x: x in self.held_object),
+            MesonOperator.NOT_IN: (str, lambda x: x not in self.held_object),
+        })
+
+        # Use actual methods for functions that require additional checks
+        self.operators.update({
+            MesonOperator.INDEX: self.op_index,
+        })
+
+    def display_name(self) -> str:
+        return 'dict'
+
+    def iter_tuple_size(self) -> int:
+        return 2
+
+    def iter_self(self) -> T.Iterator[T.Tuple[str, TYPE_var]]:
+        return iter(self.held_object.items())
+
+    def size(self) -> int:
+        return len(self.held_object)
+
+    @noKwargs
+    @typed_pos_args('dict.has_key', str)
+    def has_key_method(self, args: T.Tuple[str], kwargs: TYPE_kwargs) -> bool:
+        return args[0] in self.held_object
+
+    @noKwargs
+    @noPosargs
+    def keys_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> T.List[str]:
+        return sorted(self.held_object)
+
+    @noArgsFlattening
+    @noKwargs
+    @typed_pos_args('dict.get', str, optargs=[object])
+    def get_method(self, args: T.Tuple[str, T.Optional[TYPE_var]], kwargs: TYPE_kwargs) -> TYPE_var:
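+        # For example: {'a': 1}.get('a') == 1 and {'a': 1}.get('b', 2) == 2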
+        if args[0] in self.held_object:
+            return self.held_object[args[0]]
+        if args[1] is not None:
+            return args[1]
+        raise InvalidArguments(f'Key {args[0]!r} is not in the dictionary.')
+
+    @typed_operator(MesonOperator.INDEX, str)
+    def op_index(self, other: str) -> TYPE_var:
+        if other not in self.held_object:
+            raise InvalidArguments(f'Key {other} is not in the dictionary.')
+        return self.held_object[other]
diff --git a/vendored-meson/meson/mesonbuild/interpreter/primitives/integer.py b/vendored-meson/meson/mesonbuild/interpreter/primitives/integer.py
new file mode 100644
index 000000000000..b7f353210134
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/interpreter/primitives/integer.py
@@ -0,0 +1,83 @@
+# Copyright 2021 The Meson development team
+# SPDX-License-Identifier: Apache-2.0
+from __future__ import annotations
+
+from ...interpreterbase import (
+    FeatureBroken, InvalidArguments, MesonOperator, ObjectHolder,
+    noKwargs, noPosargs, typed_operator,
+)
+
+import typing as T
+
+if T.TYPE_CHECKING:
+    # Object holders need the actual interpreter
+    from ...interpreter import Interpreter
+    from ...interpreterbase import TYPE_var, TYPE_kwargs
+
+class IntegerHolder(ObjectHolder[int]):
+    def __init__(self, obj: int, interpreter: 'Interpreter') -> None:
+        super().__init__(obj, interpreter)
+        self.methods.update({
+            'is_even': self.is_even_method,
+            'is_odd': self.is_odd_method,
+            'to_string': self.to_string_method,
+        })
+
+        self.trivial_operators.update({
+            # Arithmetic
+            MesonOperator.UMINUS: (None, lambda x: -self.held_object),
+            MesonOperator.PLUS: (int, lambda x: self.held_object + x),
+            MesonOperator.MINUS: (int, lambda x: self.held_object - x),
+            MesonOperator.TIMES: (int, lambda x: self.held_object * x),
+
+            # Comparison
+            MesonOperator.EQUALS: (int, lambda x: self.held_object == x),
+            MesonOperator.NOT_EQUALS: (int, lambda x: self.held_object != x),
+            MesonOperator.GREATER: (int, lambda x: self.held_object > x),
+            MesonOperator.LESS: (int, lambda x: self.held_object < x),
+            MesonOperator.GREATER_EQUALS: (int, lambda x: self.held_object >= x),
+            MesonOperator.LESS_EQUALS: (int, lambda x: self.held_object <= x),
+        })
+
+        # Use actual methods for functions that require additional checks
+        self.operators.update({
+            MesonOperator.DIV: self.op_div,
+            MesonOperator.MOD: self.op_mod,
+        })
+
+    def display_name(self) -> str:
+        return 'int'
+
+    def operator_call(self, operator: MesonOperator, other: TYPE_var) -> TYPE_var:
+        if isinstance(other, bool):
+            FeatureBroken.single_use('int operations with non-int', '1.2.0', self.subproject,
+                                     'It is not commutative and only worked because of leaky Python abstractions.',
+                                     location=self.current_node)
+        return super().operator_call(operator, other)
+
+    @noKwargs
+    @noPosargs
+    def is_even_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> bool:
+        return self.held_object % 2 == 0
+
+    @noKwargs
+    @noPosargs
+    def is_odd_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> bool:
+        return self.held_object % 2 != 0
+
+    @noKwargs
+    @noPosargs
+    def to_string_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
+        return str(self.held_object)
+
+    @typed_operator(MesonOperator.DIV, int)
+    def op_div(self, other: int) -> int:
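+        # Meson's integer `/` is floor division, e.g. 7 / 2 == 3 and -7 / 2 == -4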
+        if other == 0:
+            raise InvalidArguments('Tried to divide by 0')
+        return self.held_object // other
+
+    @typed_operator(MesonOperator.MOD, int)
+    def op_mod(self, other: int) -> int:
+        if other == 0:
+            raise InvalidArguments('Tried to divide by 0')
+        return self.held_object % other
diff --git a/vendored-meson/meson/mesonbuild/interpreter/primitives/range.py b/vendored-meson/meson/mesonbuild/interpreter/primitives/range.py
new file mode 100644
index 000000000000..5eb5e033e03e
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/interpreter/primitives/range.py
@@ -0,0 +1,38 @@
+# Copyright 2021 The Meson development team
+# SPDX-License-Identifier: Apache-2.0
+from __future__ import annotations
+
+import typing as T
+
+from ...interpreterbase import (
+    MesonInterpreterObject,
+    IterableObject,
+    MesonOperator,
+    InvalidArguments,
+)
+
+if T.TYPE_CHECKING:
+    from ...interpreterbase import SubProject
+
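+# Backs Meson's range() function, e.g.:
+#   foreach i : range(0, 6, 2)  # yields 0, 2, 4
+#   endforeach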
+class RangeHolder(MesonInterpreterObject, IterableObject):
+    def __init__(self, start: int, stop: int, step: int, *, subproject: 'SubProject') -> None:
+        super().__init__(subproject=subproject)
+        self.range = range(start, stop, step)
+        self.operators.update({
+            MesonOperator.INDEX: self.op_index,
+        })
+
+    def op_index(self, other: int) -> int:
+        try:
+            return self.range[other]
+        except IndexError:
+            raise InvalidArguments(f'Index {other} out of bounds of range.')
+
+    def iter_tuple_size(self) -> None:
+        return None
+
+    def iter_self(self) -> T.Iterator[int]:
+        return iter(self.range)
+
+    def size(self) -> int:
+        return len(self.range)
diff --git a/vendored-meson/meson/mesonbuild/interpreter/primitives/string.py b/vendored-meson/meson/mesonbuild/interpreter/primitives/string.py
new file mode 100644
index 000000000000..b825128e5809
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/interpreter/primitives/string.py
@@ -0,0 +1,244 @@
+# Copyright 2021 The Meson development team
+# SPDX-License-Identifier: Apache-2.0
+from __future__ import annotations
+
+import re
+import os
+
+import typing as T
+
+from ...mesonlib import version_compare
+from ...interpreterbase import (
+    ObjectHolder,
+    MesonOperator,
+    FeatureNew,
+    typed_operator,
+    noArgsFlattening,
+    noKwargs,
+    noPosargs,
+    typed_pos_args,
+
+    InvalidArguments,
+)
+
+
+if T.TYPE_CHECKING:
+    # Object holders need the actual interpreter
+    from ...interpreter import Interpreter
+    from ...interpreterbase import TYPE_var, TYPE_kwargs
+
+class StringHolder(ObjectHolder[str]):
+    def __init__(self, obj: str, interpreter: 'Interpreter') -> None:
+        super().__init__(obj, interpreter)
+        self.methods.update({
+            'contains': self.contains_method,
+            'startswith': self.startswith_method,
+            'endswith': self.endswith_method,
+            'format': self.format_method,
+            'join': self.join_method,
+            'replace': self.replace_method,
+            'split': self.split_method,
+            'splitlines': self.splitlines_method,
+            'strip': self.strip_method,
+            'substring': self.substring_method,
+            'to_int': self.to_int_method,
+            'to_lower': self.to_lower_method,
+            'to_upper': self.to_upper_method,
+            'underscorify': self.underscorify_method,
+            'version_compare': self.version_compare_method,
+        })
+
+        self.trivial_operators.update({
+            # Arithmetic
+            MesonOperator.PLUS: (str, lambda x: self.held_object + x),
+
+            # Comparison
+            MesonOperator.EQUALS: (str, lambda x: self.held_object == x),
+            MesonOperator.NOT_EQUALS: (str, lambda x: self.held_object != x),
+            MesonOperator.GREATER: (str, lambda x: self.held_object > x),
+            MesonOperator.LESS: (str, lambda x: self.held_object < x),
+            MesonOperator.GREATER_EQUALS: (str, lambda x: self.held_object >= x),
+            MesonOperator.LESS_EQUALS: (str, lambda x: self.held_object <= x),
+        })
+
+        # Use actual methods for functions that require additional checks
+        self.operators.update({
+            MesonOperator.DIV: self.op_div,
+            MesonOperator.INDEX: self.op_index,
+            MesonOperator.IN: self.op_in,
+            MesonOperator.NOT_IN: self.op_notin,
+        })
+
+    def display_name(self) -> str:
+        return 'str'
+
+    @noKwargs
+    @typed_pos_args('str.contains', str)
+    def contains_method(self, args: T.Tuple[str], kwargs: TYPE_kwargs) -> bool:
+        return self.held_object.find(args[0]) >= 0
+
+    @noKwargs
+    @typed_pos_args('str.startswith', str)
+    def startswith_method(self, args: T.Tuple[str], kwargs: TYPE_kwargs) -> bool:
+        return self.held_object.startswith(args[0])
+
+    @noKwargs
+    @typed_pos_args('str.endswith', str)
+    def endswith_method(self, args: T.Tuple[str], kwargs: TYPE_kwargs) -> bool:
+        return self.held_object.endswith(args[0])
+
+    @noArgsFlattening
+    @noKwargs
+    @typed_pos_args('str.format', varargs=object)
+    def format_method(self, args: T.Tuple[T.List[object]], kwargs: TYPE_kwargs) -> str:
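+        # Replaces @N@ placeholders with positional arguments, e.g.
+        # 'got @0@ and @1@'.format(42, true) == 'got 42 and true'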
+        arg_strings: T.List[str] = []
+        for arg in args[0]:
+            if isinstance(arg, bool): # Python stringifies bools capitalized; Meson uses lowercase.
+                arg = str(arg).lower()
+            arg_strings.append(str(arg))
+
+        def arg_replace(match: T.Match[str]) -> str:
+            idx = int(match.group(1))
+            if idx >= len(arg_strings):
+                raise InvalidArguments(f'Format placeholder @{idx}@ out of range.')
+            return arg_strings[idx]
+
+        return re.sub(r'@(\d+)@', arg_replace, self.held_object)
+
+    @noKwargs
+    @noPosargs
+    @FeatureNew('str.splitlines', '1.2.0')
+    def splitlines_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> T.List[str]:
+        return self.held_object.splitlines()
+
+    @noKwargs
+    @typed_pos_args('str.join', varargs=str)
+    def join_method(self, args: T.Tuple[T.List[str]], kwargs: TYPE_kwargs) -> str:
+        return self.held_object.join(args[0])
+
+    @noKwargs
+    @FeatureNew('str.replace', '0.58.0')
+    @typed_pos_args('str.replace', str, str)
+    def replace_method(self, args: T.Tuple[str, str], kwargs: TYPE_kwargs) -> str:
+        return self.held_object.replace(args[0], args[1])
+
+    @noKwargs
+    @typed_pos_args('str.split', optargs=[str])
+    def split_method(self, args: T.Tuple[T.Optional[str]], kwargs: TYPE_kwargs) -> T.List[str]:
+        return self.held_object.split(args[0])
+
+    @noKwargs
+    @typed_pos_args('str.strip', optargs=[str])
+    def strip_method(self, args: T.Tuple[T.Optional[str]], kwargs: TYPE_kwargs) -> str:
+        if args[0]:
+            FeatureNew.single_use('str.strip with a positional argument', '0.43.0', self.subproject, location=self.current_node)
+        return self.held_object.strip(args[0])
+
+    @noKwargs
+    @FeatureNew('str.substring', '0.56.0')
+    @typed_pos_args('str.substring', optargs=[int, int])
+    def substring_method(self, args: T.Tuple[T.Optional[int], T.Optional[int]], kwargs: TYPE_kwargs) -> str:
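+        # Slices like Python, including negative indices, e.g.
+        # 'foobar'.substring(1, 3) == 'oo' and 'foobar'.substring(-3) == 'bar'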
+        start = args[0] if args[0] is not None else 0
+        end = args[1] if args[1] is not None else len(self.held_object)
+        return self.held_object[start:end]
+
+    @noKwargs
+    @noPosargs
+    def to_int_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> int:
+        try:
+            return int(self.held_object)
+        except ValueError:
+            raise InvalidArguments(f'String {self.held_object!r} cannot be converted to int')
+
+    @noKwargs
+    @noPosargs
+    def to_lower_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
+        return self.held_object.lower()
+
+    @noKwargs
+    @noPosargs
+    def to_upper_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
+        return self.held_object.upper()
+
+    @noKwargs
+    @noPosargs
+    def underscorify_method(self, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
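+        # For example: 'lib-foo.1'.underscorify() == 'lib_foo_1'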
+        return re.sub(r'[^a-zA-Z0-9]', '_', self.held_object)
+
+    @noKwargs
+    @typed_pos_args('str.version_compare', str)
+    def version_compare_method(self, args: T.Tuple[str], kwargs: TYPE_kwargs) -> bool:
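+        # For example: '1.2.8'.version_compare('>=1.2.0') == true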
+        return version_compare(self.held_object, args[0])
+
+    @staticmethod
+    def _op_div(this: str, other: str) -> str:
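+        # Joins with forward slashes on every platform, e.g. 'foo' / 'bar' == 'foo/bar'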
+        return os.path.join(this, other).replace('\\', '/')
+
+    @FeatureNew('/ with string arguments', '0.49.0')
+    @typed_operator(MesonOperator.DIV, str)
+    def op_div(self, other: str) -> str:
+        return self._op_div(self.held_object, other)
+
+    @typed_operator(MesonOperator.INDEX, int)
+    def op_index(self, other: int) -> str:
+        try:
+            return self.held_object[other]
+        except IndexError:
+            raise InvalidArguments(f'Index {other} out of bounds of string of size {len(self.held_object)}.')
+
+    @FeatureNew('"in" string operator', '1.0.0')
+    @typed_operator(MesonOperator.IN, str)
+    def op_in(self, other: str) -> bool:
+        return other in self.held_object
+
+    @FeatureNew('"not in" string operator', '1.0.0')
+    @typed_operator(MesonOperator.NOT_IN, str)
+    def op_notin(self, other: str) -> bool:
+        return other not in self.held_object
+
+
+class MesonVersionString(str):
+    pass
+
+class MesonVersionStringHolder(StringHolder):
+    @noKwargs
+    @typed_pos_args('str.version_compare', str)
+    def version_compare_method(self, args: T.Tuple[str], kwargs: TYPE_kwargs) -> bool:
+        self.interpreter.tmp_meson_version = args[0]
+        return version_compare(self.held_object, args[0])
+
+# These special subclasses of string exist to cover the case where a dependency
+# exports a string variable interchangeable with a system dependency. This
+# matters because a dependency can only have string-type get_variable() return
+# values. If at any time dependencies start supporting additional variable
+# types, this class could be deprecated.
+class DependencyVariableString(str):
+    pass
+
+class DependencyVariableStringHolder(StringHolder):
+    def op_div(self, other: str) -> T.Union[str, DependencyVariableString]:
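+        # If the appended component contains '..' the result may escape the
+        # dependency's directory, so it is returned as a plain str instead of
+        # a DependencyVariableString.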
+        ret = super().op_div(other)
+        if '..' in other:
+            return ret
+        return DependencyVariableString(ret)
+
+
+class OptionString(str):
+    optname: str
+
+    def __new__(cls, value: str, name: str) -> 'OptionString':
+        obj = str.__new__(cls, value)
+        obj.optname = name
+        return obj
+
+    def __getnewargs__(self) -> T.Tuple[str, str]: # type: ignore # because the entire point of this is to diverge
+        return (str(self), self.optname)
+
+
+class OptionStringHolder(StringHolder):
+    held_object: OptionString
+
+    def op_div(self, other: str) -> T.Union[str, OptionString]:
+        ret = super().op_div(other)
+        name = self._op_div(self.held_object.optname, other)
+        return OptionString(ret, name)
diff --git a/vendored-meson/meson/mesonbuild/interpreter/type_checking.py b/vendored-meson/meson/mesonbuild/interpreter/type_checking.py
new file mode 100644
index 000000000000..8b57d06f15db
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/interpreter/type_checking.py
@@ -0,0 +1,481 @@
+# SPDX-License-Identifier: Apache-2.0
+# Copyright © 2021 Intel Corporation
+
+"""Helpers for strict type checking."""
+
+from __future__ import annotations
+import os
+import typing as T
+
+from .. import compilers
+from ..build import (CustomTarget, BuildTarget,
+                     CustomTargetIndex, ExtractedObjects, GeneratedList, IncludeDirs,
+                     BothLibraries, SharedLibrary, StaticLibrary, Jar, Executable)
+from ..coredata import UserFeatureOption
+from ..dependencies import Dependency, InternalDependency
+from ..interpreterbase.decorators import KwargInfo, ContainerTypeInfo
+from ..mesonlib import (File, FileMode, MachineChoice, listify, has_path_sep,
+                        OptionKey, EnvironmentVariables)
+from ..programs import ExternalProgram
+
+# Helper definition for type checks that are `Optional[T]`
+NoneType: T.Type[None] = type(None)
+
+if T.TYPE_CHECKING:
+    from typing_extensions import Literal
+
+    from ..interpreterbase import TYPE_var
+    from ..mesonlib import EnvInitValueType
+
+    _FullEnvInitValueType = T.Union[EnvironmentVariables, T.List[str], T.List[T.List[str]], EnvInitValueType, str, None]
+
+
+def in_set_validator(choices: T.Set[str]) -> T.Callable[[str], T.Optional[str]]:
+    """Check that the choice given was one of the given set."""
+
+    def inner(check: str) -> T.Optional[str]:
+        if check not in choices:
+            return f"must be one of {', '.join(sorted(choices))}, not {check}"
+        return None
+
+    return inner
+
+
+def _language_validator(l: T.List[str]) -> T.Optional[str]:
+    """Validate language keyword argument.
+
+    Particularly for functions like `add_compiler()` and `add_*_args()`
+    """
+    diff = {a.lower() for a in l}.difference(compilers.all_languages)
+    if diff:
+        return f'unknown languages: {", ".join(diff)}'
+    return None
+
+
+def _install_mode_validator(mode: T.List[T.Union[str, bool, int]]) -> T.Optional[str]:
+    """Validate the `install_mode` keyword argument.
+
+    This is a rather odd thing: it's either a scalar, or an array of up to 3 values in the form:
+    [(str | False), (str | int | False) = False, (str | int | False) = False]
+    where the second and third components are not required and default to False.
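+
+    For example: 'rw-r--r--', or ['rwxr-xr-x', 'root', 0] in which the optional
+    second and third entries give the owner and group (name or numeric id).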
+    """
+    if not mode:
+        return None
+    if True in mode:
+        return 'components can only be permission strings, numbers, or False'
+    if len(mode) > 3:
+        return 'may have at most 3 elements'
+
+    perms = mode[0]
+    if not isinstance(perms, (str, bool)):
+        return 'first component must be a permissions string or False'
+
+    if isinstance(perms, str):
+        if not len(perms) == 9:
+            return ('permissions string must be exactly 9 characters in the form rwxr-xr-x,'
+                    f' got {len(perms)}')
+        for i in [0, 3, 6]:
+            if perms[i] not in {'-', 'r'}:
+                return f'permissions character {i+1} must be "-" or "r", not {perms[i]}'
+        for i in [1, 4, 7]:
+            if perms[i] not in {'-', 'w'}:
+                return f'permissions character {i+1} must be "-" or "w", not {perms[i]}'
+        for i in [2, 5]:
+            if perms[i] not in {'-', 'x', 's', 'S'}:
+                return f'permissions character {i+1} must be "-", "s", "S", or "x", not {perms[i]}'
+        if perms[8] not in {'-', 'x', 't', 'T'}:
+            return f'permission character 9 must be "-", "t", "T", or "x", not {perms[8]}'
+
+        if len(mode) >= 2 and not isinstance(mode[1], (int, str, bool)):
+            return 'second component can only be a string, number, or False'
+        if len(mode) >= 3 and not isinstance(mode[2], (int, str, bool)):
+            return 'third component can only be a string, number, or False'
+
+    return None
+
+
+def _install_mode_convertor(mode: T.Optional[T.List[T.Union[str, bool, int]]]) -> FileMode:
+    """Convert the DSL form of the `install_mode` keyword argument to `FileMode`"""
+
+    if not mode:
+        return FileMode()
+
+    # This has already been validated by the validator. False denotes "use
+    # default". mypy is totally incapable of understanding it, because
+    # generators clobber types via homogeneous return. But also we *must*
+    # convert the first element differently from the rest.
+    m1 = mode[0] if isinstance(mode[0], str) else None
+    rest = (m if isinstance(m, (str, int)) else None for m in mode[1:])
+
+    return FileMode(m1, *rest)
+
+
+def _lower_strlist(input: T.List[str]) -> T.List[str]:
+    """Lower a list of strings.
+
+    mypy (but not pyright) gets confused about using a lambda as the convertor function
+    """
+    return [i.lower() for i in input]
+
+
+def variables_validator(contents: T.Union[str, T.List[str], T.Dict[str, str]]) -> T.Optional[str]:
+    if isinstance(contents, str):
+        contents = [contents]
+    if isinstance(contents, dict):
+        variables = contents
+    else:
+        variables = {}
+        for v in contents:
+            try:
+                key, val = v.split('=', 1)
+            except ValueError:
+                return f'variable {v!r} must have a value separated by an equals sign.'
+            variables[key.strip()] = val.strip()
+    for k, v in variables.items():
+        if not k:
+            return 'empty variable name'
+        if not v:
+            return 'empty variable value'
+        if any(c.isspace() for c in k):
+            return f'invalid whitespace in variable name {k!r}'
+    return None
+
+
+def variables_convertor(contents: T.Union[str, T.List[str], T.Dict[str, str]]) -> T.Dict[str, str]:
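+    # e.g. 'a=1' or ['a=1', 'b=2'] become {'a': '1', 'b': '2'}; dicts pass through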
+    if isinstance(contents, str):
+        contents = [contents]
+    if isinstance(contents, dict):
+        return contents
+    variables = {}
+    for v in contents:
+        key, val = v.split('=', 1)
+        variables[key.strip()] = val.strip()
+    return variables
+
+
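+# `native: true` selects the build machine, `native: false` (the default)
+# selects the host machine: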
+NATIVE_KW = KwargInfo(
+    'native', bool,
+    default=False,
+    convertor=lambda n: MachineChoice.BUILD if n else MachineChoice.HOST)
+
+LANGUAGE_KW = KwargInfo(
+    'language', ContainerTypeInfo(list, str, allow_empty=False),
+    listify=True,
+    required=True,
+    validator=_language_validator,
+    convertor=_lower_strlist)
+
+INSTALL_MODE_KW: KwargInfo[T.List[T.Union[str, bool, int]]] = KwargInfo(
+    'install_mode',
+    ContainerTypeInfo(list, (str, bool, int)),
+    listify=True,
+    default=[],
+    validator=_install_mode_validator,
+    convertor=_install_mode_convertor,
+)
+
+REQUIRED_KW: KwargInfo[T.Union[bool, UserFeatureOption]] = KwargInfo(
+    'required',
+    (bool, UserFeatureOption),
+    default=True,
+    # TODO: extract_required_kwarg could be converted to a convertor
+)
+
+DISABLER_KW: KwargInfo[bool] = KwargInfo('disabler', bool, default=False)
+
+def _env_validator(value: T.Union[EnvironmentVariables, T.List['TYPE_var'], T.Dict[str, 'TYPE_var'], str, None],
+                   only_dict_str: bool = True) -> T.Optional[str]:
+    def _splitter(v: str) -> T.Optional[str]:
+        split = v.split('=', 1)
+        if len(split) == 1:
+            return f'"{v}" is not two string values separated by an "="'
+        return None
+
+    if isinstance(value, str):
+        v = _splitter(value)
+        if v is not None:
+            return v
+    elif isinstance(value, list):
+        for i in listify(value):
+            if not isinstance(i, str):
+                return f"All array elements must be a string, not {i!r}"
+            v = _splitter(i)
+            if v is not None:
+                return v
+    elif isinstance(value, dict):
+        # We don't need to split here, just do the type checking
+        for k, dv in value.items():
+            if only_dict_str:
+                if any(i for i in listify(dv) if not isinstance(i, str)):
+                    return f"Dictionary element {k} must be a string or list of strings not {dv!r}"
+            elif isinstance(dv, list):
+                if any(not isinstance(i, str) for i in dv):
+                    return f"Dictionary element {k} must be a string, bool, integer or list of strings, not {dv!r}"
+            elif not isinstance(dv, (str, bool, int)):
+                return f"Dictionary element {k} must be a string, bool, integer or list of strings, not {dv!r}"
+    # We know that otherwise we have an EnvironmentVariables object or None, and
+    # we're okay at this point
+    return None
+
+def _options_validator(value: T.Union[EnvironmentVariables, T.List['TYPE_var'], T.Dict[str, 'TYPE_var'], str, None]) -> T.Optional[str]:
+    # Reusing the env validator is a little overkill, but nicer than duplicating the code
+    return _env_validator(value, only_dict_str=False)
+
+def split_equal_string(input: str) -> T.Tuple[str, str]:
+    """Split a string in the form `x=y`
+
+    This assumes that the string has already been validated to split properly.
+    """
+    a, b = input.split('=', 1)
+    return (a, b)
+
+# Split _env_convertor() and env_convertor_with_method() to make mypy happy.
+# It does not want extra arguments in KwargInfo convertor callable.
+def env_convertor_with_method(value: _FullEnvInitValueType,
+                              init_method: Literal['set', 'prepend', 'append'] = 'set',
+                              separator: str = os.pathsep) -> EnvironmentVariables:
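+    # Accepts every shape the `env` kwarg does, e.g.:
+    #   'FOO=bar'              -> {'FOO': 'bar'}
+    #   ['FOO=bar', 'BAZ=qux'] -> {'FOO': 'bar', 'BAZ': 'qux'}
+    #   {'FOO': 'bar'}         -> used as-is
+    #   None                   -> empty EnvironmentVariables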
+    if isinstance(value, str):
+        return EnvironmentVariables(dict([split_equal_string(value)]), init_method, separator)
+    elif isinstance(value, list):
+        return EnvironmentVariables(dict(split_equal_string(v) for v in listify(value)), init_method, separator)
+    elif isinstance(value, dict):
+        return EnvironmentVariables(value, init_method, separator)
+    elif value is None:
+        return EnvironmentVariables()
+    return value
+
+def _env_convertor(value: _FullEnvInitValueType) -> EnvironmentVariables:
+    return env_convertor_with_method(value)
+
+ENV_KW: KwargInfo[T.Union[EnvironmentVariables, T.List, T.Dict, str, None]] = KwargInfo(
+    'env',
+    (EnvironmentVariables, list, dict, str, NoneType),
+    validator=_env_validator,
+    convertor=_env_convertor,
+)
+
+DEPFILE_KW: KwargInfo[T.Optional[str]] = KwargInfo(
+    'depfile',
+    (str, type(None)),
+    validator=lambda x: 'Depfile must be a plain filename without a subdirectory' if has_path_sep(x) else None
+)
+
+# TODO: CustomTargetIndex should be supported here as well
+DEPENDS_KW: KwargInfo[T.List[T.Union[BuildTarget, CustomTarget]]] = KwargInfo(
+    'depends',
+    ContainerTypeInfo(list, (BuildTarget, CustomTarget)),
+    listify=True,
+    default=[],
+)
+
+DEPEND_FILES_KW: KwargInfo[T.List[T.Union[str, File]]] = KwargInfo(
+    'depend_files',
+    ContainerTypeInfo(list, (File, str)),
+    listify=True,
+    default=[],
+)
+
+COMMAND_KW: KwargInfo[T.List[T.Union[str, BuildTarget, CustomTarget, CustomTargetIndex, ExternalProgram, File]]] = KwargInfo(
+    'command',
+    # TODO: should accept CustomTargetIndex as well?
+    ContainerTypeInfo(list, (str, BuildTarget, CustomTarget, CustomTargetIndex, ExternalProgram, File), allow_empty=False),
+    required=True,
+    listify=True,
+    default=[],
+)
+
+def _override_options_convertor(raw: T.Union[str, T.List[str], T.Dict[str, T.Union[str, int, bool, T.List[str]]]]) -> T.Dict[OptionKey, T.Union[str, int, bool, T.List[str]]]:
+    if isinstance(raw, str):
+        raw = [raw]
+    if isinstance(raw, list):
+        output: T.Dict[OptionKey, T.Union[str, int, bool, T.List[str]]] = {}
+        for each in raw:
+            k, v = split_equal_string(each)
+            output[OptionKey.from_string(k)] = v
+        return output
+    return {OptionKey.from_string(k): v for k, v in raw.items()}
+
+
+OVERRIDE_OPTIONS_KW: KwargInfo[T.Union[str, T.Dict[str, T.Union[str, int, bool, T.List[str]]], T.List[str]]] = KwargInfo(
+    'override_options',
+    (str, ContainerTypeInfo(list, str), ContainerTypeInfo(dict, (str, int, bool, list))),
+    default={},
+    validator=_options_validator,
+    convertor=_override_options_convertor,
+    since_values={dict: '1.2.0'},
+)
+
+
+def _output_validator(outputs: T.List[str]) -> T.Optional[str]:
+    output_set = set(outputs)
+    if len(output_set) != len(outputs):
+        seen = set()
+        for el in outputs:
+            if el in seen:
+                return f"contains {el!r} multiple times, but no duplicates are allowed."
+            seen.add(el)
+    for i in outputs:
+        if i == '':
+            return 'Output must not be empty.'
+        elif i.strip() == '':
+            return 'Output must not consist only of whitespace.'
+        elif has_path_sep(i):
+            return f'Output {i!r} must not contain a path segment.'
+        elif '@INPUT' in i:
+            return f'Output {i!r} contains "@INPUT", which is invalid. Did you mean "@PLAINNAME@" or "@BASENAME@"?'
+
+    return None
+
+MULTI_OUTPUT_KW: KwargInfo[T.List[str]] = KwargInfo(
+    'output',
+    ContainerTypeInfo(list, str, allow_empty=False),
+    listify=True,
+    required=True,
+    default=[],
+    validator=_output_validator,
+)
+
+OUTPUT_KW: KwargInfo[str] = KwargInfo(
+    'output',
+    str,
+    required=True,
+    validator=lambda x: _output_validator([x])
+)
+
+CT_INPUT_KW: KwargInfo[T.List[T.Union[str, File, ExternalProgram, BuildTarget, CustomTarget, CustomTargetIndex, ExtractedObjects, GeneratedList]]] = KwargInfo(
+    'input',
+    ContainerTypeInfo(list, (str, File, ExternalProgram, BuildTarget, CustomTarget, CustomTargetIndex, ExtractedObjects, GeneratedList)),
+    listify=True,
+    default=[],
+)
+
+CT_INSTALL_TAG_KW: KwargInfo[T.List[T.Union[str, bool]]] = KwargInfo(
+    'install_tag',
+    ContainerTypeInfo(list, (str, bool)),
+    listify=True,
+    default=[],
+    since='0.60.0',
+    convertor=lambda x: [y if isinstance(y, str) else None for y in x],
+)
+
+INSTALL_TAG_KW: KwargInfo[T.Optional[str]] = KwargInfo('install_tag', (str, NoneType))
+
+INSTALL_KW = KwargInfo('install', bool, default=False)
+
+CT_INSTALL_DIR_KW: KwargInfo[T.List[T.Union[str, Literal[False]]]] = KwargInfo(
+    'install_dir',
+    ContainerTypeInfo(list, (str, bool)),
+    listify=True,
+    default=[],
+    validator=lambda x: 'must be `false` if boolean' if True in x else None,
+)
+
+CT_BUILD_BY_DEFAULT: KwargInfo[T.Optional[bool]] = KwargInfo('build_by_default', (bool, type(None)), since='0.40.0')
+
+CT_BUILD_ALWAYS: KwargInfo[T.Optional[bool]] = KwargInfo(
+    'build_always', (bool, NoneType),
+    deprecated='0.47.0',
+    deprecated_message='combine build_by_default and build_always_stale instead.',
+)
+
+CT_BUILD_ALWAYS_STALE: KwargInfo[T.Optional[bool]] = KwargInfo(
+    'build_always_stale', (bool, NoneType),
+    since='0.47.0',
+)
+
+INSTALL_DIR_KW: KwargInfo[T.Optional[str]] = KwargInfo('install_dir', (str, NoneType))
+
+INCLUDE_DIRECTORIES: KwargInfo[T.List[T.Union[str, IncludeDirs]]] = KwargInfo(
+    'include_directories',
+    ContainerTypeInfo(list, (str, IncludeDirs)),
+    listify=True,
+    default=[],
+)
+
+DEFAULT_OPTIONS = OVERRIDE_OPTIONS_KW.evolve(name='default_options')
+
+ENV_METHOD_KW = KwargInfo('method', str, default='set', since='0.62.0',
+                          validator=in_set_validator({'set', 'prepend', 'append'}))
+
+ENV_SEPARATOR_KW = KwargInfo('separator', str, default=os.pathsep)
+
+DEPENDENCIES_KW: KwargInfo[T.List[Dependency]] = KwargInfo(
+    'dependencies',
+    # InternalDependency is a subclass of Dependency, but we want to
+    # print it in error messages
+    ContainerTypeInfo(list, (Dependency, InternalDependency)),
+    listify=True,
+    default=[],
+)
+
+D_MODULE_VERSIONS_KW: KwargInfo[T.List[T.Union[str, int]]] = KwargInfo(
+    'd_module_versions',
+    ContainerTypeInfo(list, (str, int)),
+    listify=True,
+    default=[],
+)
+
+_link_with_error = '''can only be self-built targets; external dependencies (including libraries) must go in "dependencies".'''
+
+# Allow Dependency for the better error message? But then in other cases it will list this as one of the allowed types!
+LINK_WITH_KW: KwargInfo[T.List[T.Union[BothLibraries, SharedLibrary, StaticLibrary, CustomTarget, CustomTargetIndex, Jar, Executable]]] = KwargInfo(
+    'link_with',
+    ContainerTypeInfo(list, (BothLibraries, SharedLibrary, StaticLibrary, CustomTarget, CustomTargetIndex, Jar, Executable, Dependency)),
+    listify=True,
+    default=[],
+    validator=lambda x: _link_with_error if any(isinstance(i, Dependency) for i in x) else None,
+)
+
+def link_whole_validator(values: T.List[T.Union[StaticLibrary, CustomTarget, CustomTargetIndex, Dependency]]) -> T.Optional[str]:
+    for l in values:
+        if isinstance(l, (CustomTarget, CustomTargetIndex)) and l.links_dynamically():
+            return f'{type(l).__name__} returning a shared library is not allowed'
+        if isinstance(l, Dependency):
+            return _link_with_error
+    return None
+
+LINK_WHOLE_KW: KwargInfo[T.List[T.Union[BothLibraries, StaticLibrary, CustomTarget, CustomTargetIndex]]] = KwargInfo(
+    'link_whole',
+    ContainerTypeInfo(list, (BothLibraries, StaticLibrary, CustomTarget, CustomTargetIndex, Dependency)),
+    listify=True,
+    default=[],
+    validator=link_whole_validator,
+)
+
+SOURCES_KW: KwargInfo[T.List[T.Union[str, File, CustomTarget, CustomTargetIndex, GeneratedList]]] = KwargInfo(
+    'sources',
+    ContainerTypeInfo(list, (str, File, CustomTarget, CustomTargetIndex, GeneratedList)),
+    listify=True,
+    default=[],
+)
+
+VARIABLES_KW: KwargInfo[T.Dict[str, str]] = KwargInfo(
+    'variables',
+    # str is listified by validator/convertor, cannot use listify=True here because
+    # that would listify dict too.
+    (str, ContainerTypeInfo(list, str), ContainerTypeInfo(dict, str)), # type: ignore
+    validator=variables_validator,
+    convertor=variables_convertor,
+    default={},
+)
+
+PRESERVE_PATH_KW: KwargInfo[bool] = KwargInfo('preserve_path', bool, default=False, since='0.63.0')
+
+TEST_KWS: T.List[KwargInfo] = [
+    KwargInfo('args', ContainerTypeInfo(list, (str, File, BuildTarget, CustomTarget, CustomTargetIndex)),
+              listify=True, default=[]),
+    KwargInfo('should_fail', bool, default=False),
+    KwargInfo('timeout', int, default=30),
+    KwargInfo('workdir', (str, NoneType), default=None,
+              validator=lambda x: 'must be an absolute path' if not os.path.isabs(x) else None),
+    KwargInfo('protocol', str,
+              default='exitcode',
+              validator=in_set_validator({'exitcode', 'tap', 'gtest', 'rust'}),
+              since_values={'gtest': '0.55.0', 'rust': '0.57.0'}),
+    KwargInfo('priority', int, default=0, since='0.52.0'),
+    # TODO: env needs reworks of the way the environment variable holder itself works probably
+    ENV_KW,
+    DEPENDS_KW.evolve(since='0.46.0'),
+    KwargInfo('suite', ContainerTypeInfo(list, str), listify=True, default=['']),  # yes, a list containing a single empty string
+    KwargInfo('verbose', bool, default=False, since='0.62.0'),
+]
diff --git a/vendored-meson/meson/mesonbuild/interpreterbase/__init__.py b/vendored-meson/meson/mesonbuild/interpreterbase/__init__.py
new file mode 100644
index 000000000000..f0c2002cd3ed
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/interpreterbase/__init__.py
@@ -0,0 +1,139 @@
+# Copyright 2013-2021 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+__all__ = [
+    'InterpreterObject',
+    'MesonInterpreterObject',
+    'ObjectHolder',
+    'IterableObject',
+    'MutableInterpreterObject',
+    'ContextManagerObject',
+
+    'MesonOperator',
+
+    'Disabler',
+    'is_disabled',
+
+    'InterpreterException',
+    'InvalidCode',
+    'InvalidArguments',
+    'SubdirDoneRequest',
+    'ContinueRequest',
+    'BreakRequest',
+
+    'default_resolve_key',
+    'flatten',
+    'resolve_second_level_holders',
+
+    'noPosargs',
+    'noKwargs',
+    'stringArgs',
+    'noArgsFlattening',
+    'noSecondLevelHolderResolving',
+    'unholder_return',
+    'disablerIfNotFound',
+    'permittedKwargs',
+    'typed_operator',
+    'unary_operator',
+    'typed_pos_args',
+    'ContainerTypeInfo',
+    'KwargInfo',
+    'typed_kwargs',
+    'FeatureCheckBase',
+    'FeatureNew',
+    'FeatureDeprecated',
+    'FeatureBroken',
+    'FeatureNewKwargs',
+    'FeatureDeprecatedKwargs',
+
+    'InterpreterBase',
+
+    'SubProject',
+
+    'TV_fw_var',
+    'TV_fw_args',
+    'TV_fw_kwargs',
+    'TV_func',
+    'TYPE_elementary',
+    'TYPE_var',
+    'TYPE_nvar',
+    'TYPE_kwargs',
+    'TYPE_nkwargs',
+    'TYPE_key_resolver',
+    'TYPE_HoldableTypes',
+
+    'HoldableTypes',
+]
+
+from .baseobjects import (
+    InterpreterObject,
+    MesonInterpreterObject,
+    ObjectHolder,
+    IterableObject,
+    MutableInterpreterObject,
+    ContextManagerObject,
+
+    TV_fw_var,
+    TV_fw_args,
+    TV_fw_kwargs,
+    TV_func,
+    TYPE_elementary,
+    TYPE_var,
+    TYPE_nvar,
+    TYPE_kwargs,
+    TYPE_nkwargs,
+    TYPE_key_resolver,
+    TYPE_HoldableTypes,
+
+    SubProject,
+
+    HoldableTypes,
+)
+
+from .decorators import (
+    noPosargs,
+    noKwargs,
+    stringArgs,
+    noArgsFlattening,
+    noSecondLevelHolderResolving,
+    unholder_return,
+    disablerIfNotFound,
+    permittedKwargs,
+    typed_pos_args,
+    ContainerTypeInfo,
+    KwargInfo,
+    typed_operator,
+    unary_operator,
+    typed_kwargs,
+    FeatureCheckBase,
+    FeatureNew,
+    FeatureDeprecated,
+    FeatureBroken,
+    FeatureNewKwargs,
+    FeatureDeprecatedKwargs,
+)
+
+from .exceptions import (
+    InterpreterException,
+    InvalidCode,
+    InvalidArguments,
+    SubdirDoneRequest,
+    ContinueRequest,
+    BreakRequest,
+)
+
+from .disabler import Disabler, is_disabled
+from .helpers import default_resolve_key, flatten, resolve_second_level_holders
+from .interpreterbase import InterpreterBase
+from .operator import MesonOperator
diff --git a/vendored-meson/meson/mesonbuild/interpreterbase/_unholder.py b/vendored-meson/meson/mesonbuild/interpreterbase/_unholder.py
new file mode 100644
index 000000000000..4f1edc1027f1
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/interpreterbase/_unholder.py
@@ -0,0 +1,35 @@
+# Copyright 2013-2021 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+import typing as T
+
+from .baseobjects import InterpreterObject, MesonInterpreterObject, ObjectHolder, HoldableTypes
+from .exceptions import InvalidArguments
+from ..mesonlib import HoldableObject, MesonBugException
+
+if T.TYPE_CHECKING:
+    from .baseobjects import TYPE_var
+
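+# Unwrap an ObjectHolder to the value it holds, pass bare
+# MesonInterpreterObjects through unchanged, and raise for anything that
+# should have been held or cannot be passed on.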
+def _unholder(obj: InterpreterObject) -> TYPE_var:
+    if isinstance(obj, ObjectHolder):
+        assert isinstance(obj.held_object, HoldableTypes)
+        return obj.held_object
+    elif isinstance(obj, MesonInterpreterObject):
+        return obj
+    elif isinstance(obj, HoldableObject):
+        raise MesonBugException(f'Argument {obj} of type {type(obj).__name__} is not held by an ObjectHolder.')
+    elif isinstance(obj, InterpreterObject):
+        raise InvalidArguments(f'Argument {obj} of type {type(obj).__name__} cannot be passed to a method or function')
+    raise MesonBugException(f'Unknown object {obj} of type {type(obj).__name__} in the parameters.')
diff --git a/vendored-meson/meson/mesonbuild/interpreterbase/baseobjects.py b/vendored-meson/meson/mesonbuild/interpreterbase/baseobjects.py
new file mode 100644
index 000000000000..d5b8c947624c
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/interpreterbase/baseobjects.py
@@ -0,0 +1,187 @@
+# Copyright 2013-2021 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+from .. import mparser
+from .exceptions import InvalidCode, InvalidArguments
+from .helpers import flatten, resolve_second_level_holders
+from .operator import MesonOperator
+from ..mesonlib import HoldableObject, MesonBugException
+import textwrap
+
+import typing as T
+from abc import ABCMeta
+from contextlib import AbstractContextManager
+
+if T.TYPE_CHECKING:
+    from typing_extensions import Protocol
+
+    # Object holders need the actual interpreter
+    from ..interpreter import Interpreter
+
+    __T = T.TypeVar('__T', bound='TYPE_var', contravariant=True)
+
+    class OperatorCall(Protocol[__T]):
+        def __call__(self, other: __T) -> 'TYPE_var': ...
+
+TV_fw_var = T.Union[str, int, bool, list, dict, 'InterpreterObject']
+TV_fw_args = T.List[T.Union[mparser.BaseNode, TV_fw_var]]
+TV_fw_kwargs = T.Dict[str, T.Union[mparser.BaseNode, TV_fw_var]]
+
+TV_func = T.TypeVar('TV_func', bound=T.Callable[..., T.Any])
+
+TYPE_elementary = T.Union[str, int, bool, T.List[T.Any], T.Dict[str, T.Any]]
+TYPE_var = T.Union[TYPE_elementary, HoldableObject, 'MesonInterpreterObject']
+TYPE_nvar = T.Union[TYPE_var, mparser.BaseNode]
+TYPE_kwargs = T.Dict[str, TYPE_var]
+TYPE_nkwargs = T.Dict[str, TYPE_nvar]
+TYPE_key_resolver = T.Callable[[mparser.BaseNode], str]
+
+SubProject = T.NewType('SubProject', str)
+
+class InterpreterObject:
+    def __init__(self, *, subproject: T.Optional['SubProject'] = None) -> None:
+        self.methods: T.Dict[
+            str,
+            T.Callable[[T.List[TYPE_var], TYPE_kwargs], TYPE_var]
+        ] = {}
+        self.operators: T.Dict[MesonOperator, 'OperatorCall'] = {}
+        self.trivial_operators: T.Dict[
+            MesonOperator,
+            T.Tuple[
+                T.Union[T.Type, T.Tuple[T.Type, ...]],
+                'OperatorCall'
+            ]
+        ] = {}
+        # Current node set during a method call. This can be used as location
+        # when printing a warning message during a method call.
+        self.current_node: mparser.BaseNode = None
+        self.subproject = subproject or SubProject('')
+
+        # Some default operators supported by all objects
+        self.operators.update({
+            MesonOperator.EQUALS: self.op_equals,
+            MesonOperator.NOT_EQUALS: self.op_not_equals,
+        })
+
+    # The name of the object's type, as displayed to the user
+    def display_name(self) -> str:
+        return type(self).__name__
+
+    def method_call(
+                self,
+                method_name: str,
+                args: T.List[TYPE_var],
+                kwargs: TYPE_kwargs
+            ) -> TYPE_var:
+        if method_name in self.methods:
+            method = self.methods[method_name]
+            if not getattr(method, 'no-args-flattening', False):
+                args = flatten(args)
+            if not getattr(method, 'no-second-level-holder-flattening', False):
+                args, kwargs = resolve_second_level_holders(args, kwargs)
+            return method(args, kwargs)
+        raise InvalidCode(f'Unknown method "{method_name}" in object {self} of type {type(self).__name__}.')
+
+    def operator_call(self, operator: MesonOperator, other: TYPE_var) -> TYPE_var:
+        if operator in self.trivial_operators:
+            op = self.trivial_operators[operator]
+            if op[0] is None and other is not None:
+                raise MesonBugException(f'The unary operator `{operator.value}` of {self.display_name()} was passed the object {other} of type {type(other).__name__}')
+            if op[0] is not None and not isinstance(other, op[0]):
+                raise InvalidArguments(f'The `{operator.value}` operator of {self.display_name()} does not accept objects of type {type(other).__name__} ({other})')
+            return op[1](other)
+        if operator in self.operators:
+            return self.operators[operator](other)
+        raise InvalidCode(f'Object {self} of type {self.display_name()} does not support the `{operator.value}` operator.')
+
+    # Default comparison operator support
+    def _throw_comp_exception(self, other: TYPE_var, opt_type: str) -> T.NoReturn:
+        raise InvalidArguments(textwrap.dedent(
+            f'''
+                Trying to compare values of different types ({self.display_name()}, {type(other).__name__}) using {opt_type}.
+                This was previously undefined behavior and deprecated; as of 0.60.0 it is a hard error.
+            '''
+        ))
+
+    def op_equals(self, other: TYPE_var) -> bool:
+        # We use `type(...) == type(...)` here to enforce an *exact* match for comparison. We
+        # don't want comparisons to be possible where `isinstance(derived_obj, type(base_obj))`
+        # would pass because this comparison must never be true: `derived_obj == base_obj`
+        if type(self) != type(other):
+            self._throw_comp_exception(other, '==')
+        return self == other
+
+    def op_not_equals(self, other: TYPE_var) -> bool:
+        if type(self) != type(other):
+            self._throw_comp_exception(other, '!=')
+        return self != other
+
+class MesonInterpreterObject(InterpreterObject):
+    ''' All non-elementary objects and non-object-holders should be derived from this '''
+
+class MutableInterpreterObject:
+    ''' Dummy class to mark the object type as mutable '''
+
+HoldableTypes = (HoldableObject, int, bool, str, list, dict)
+TYPE_HoldableTypes = T.Union[TYPE_elementary, HoldableObject]
+InterpreterObjectTypeVar = T.TypeVar('InterpreterObjectTypeVar', bound=TYPE_HoldableTypes)
+
+class ObjectHolder(InterpreterObject, T.Generic[InterpreterObjectTypeVar]):
+    def __init__(self, obj: InterpreterObjectTypeVar, interpreter: 'Interpreter') -> None:
+        super().__init__(subproject=interpreter.subproject)
+        # This causes some type checkers to assume that obj is a base
+        # HoldableObject, not the specialized type, so only do this assert in
+        # non-type checking situations
+        if not T.TYPE_CHECKING:
+            assert isinstance(obj, HoldableTypes), f'This is a bug: Trying to hold object of type `{type(obj).__name__}` that is not in `{HoldableTypes}`'
+        self.held_object = obj
+        self.interpreter = interpreter
+        self.env = self.interpreter.environment
+
+    # Hide the object holder abstraction from the user
+    def display_name(self) -> str:
+        return type(self.held_object).__name__
+
+    # Override default comparison operators for the held object
+    def op_equals(self, other: TYPE_var) -> bool:
+        # See the comment from InterpreterObject why we are using `type()` here.
+        if type(self.held_object) != type(other):
+            self._throw_comp_exception(other, '==')
+        return self.held_object == other
+
+    def op_not_equals(self, other: TYPE_var) -> bool:
+        if type(self.held_object) != type(other):
+            self._throw_comp_exception(other, '!=')
+        return self.held_object != other
+
+    def __repr__(self) -> str:
+        return f'<[{type(self).__name__}] holds [{type(self.held_object).__name__}]: {self.held_object!r}>'
+
+class IterableObject(metaclass=ABCMeta):
+    '''Base class for all objects that can be iterated over in a foreach loop'''
+
+    def iter_tuple_size(self) -> T.Optional[int]:
+        '''Return the size of the tuple for each iteration. Returns None if only a single value is returned.'''
+        raise MesonBugException(f'iter_tuple_size not implemented for {self.__class__.__name__}')
+
+    def iter_self(self) -> T.Iterator[T.Union[TYPE_var, T.Tuple[TYPE_var, ...]]]:
+        raise MesonBugException(f'iter not implemented for {self.__class__.__name__}')
+
+    def size(self) -> int:
+        raise MesonBugException(f'size not implemented for {self.__class__.__name__}')
+
+class ContextManagerObject(MesonInterpreterObject, AbstractContextManager):
+    def __init__(self, subproject: 'SubProject') -> None:
+        super().__init__(subproject=subproject)
diff --git a/vendored-meson/meson/mesonbuild/interpreterbase/decorators.py b/vendored-meson/meson/mesonbuild/interpreterbase/decorators.py
new file mode 100644
index 000000000000..64e02c27067c
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/interpreterbase/decorators.py
@@ -0,0 +1,833 @@
+# Copyright 2013-2021 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+from .. import mesonlib, mlog
+from .disabler import Disabler
+from .exceptions import InterpreterException, InvalidArguments
+from ._unholder import _unholder
+
+from dataclasses import dataclass
+from functools import wraps
+import abc
+import itertools
+import copy
+import typing as T
+
+if T.TYPE_CHECKING:
+    from typing_extensions import Protocol
+
+    from .. import mparser
+    from .baseobjects import InterpreterObject, TV_func, TYPE_var, TYPE_kwargs
+    from .interpreterbase import SubProject
+    from .operator import MesonOperator
+
+    _TV_IntegerObject = T.TypeVar('_TV_IntegerObject', bound=InterpreterObject, contravariant=True)
+    _TV_ARG1 = T.TypeVar('_TV_ARG1', bound=TYPE_var, contravariant=True)
+
+    class FN_Operator(Protocol[_TV_IntegerObject, _TV_ARG1]):
+        def __call__(s, self: _TV_IntegerObject, other: _TV_ARG1) -> TYPE_var: ...
+    _TV_FN_Operator = T.TypeVar('_TV_FN_Operator', bound=FN_Operator)
+
+def get_callee_args(wrapped_args: T.Sequence[T.Any]) -> T.Tuple['mparser.BaseNode', T.List['TYPE_var'], 'TYPE_kwargs', 'SubProject']:
+    # First argument could be InterpreterBase, InterpreterObject or ModuleObject.
+    # In the case of a ModuleObject it is the 2nd argument (ModuleState) that
+    # contains the needed information.
+    s = wrapped_args[0]
+    if not hasattr(s, 'current_node'):
+        s = wrapped_args[1]
+    node = s.current_node
+    subproject = s.subproject
+    args = kwargs = None
+    if len(wrapped_args) >= 3:
+        args = wrapped_args[-2]
+        kwargs = wrapped_args[-1]
+    return node, args, kwargs, subproject
+
+def noPosargs(f: TV_func) -> TV_func:
+    @wraps(f)
+    def wrapped(*wrapped_args: T.Any, **wrapped_kwargs: T.Any) -> T.Any:
+        args = get_callee_args(wrapped_args)[1]
+        if args:
+            raise InvalidArguments('Function does not take positional arguments.')
+        return f(*wrapped_args, **wrapped_kwargs)
+    return T.cast('TV_func', wrapped)
+
+def noKwargs(f: TV_func) -> TV_func:
+    @wraps(f)
+    def wrapped(*wrapped_args: T.Any, **wrapped_kwargs: T.Any) -> T.Any:
+        kwargs = get_callee_args(wrapped_args)[2]
+        if kwargs:
+            raise InvalidArguments('Function does not take keyword arguments.')
+        return f(*wrapped_args, **wrapped_kwargs)
+    return T.cast('TV_func', wrapped)
+
+def stringArgs(f: TV_func) -> TV_func:
+    @wraps(f)
+    def wrapped(*wrapped_args: T.Any, **wrapped_kwargs: T.Any) -> T.Any:
+        args = get_callee_args(wrapped_args)[1]
+        if not isinstance(args, list):
+            mlog.debug('Not a list:', str(args))
+            raise InvalidArguments('Argument not a list.')
+        if not all(isinstance(s, str) for s in args):
+            mlog.debug('Element not a string:', str(args))
+            raise InvalidArguments('Arguments must be strings.')
+        return f(*wrapped_args, **wrapped_kwargs)
+    return T.cast('TV_func', wrapped)
+
+def noArgsFlattening(f: TV_func) -> TV_func:
+    setattr(f, 'no-args-flattening', True)  # noqa: B010
+    return f
+
+def noSecondLevelHolderResolving(f: TV_func) -> TV_func:
+    setattr(f, 'no-second-level-holder-flattening', True)  # noqa: B010
+    return f
+
+def unholder_return(f: TV_func) -> T.Callable[..., TYPE_var]:
+    @wraps(f)
+    def wrapped(*wrapped_args: T.Any, **wrapped_kwargs: T.Any) -> T.Any:
+        res = f(*wrapped_args, **wrapped_kwargs)
+        return _unholder(res)
+    return T.cast('T.Callable[..., TYPE_var]', wrapped)
+
+def disablerIfNotFound(f: TV_func) -> TV_func:
+    @wraps(f)
+    def wrapped(*wrapped_args: T.Any, **wrapped_kwargs: T.Any) -> T.Any:
+        kwargs = get_callee_args(wrapped_args)[2]
+        disabler = kwargs.pop('disabler', False)
+        ret = f(*wrapped_args, **wrapped_kwargs)
+        if disabler and not ret.found():
+            return Disabler()
+        return ret
+    return T.cast('TV_func', wrapped)
+
+@dataclass(repr=False, eq=False)
+class permittedKwargs:
+    permitted: T.Set[str]
+
+    def __call__(self, f: TV_func) -> TV_func:
+        @wraps(f)
+        def wrapped(*wrapped_args: T.Any, **wrapped_kwargs: T.Any) -> T.Any:
+            kwargs = get_callee_args(wrapped_args)[2]
+            unknowns = set(kwargs).difference(self.permitted)
+            if unknowns:
+                ustr = ', '.join([f'"{u}"' for u in sorted(unknowns)])
+                raise InvalidArguments(f'Got unknown keyword arguments {ustr}')
+            return f(*wrapped_args, **wrapped_kwargs)
+        return T.cast('TV_func', wrapped)
+
+def typed_operator(operator: MesonOperator,
+                   types: T.Union[T.Type, T.Tuple[T.Type, ...]]) -> T.Callable[['_TV_FN_Operator'], '_TV_FN_Operator']:
+    """Decorator that does type checking for operator calls.
+
+    The principle here is similar to typed_pos_args, but much simpler, since
+    only one other object is ever passed.
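+
+    A minimal sketch of intended use (the holder and operator chosen here are
+    illustrative only):
+
+    >>> @typed_operator(MesonOperator.PLUS, str)
+    ... def op_plus(self, other: str) -> str:
+    ...     return self.held_object + other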
+    """
+    def inner(f: '_TV_FN_Operator') -> '_TV_FN_Operator':
+        @wraps(f)
+        def wrapper(self: 'InterpreterObject', other: TYPE_var) -> TYPE_var:
+            if not isinstance(other, types):
+                raise InvalidArguments(f'The `{operator.value}` of {self.display_name()} does not accept objects of type {type(other).__name__} ({other})')
+            return f(self, other)
+        return T.cast('_TV_FN_Operator', wrapper)
+    return inner
+
+def unary_operator(operator: MesonOperator) -> T.Callable[['_TV_FN_Operator'], '_TV_FN_Operator']:
+    """Decorator that does type checking for unary operator calls.
+
+    This decorator is for unary operators that do not take any other objects.
+    It should be impossible for a user to accidentally break this. Triggering
+    this check always indicates a bug in the Meson interpreter.
+    """
+    def inner(f: '_TV_FN_Operator') -> '_TV_FN_Operator':
+        @wraps(f)
+        def wrapper(self: 'InterpreterObject', other: TYPE_var) -> TYPE_var:
+            if other is not None:
+                raise mesonlib.MesonBugException(f'The unary operator `{operator.value}` of {self.display_name()} was passed the object {other} of type {type(other).__name__}')
+            return f(self, other)
+        return T.cast('_TV_FN_Operator', wrapper)
+    return inner
+
+
+def typed_pos_args(name: str, *types: T.Union[T.Type, T.Tuple[T.Type, ...]],
+                   varargs: T.Optional[T.Union[T.Type, T.Tuple[T.Type, ...]]] = None,
+                   optargs: T.Optional[T.List[T.Union[T.Type, T.Tuple[T.Type, ...]]]] = None,
+                   min_varargs: int = 0, max_varargs: int = 0) -> T.Callable[..., T.Any]:
+    """Decorator that types type checking of positional arguments.
+
+    This supports two different models of optional arguments, the first is the
+    variadic argument model. Variadic arguments are a possibly bounded,
+    possibly unbounded number of arguments of the same type (unions are
+    supported). The second is the standard default value model, in this case
+    a number of optional arguments may be provided, but they are still
+    ordered, and they may have different types.
+
+    This function does not support mixing variadic and default arguments.
+
+    :name: The name of the decorated function (as displayed in error messages)
+    :varargs: The type(s) of any variadic arguments the function takes. If
+        None the function takes no variadic args
+    :min_varargs: the minimum number of variadic arguments taken
+    :max_varargs: the maximum number of variadic arguments taken. 0 means unlimited
+    :optargs: The types of any optional arguments taken. If None
+        then no optional parameters are taken.
+
+    Some examples of usage below:
+    >>> @typed_pos_args('mod.func', str, (str, int))
+    ... def func(self, state: ModuleState, args: T.Tuple[str, T.Union[str, int]], kwargs: T.Dict[str, T.Any]) -> T.Any:
+    ...     pass
+
+    >>> @typed_pos_args('method', str, varargs=str)
+    ... def method(self, node: BaseNode, args: T.Tuple[str, T.List[str]], kwargs: T.Dict[str, T.Any]) -> T.Any:
+    ...     pass
+
+    >>> @typed_pos_args('method', varargs=str, min_varargs=1)
+    ... def method(self, node: BaseNode, args: T.Tuple[T.List[str]], kwargs: T.Dict[str, T.Any]) -> T.Any:
+    ...     pass
+
+    >>> @typed_pos_args('method', str, optargs=[(str, int), str])
+    ... def method(self, node: BaseNode, args: T.Tuple[str, T.Optional[T.Union[str, int]], T.Optional[str]], kwargs: T.Dict[str, T.Any]) -> T.Any:
+    ...     pass
+
+    When should you choose `typed_pos_args('name', varargs=str,
+    min_varargs=1)` vs `typed_pos_args('name', str, varargs=str)`?
+
+    The answer has to do with the semantics of the function. If all of the
+    inputs are the same type (such as with `files()`), then the former is
+    correct: all of the arguments are string names of files. If the first
+    argument is something else, then it should be separated.
+    """
+    def inner(f: TV_func) -> TV_func:
+
+        @wraps(f)
+        def wrapper(*wrapped_args: T.Any, **wrapped_kwargs: T.Any) -> T.Any:
+            args = get_callee_args(wrapped_args)[1]
+
+            # These are implementation programming errors, end users should never see them.
+            assert isinstance(args, list), args
+            assert max_varargs >= 0, 'max_varargs cannot be negative'
+            assert min_varargs >= 0, 'min_varargs cannot be negative'
+            assert optargs is None or varargs is None, \
+                'varargs and optargs not supported together as this would be ambiguous'
+
+            num_args = len(args)
+            num_types = len(types)
+            a_types = types
+
+            if varargs:
+                min_args = num_types + min_varargs
+                max_args = num_types + max_varargs
+                if max_varargs == 0 and num_args < min_args:
+                    raise InvalidArguments(f'{name} takes at least {min_args} arguments, but got {num_args}.')
+                elif max_varargs != 0 and (num_args < min_args or num_args > max_args):
+                    raise InvalidArguments(f'{name} takes between {min_args} and {max_args} arguments, but got {num_args}.')
+            elif optargs:
+                if num_args < num_types:
+                    raise InvalidArguments(f'{name} takes at least {num_types} arguments, but got {num_args}.')
+                elif num_args > num_types + len(optargs):
+                    raise InvalidArguments(f'{name} takes at most {num_types + len(optargs)} arguments, but got {num_args}.')
+                # Add the number of positional arguments required
+                if num_args > num_types:
+                    diff = num_args - num_types
+                    a_types = tuple(list(types) + list(optargs[:diff]))
+            elif num_args != num_types:
+                raise InvalidArguments(f'{name} takes exactly {num_types} arguments, but got {num_args}.')
+
+            for i, (arg, type_) in enumerate(itertools.zip_longest(args, a_types, fillvalue=varargs), start=1):
+                if not isinstance(arg, type_):
+                    if isinstance(type_, tuple):
+                        shouldbe = 'one of: {}'.format(", ".join(f'"{t.__name__}"' for t in type_))
+                    else:
+                        shouldbe = f'"{type_.__name__}"'
+                    raise InvalidArguments(f'{name} argument {i} was of type "{type(arg).__name__}" but should have been {shouldbe}')
+
+            # Ensure that we're actually passing a tuple.
+            # Depending on what kind of function we're calling the length of
+            # wrapped_args can vary.
+            nargs = list(wrapped_args)
+            i = nargs.index(args)
+            if varargs:
+                # if we have varargs we need to split them into a separate
+                # tuple, as python's typing doesn't understand tuples with
+                # fixed elements and variadic elements, only one or the other.
+                # so in that case we need T.Tuple[int, str, float, T.Tuple[str, ...]]
+                pos = args[:len(types)]
+                var = list(args[len(types):])
+                pos.append(var)
+                nargs[i] = tuple(pos)
+            elif optargs:
+                if num_args < num_types + len(optargs):
+                    diff = num_types + len(optargs) - num_args
+                    nargs[i] = tuple(list(args) + [None] * diff)
+                else:
+                    nargs[i] = args
+            else:
+                nargs[i] = tuple(args)
+            return f(*nargs, **wrapped_kwargs)
+
+        return T.cast('TV_func', wrapper)
+    return inner
+
+
+class ContainerTypeInfo:
+
+    """Container information for keyword arguments.
+
+    For keyword arguments that are containers (list or dict), this class encodes
+    that information.
+
+    :param container: the type of container
+    :param contains: the types the container holds
+    :param pairs: whether the container must be of even length.
+        This is mainly used for interfaces that predate the addition of
+        dictionaries and use the `[key, value, key2, value2]` format.
+    :param allow_empty: Whether this container is allowed to be empty
+        There are some cases where containers not only must be passed, but must
+        not be empty, and other cases where an empty container is allowed.
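+
+    Some illustrative checks (mirroring the doctest style of typed_pos_args
+    above):
+
+    >>> ContainerTypeInfo(list, str).check(['a', 'b'])
+    True
+    >>> ContainerTypeInfo(list, str, allow_empty=False).check([])
+    False
+    >>> ContainerTypeInfo(dict, int).check({'a': 1})
+    True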
+    """
+
+    def __init__(self, container: T.Type, contains: T.Union[T.Type, T.Tuple[T.Type, ...]], *,
+                 pairs: bool = False, allow_empty: bool = True):
+        self.container = container
+        self.contains = contains
+        self.pairs = pairs
+        self.allow_empty = allow_empty
+
+    def check(self, value: T.Any) -> bool:
+        """Check that a value is valid.
+
+        :param value: A value to check
+        :return: True if it is valid, False otherwise
+        """
+        if not isinstance(value, self.container):
+            return False
+        iter_ = iter(value.values()) if isinstance(value, dict) else iter(value)
+        if any(not isinstance(i, self.contains) for i in iter_):
+            return False
+        if self.pairs and len(value) % 2 != 0:
+            return False
+        if not value and not self.allow_empty:
+            return False
+        return True
+
+    def check_any(self, value: T.Any) -> bool:
+        """Check a value should emit new/deprecated feature.
+
+        :param value: A value to check
+        :return: True if any of the items in value matches, False otherwise
+        """
+        if not isinstance(value, self.container):
+            return False
+        iter_ = iter(value.values()) if isinstance(value, dict) else iter(value)
+        return any(isinstance(i, self.contains) for i in iter_)
+
+    def description(self) -> str:
+        """Human readable description of this container type.
+
+        :return: string to be printed
+        """
+        container = 'dict' if self.container is dict else 'array'
+        if isinstance(self.contains, tuple):
+            contains = ' | '.join([t.__name__ for t in self.contains])
+        else:
+            contains = self.contains.__name__
+        s = f'{container}[{contains}]'
+        if self.pairs:
+            s += ' that has even size'
+        if not self.allow_empty:
+            s += ' that cannot be empty'
+        return s
+
+_T = T.TypeVar('_T')
+
+class _NULL_T:
+    """Special null type for evolution, this is an implementation detail."""
+
+
+_NULL = _NULL_T()
+
+class KwargInfo(T.Generic[_T]):
+
+    """A description of a keyword argument to a meson function
+
+    This is used to describe a value to the :func:typed_kwargs function.
+
+    :param name: the name of the parameter
+    :param types: A type or tuple of types that are allowed, or a :class:ContainerType
+    :param required: Whether this is a required keyword argument. Defaults to False
+    :param listify: If true, then the argument will be listified before being
+        checked. This is useful for cases where the Meson DSL allows a scalar or
+        a container, but internally we only want to work with containers
+    :param default: A default value to use if this isn't set. Defaults to None.
+        This may safely be set to a mutable type, as long as that type does not
+        itself contain mutable types; typed_kwargs will copy the default
+    :param since: Meson version in which this argument was added. Defaults to None
+    :param since_message: An extra message to pass to FeatureNew when since is triggered
+    :param deprecated: Meson version in which this argument was deprecated. Defaults to None
+    :param deprecated_message: An extra message to pass to FeatureDeprecated
+        when deprecated is triggered
+    :param validator: A callable that does additional validation. This is mainly
+        intended for cases where a string is expected, but only a few specific
+        values are accepted. Must return None if the input is valid, or a
+        message if the input is invalid
+    :param convertor: A callable that converts the raw input value into a
+        different type. This is intended for cases such as the meson DSL using a
+        string, but the implementation using an Enum. This should not do
+        validation, just conversion.
+    :param deprecated_values: A dictionary mapping a value to the version of
+        Meson it was deprecated in. The value may be any valid value for this
+        argument.
+    :param since_values: A dictionary mapping a value to the version of Meson
+        it was added in.
+    :param not_set_warning: A warning message that is logged if the kwarg is not
+        set by the user.
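+
+    A hedged example of a declaration (the kwarg name, default, and accepted
+    values are hypothetical):
+
+    >>> FORMAT_KW = KwargInfo(
+    ...     'format', str, default='plain',
+    ...     validator=lambda x: None if x in {'plain', 'json'} else f'unknown format {x!r}')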
+    """
+    def __init__(self, name: str,
+                 types: T.Union[T.Type[_T], T.Tuple[T.Union[T.Type[_T], ContainerTypeInfo], ...], ContainerTypeInfo],
+                 *, required: bool = False, listify: bool = False,
+                 default: T.Optional[_T] = None,
+                 since: T.Optional[str] = None,
+                 since_message: T.Optional[str] = None,
+                 since_values: T.Optional[T.Dict[T.Union[_T, ContainerTypeInfo, type], T.Union[str, T.Tuple[str, str]]]] = None,
+                 deprecated: T.Optional[str] = None,
+                 deprecated_message: T.Optional[str] = None,
+                 deprecated_values: T.Optional[T.Dict[T.Union[_T, ContainerTypeInfo, type], T.Union[str, T.Tuple[str, str]]]] = None,
+                 validator: T.Optional[T.Callable[[T.Any], T.Optional[str]]] = None,
+                 convertor: T.Optional[T.Callable[[_T], object]] = None,
+                 not_set_warning: T.Optional[str] = None):
+        self.name = name
+        self.types = types
+        self.required = required
+        self.listify = listify
+        self.default = default
+        self.since = since
+        self.since_message = since_message
+        self.since_values = since_values
+        self.deprecated = deprecated
+        self.deprecated_message = deprecated_message
+        self.deprecated_values = deprecated_values
+        self.validator = validator
+        self.convertor = convertor
+        self.not_set_warning = not_set_warning
+
+    def evolve(self, *,
+               name: T.Union[str, _NULL_T] = _NULL,
+               required: T.Union[bool, _NULL_T] = _NULL,
+               listify: T.Union[bool, _NULL_T] = _NULL,
+               default: T.Union[_T, None, _NULL_T] = _NULL,
+               since: T.Union[str, None, _NULL_T] = _NULL,
+               since_message: T.Union[str, None, _NULL_T] = _NULL,
+               since_values: T.Union[T.Dict[T.Union[_T, ContainerTypeInfo, type], T.Union[str, T.Tuple[str, str]]], None, _NULL_T] = _NULL,
+               deprecated: T.Union[str, None, _NULL_T] = _NULL,
+               deprecated_message: T.Union[str, None, _NULL_T] = _NULL,
+               deprecated_values: T.Union[T.Dict[T.Union[_T, ContainerTypeInfo, type], T.Union[str, T.Tuple[str, str]]], None, _NULL_T] = _NULL,
+               validator: T.Union[T.Callable[[_T], T.Optional[str]], None, _NULL_T] = _NULL,
+               convertor: T.Union[T.Callable[[_T], TYPE_var], None, _NULL_T] = _NULL) -> 'KwargInfo':
+        """Create a shallow copy of this KwargInfo, with modifications.
+
+        This allows us to create a new copy of a KwargInfo with modifications.
+        This allows us to use a shared kwarg that implements complex logic, but
+        has slight differences in usage, such as being added to different
+        functions in different versions of Meson.
+
+        The use the _NULL special value here allows us to pass None, which has
+        meaning in many of these cases. _NULL itself is never stored, always
+        being replaced by either the copy in self, or the provided new version.
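+
+        For example, a shared kwarg can be reused under a different name, as
+        done for DEFAULT_OPTIONS in the type-checking hunk earlier in this
+        diff:
+
+        >>> DEFAULT_OPTIONS = OVERRIDE_OPTIONS_KW.evolve(name='default_options')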
+        """
+        return type(self)(
+            name if not isinstance(name, _NULL_T) else self.name,
+            self.types,
+            listify=listify if not isinstance(listify, _NULL_T) else self.listify,
+            required=required if not isinstance(required, _NULL_T) else self.required,
+            default=default if not isinstance(default, _NULL_T) else self.default,
+            since=since if not isinstance(since, _NULL_T) else self.since,
+            since_message=since_message if not isinstance(since_message, _NULL_T) else self.since_message,
+            since_values=since_values if not isinstance(since_values, _NULL_T) else self.since_values,
+            deprecated=deprecated if not isinstance(deprecated, _NULL_T) else self.deprecated,
+            deprecated_message=deprecated_message if not isinstance(deprecated_message, _NULL_T) else self.deprecated_message,
+            deprecated_values=deprecated_values if not isinstance(deprecated_values, _NULL_T) else self.deprecated_values,
+            validator=validator if not isinstance(validator, _NULL_T) else self.validator,
+            convertor=convertor if not isinstance(convertor, _NULL_T) else self.convertor,
+        )
+
+
+def typed_kwargs(name: str, *types: KwargInfo, allow_unknown: bool = False) -> T.Callable[..., T.Any]:
+    """Decorator for type checking keyword arguments.
+
+    Used to wrap a meson DSL implementation function, where it checks various
+    things about keyword arguments, including the type, and various other
+    information. For non-required values it sets the value to a default, which
+    means the value will always be provided.
+
+    If the type is a :class:ContainerTypeInfo, then the default value will be
+    passed as an argument to the container initializer, making a shallow copy.
+
+    :param name: the name of the function, including the object it's attached to
+        (if applicable)
+    :param *types: KwargInfo entries for each keyword argument.
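+
+    A minimal sketch of intended use (the function and kwarg are hypothetical):
+
+    >>> @typed_kwargs('mod.func', KwargInfo('fast', bool, default=False))
+    ... def func(self, node: BaseNode, args: T.List[TYPE_var], kwargs: T.Dict[str, T.Any]) -> T.Any:
+    ...     assert isinstance(kwargs['fast'], bool)  # default is filled in when unset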
+    """
+    def inner(f: TV_func) -> TV_func:
+
+        def types_description(types_tuple: T.Tuple[T.Union[T.Type, ContainerTypeInfo], ...]) -> str:
+            candidates = []
+            for t in types_tuple:
+                if isinstance(t, ContainerTypeInfo):
+                    candidates.append(t.description())
+                else:
+                    candidates.append(t.__name__)
+            shouldbe = 'one of: ' if len(candidates) > 1 else ''
+            shouldbe += ', '.join(candidates)
+            return shouldbe
+
+        def raw_description(t: object) -> str:
+            """describe a raw type (ie, one that is not a ContainerTypeInfo)."""
+            if isinstance(t, list):
+                if t:
+                    return f"array[{' | '.join(sorted(mesonlib.OrderedSet(type(v).__name__ for v in t)))}]"
+                return 'array[]'
+            elif isinstance(t, dict):
+                if t:
+                    return f"dict[{' | '.join(sorted(mesonlib.OrderedSet(type(v).__name__ for v in t.values())))}]"
+                return 'dict[]'
+            return type(t).__name__
+
+        def check_value_type(types_tuple: T.Tuple[T.Union[T.Type, ContainerTypeInfo], ...],
+                             value: T.Any) -> bool:
+            for t in types_tuple:
+                if isinstance(t, ContainerTypeInfo):
+                    if t.check(value):
+                        return True
+                elif isinstance(value, t):
+                    return True
+            return False
+
+        @wraps(f)
+        def wrapper(*wrapped_args: T.Any, **wrapped_kwargs: T.Any) -> T.Any:
+
+            def emit_feature_change(values: T.Dict[_T, T.Union[str, T.Tuple[str, str]]], feature: T.Union[T.Type['FeatureDeprecated'], T.Type['FeatureNew']]) -> None:
+                for n, version in values.items():
+                    if isinstance(version, tuple):
+                        version, msg = version
+                    else:
+                        msg = None
+
+                    warning: T.Optional[str] = None
+                    if isinstance(n, ContainerTypeInfo):
+                        if n.check_any(value):
+                            warning = f'of type {n.description()}'
+                    elif isinstance(n, type):
+                        if isinstance(value, n):
+                            warning = f'of type {n.__name__}'
+                    elif isinstance(value, list):
+                        if n in value:
+                            warning = f'value "{n}" in list'
+                    elif isinstance(value, dict):
+                        if n in value.keys():
+                            warning = f'value "{n}" in dict keys'
+                    elif n == value:
+                        warning = f'value "{n}"'
+                    if warning:
+                        feature.single_use(f'"{name}" keyword argument "{info.name}" {warning}', version, subproject, msg, location=node)
+
+            node, _, _kwargs, subproject = get_callee_args(wrapped_args)
+            # Cast here, as the convertor function may place something other than a TYPE_var in the kwargs
+            kwargs = T.cast('T.Dict[str, object]', _kwargs)
+
+            if not allow_unknown:
+                all_names = {t.name for t in types}
+                unknowns = set(kwargs).difference(all_names)
+                if unknowns:
+                    ustr = ', '.join([f'"{u}"' for u in sorted(unknowns)])
+                    raise InvalidArguments(f'{name} got unknown keyword arguments {ustr}')
+
+            for info in types:
+                types_tuple = info.types if isinstance(info.types, tuple) else (info.types,)
+                value = kwargs.get(info.name)
+                if value is not None:
+                    if info.since:
+                        feature_name = info.name + ' arg in ' + name
+                        FeatureNew.single_use(feature_name, info.since, subproject, info.since_message, location=node)
+                    if info.deprecated:
+                        feature_name = info.name + ' arg in ' + name
+                        FeatureDeprecated.single_use(feature_name, info.deprecated, subproject, info.deprecated_message, location=node)
+                    if info.listify:
+                        kwargs[info.name] = value = mesonlib.listify(value)
+                    if not check_value_type(types_tuple, value):
+                        shouldbe = types_description(types_tuple)
+                        raise InvalidArguments(f'{name} keyword argument {info.name!r} was of type {raw_description(value)} but should have been {shouldbe}')
+
+                    if info.validator is not None:
+                        msg = info.validator(value)
+                        if msg is not None:
+                            raise InvalidArguments(f'{name} keyword argument "{info.name}" {msg}')
+
+                    if info.deprecated_values is not None:
+                        emit_feature_change(info.deprecated_values, FeatureDeprecated)
+
+                    if info.since_values is not None:
+                        emit_feature_change(info.since_values, FeatureNew)
+
+                elif info.required:
+                    raise InvalidArguments(f'{name} is missing required keyword argument "{info.name}"')
+                else:
+                    # Set the value to the default, ensuring all kwargs are present.
+                    # This simplifies both the type checking and the usage.
+                    assert check_value_type(types_tuple, info.default), f'In function {name} default value of {info.name} is not a valid type, got {type(info.default)} expected {types_description(types_tuple)}'
+                    # Create a shallow copy of the container. This allows mutable
+                    # types to be used safely as default values
+                    kwargs[info.name] = copy.copy(info.default)
+                    if info.not_set_warning:
+                        mlog.warning(info.not_set_warning)
+
+                if info.convertor:
+                    kwargs[info.name] = info.convertor(kwargs[info.name])
+
+            return f(*wrapped_args, **wrapped_kwargs)
+        return T.cast('TV_func', wrapper)
+    return inner
+
+
+# This cannot be a dataclass due to https://github.com/python/mypy/issues/5374
+class FeatureCheckBase(metaclass=abc.ABCMeta):
+    "Base class for feature version checks"
+
+    feature_registry: T.ClassVar[T.Dict[str, T.Dict[str, T.Set[T.Tuple[str, T.Optional['mparser.BaseNode']]]]]]
+    emit_notice = False
+    unconditional = False
+
+    def __init__(self, feature_name: str, feature_version: str, extra_message: str = ''):
+        self.feature_name = feature_name  # type: str
+        self.feature_version = feature_version    # type: str
+        self.extra_message = extra_message  # type: str
+
+    @staticmethod
+    def get_target_version(subproject: str) -> str:
+        # Don't do any checks if project() has not been parsed yet
+        if subproject not in mesonlib.project_meson_versions:
+            return ''
+        return mesonlib.project_meson_versions[subproject]
+
+    @staticmethod
+    @abc.abstractmethod
+    def check_version(target_version: str, feature_version: str) -> bool:
+        pass
+
+    def use(self, subproject: 'SubProject', location: T.Optional['mparser.BaseNode'] = None) -> None:
+        tv = self.get_target_version(subproject)
+        # No target version
+        if tv == '' and not self.unconditional:
+            return
+        # Target version is new enough, don't warn
+        if self.check_version(tv, self.feature_version) and not self.emit_notice:
+            return
+        # Feature is too new for target version or we want to emit notices, register it
+        if subproject not in self.feature_registry:
+            self.feature_registry[subproject] = {self.feature_version: set()}
+        register = self.feature_registry[subproject]
+        if self.feature_version not in register:
+            register[self.feature_version] = set()
+
+        feature_key = (self.feature_name, location)
+        if feature_key in register[self.feature_version]:
+            # Don't warn about the same feature multiple times
+            # FIXME: This is needed to prevent duplicate warnings, but also
+            # means we won't warn about a feature used in multiple places.
+            return
+        register[self.feature_version].add(feature_key)
+        # Target version is new enough, don't warn even if it is registered for notice
+        if self.check_version(tv, self.feature_version):
+            return
+        self.log_usage_warning(tv, location)
+
+    @classmethod
+    def report(cls, subproject: str) -> None:
+        if subproject not in cls.feature_registry:
+            return
+        warning_str = cls.get_warning_str_prefix(cls.get_target_version(subproject))
+        notice_str = cls.get_notice_str_prefix(cls.get_target_version(subproject))
+        fv = cls.feature_registry[subproject]
+        tv = cls.get_target_version(subproject)
+        for version in sorted(fv.keys()):
+            message = ', '.join(sorted({f"'{i[0]}'" for i in fv[version]}))
+            if cls.check_version(tv, version):
+                notice_str += '\n * {}: {{{}}}'.format(version, message)
+            else:
+                warning_str += '\n * {}: {{{}}}'.format(version, message)
+        if '\n' in notice_str:
+            mlog.notice(notice_str, fatal=False)
+        if '\n' in warning_str:
+            mlog.warning(warning_str)
+
+    def log_usage_warning(self, tv: str, location: T.Optional['mparser.BaseNode']) -> None:
+        raise InterpreterException('log_usage_warning not implemented')
+
+    @staticmethod
+    def get_warning_str_prefix(tv: str) -> str:
+        raise InterpreterException('get_warning_str_prefix not implemented')
+
+    @staticmethod
+    def get_notice_str_prefix(tv: str) -> str:
+        raise InterpreterException('get_notice_str_prefix not implemented')
+
+    def __call__(self, f: TV_func) -> TV_func:
+        @wraps(f)
+        def wrapped(*wrapped_args: T.Any, **wrapped_kwargs: T.Any) -> T.Any:
+            node, _, _, subproject = get_callee_args(wrapped_args)
+            if subproject is None:
+                raise AssertionError(f'{wrapped_args!r}')
+            self.use(subproject, node)
+            return f(*wrapped_args, **wrapped_kwargs)
+        return T.cast('TV_func', wrapped)
+
+    @classmethod
+    def single_use(cls, feature_name: str, version: str, subproject: 'SubProject',
+                   extra_message: str = '', location: T.Optional['mparser.BaseNode'] = None) -> None:
+        """Oneline version that instantiates and calls use()."""
+        cls(feature_name, version, extra_message).use(subproject, location)
+
+
+class FeatureNew(FeatureCheckBase):
+    """Checks for new features"""
+
+    # Class variable, shared across all instances
+    #
+    # Format: {subproject: {feature_version: set(feature_names)}}
+    feature_registry = {}
+
+    @staticmethod
+    def check_version(target_version: str, feature_version: str) -> bool:
+        return mesonlib.version_compare_condition_with_min(target_version, feature_version)
+
+    @staticmethod
+    def get_warning_str_prefix(tv: str) -> str:
+        return f'Project specifies a minimum meson_version \'{tv}\' but uses features which were added in newer versions:'
+
+    @staticmethod
+    def get_notice_str_prefix(tv: str) -> str:
+        return ''
+
+    def log_usage_warning(self, tv: str, location: T.Optional['mparser.BaseNode']) -> None:
+        args = [
+            'Project targets', f"'{tv}'",
+            'but uses feature introduced in',
+            f"'{self.feature_version}':",
+            f'{self.feature_name}.',
+        ]
+        if self.extra_message:
+            args.append(self.extra_message)
+        mlog.warning(*args, location=location)
+
+class FeatureDeprecated(FeatureCheckBase):
+    """Checks for deprecated features"""
+
+    # Class variable, shared across all instances
+    #
+    # Format: {subproject: {feature_version: set(feature_names)}}
+    feature_registry = {}
+    emit_notice = True
+
+    @staticmethod
+    def check_version(target_version: str, feature_version: str) -> bool:
+        # For deprecation checks we need to return the inverse of FeatureNew checks
+        return not mesonlib.version_compare_condition_with_min(target_version, feature_version)
+
+    @staticmethod
+    def get_warning_str_prefix(tv: str) -> str:
+        return 'Deprecated features used:'
+
+    @staticmethod
+    def get_notice_str_prefix(tv: str) -> str:
+        return 'Future-deprecated features used:'
+
+    def log_usage_warning(self, tv: str, location: T.Optional['mparser.BaseNode']) -> None:
+        args = [
+            'Project targets', f"'{tv}'",
+            'but uses feature deprecated since',
+            f"'{self.feature_version}':",
+            f'{self.feature_name}.',
+        ]
+        if self.extra_message:
+            args.append(self.extra_message)
+        mlog.warning(*args, location=location)
+
+
+class FeatureBroken(FeatureCheckBase):
+    """Checks for broken features"""
+
+    # Class variable, shared across all instances
+    #
+    # Format: {subproject: {feature_version: set(feature_names)}}
+    feature_registry = {}
+    unconditional = True
+
+    @staticmethod
+    def check_version(target_version: str, feature_version: str) -> bool:
+        # always warn for broken stuff
+        return False
+
+    @staticmethod
+    def get_warning_str_prefix(tv: str) -> str:
+        return 'Broken features used:'
+
+    @staticmethod
+    def get_notice_str_prefix(tv: str) -> str:
+        return ''
+
+    def log_usage_warning(self, tv: str, location: T.Optional['mparser.BaseNode']) -> None:
+        args = [
+            'Project uses feature that was always broken,',
+            'and is now deprecated since',
+            f"'{self.feature_version}':",
+            f'{self.feature_name}.',
+        ]
+        if self.extra_message:
+            args.append(self.extra_message)
+        mlog.deprecation(*args, location=location)
+
+
+# This cannot be a dataclass due to https://github.com/python/mypy/issues/5374
+class FeatureCheckKwargsBase(metaclass=abc.ABCMeta):
+
+    @property
+    @abc.abstractmethod
+    def feature_check_class(self) -> T.Type[FeatureCheckBase]:
+        pass
+
+    def __init__(self, feature_name: str, feature_version: str,
+                 kwargs: T.List[str], extra_message: T.Optional[str] = None):
+        self.feature_name = feature_name
+        self.feature_version = feature_version
+        self.kwargs = kwargs
+        self.extra_message = extra_message
+
+    def __call__(self, f: TV_func) -> TV_func:
+        @wraps(f)
+        def wrapped(*wrapped_args: T.Any, **wrapped_kwargs: T.Any) -> T.Any:
+            node, _, kwargs, subproject = get_callee_args(wrapped_args)
+            if subproject is None:
+                raise AssertionError(f'{wrapped_args!r}')
+            for arg in self.kwargs:
+                if arg not in kwargs:
+                    continue
+                name = arg + ' arg in ' + self.feature_name
+                self.feature_check_class.single_use(
+                        name, self.feature_version, subproject, self.extra_message, node)
+            return f(*wrapped_args, **wrapped_kwargs)
+        return T.cast('TV_func', wrapped)
+
+class FeatureNewKwargs(FeatureCheckKwargsBase):
+    feature_check_class = FeatureNew
+
+class FeatureDeprecatedKwargs(FeatureCheckKwargsBase):
+    feature_check_class = FeatureDeprecated
diff --git a/vendored-meson/meson/mesonbuild/interpreterbase/disabler.py b/vendored-meson/meson/mesonbuild/interpreterbase/disabler.py
new file mode 100644
index 000000000000..182bb625ccc3
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/interpreterbase/disabler.py
@@ -0,0 +1,45 @@
+# Copyright 2013-2021 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+import typing as T
+
+from .baseobjects import MesonInterpreterObject
+
+if T.TYPE_CHECKING:
+    from .baseobjects import TYPE_var, TYPE_kwargs
+
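+# A Disabler stands in for a disabled dependency or option: found() returns
+# False and any other method call yields another Disabler, so the disabled
+# state propagates through expressions that use it.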
+class Disabler(MesonInterpreterObject):
+    def method_call(self, method_name: str, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> TYPE_var:
+        if method_name == 'found':
+            return False
+        return Disabler()
+
+def _is_arg_disabled(arg: T.Any) -> bool:
+    if isinstance(arg, Disabler):
+        return True
+    if isinstance(arg, list):
+        for i in arg:
+            if _is_arg_disabled(i):
+                return True
+    return False
+
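+# Return True if any positional or keyword argument (including nested lists)
+# contains a Disabler.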
+def is_disabled(args: T.Sequence[T.Any], kwargs: T.Dict[str, T.Any]) -> bool:
+    for i in args:
+        if _is_arg_disabled(i):
+            return True
+    for i in kwargs.values():
+        if _is_arg_disabled(i):
+            return True
+    return False
diff --git a/vendored-meson/meson/mesonbuild/interpreterbase/exceptions.py b/vendored-meson/meson/mesonbuild/interpreterbase/exceptions.py
new file mode 100644
index 000000000000..cdbe0fb3b059
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/interpreterbase/exceptions.py
@@ -0,0 +1,33 @@
+# Copyright 2013-2021 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from ..mesonlib import MesonException
+
+class InterpreterException(MesonException):
+    pass
+
+class InvalidCode(InterpreterException):
+    pass
+
+class InvalidArguments(InterpreterException):
+    pass
+
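+# The control-flow requests below derive from BaseException so that generic
+# `except Exception` handlers do not swallow them.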
+class SubdirDoneRequest(BaseException):
+    pass
+
+class ContinueRequest(BaseException):
+    pass
+
+class BreakRequest(BaseException):
+    pass
diff --git a/vendored-meson/meson/mesonbuild/interpreterbase/helpers.py b/vendored-meson/meson/mesonbuild/interpreterbase/helpers.py
new file mode 100644
index 000000000000..2196b4e738ee
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/interpreterbase/helpers.py
@@ -0,0 +1,56 @@
+# Copyright 2013-2021 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+from .. import mesonlib, mparser
+from .exceptions import InterpreterException
+
+import collections.abc
+import typing as T
+
+if T.TYPE_CHECKING:
+    from .baseobjects import TYPE_var, TYPE_kwargs
+
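+# Flatten arbitrarily nested lists into a single flat list, reducing any
+# mparser.StringNode elements to their plain string values.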
+def flatten(args: T.Union['TYPE_var', T.List['TYPE_var']]) -> T.List['TYPE_var']:
+    if isinstance(args, mparser.StringNode):
+        assert isinstance(args.value, str)
+        return [args.value]
+    if not isinstance(args, collections.abc.Sequence):
+        return [args]
+    result: T.List['TYPE_var'] = []
+    for a in args:
+        if isinstance(a, list):
+            rest = flatten(a)
+            result = result + rest
+        elif isinstance(a, mparser.StringNode):
+            result.append(a.value)
+        else:
+            result.append(a)
+    return result
+
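+# Recursively replace every SecondLevelHolder in args and kwargs (including
+# inside lists and dicts) with its default object.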
+def resolve_second_level_holders(args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> T.Tuple[T.List['TYPE_var'], 'TYPE_kwargs']:
+    def resolver(arg: 'TYPE_var') -> 'TYPE_var':
+        if isinstance(arg, list):
+            return [resolver(x) for x in arg]
+        if isinstance(arg, dict):
+            return {k: resolver(v) for k, v in arg.items()}
+        if isinstance(arg, mesonlib.SecondLevelHolder):
+            return arg.get_default_object()
+        return arg
+    return [resolver(x) for x in args], {k: resolver(v) for k, v in kwargs.items()}
+
+def default_resolve_key(key: mparser.BaseNode) -> str:
+    if not isinstance(key, mparser.IdNode):
+        raise InterpreterException('Invalid kwargs format.')
+    return key.value
diff --git a/vendored-meson/meson/mesonbuild/interpreterbase/interpreterbase.py b/vendored-meson/meson/mesonbuild/interpreterbase/interpreterbase.py
new file mode 100644
index 000000000000..5f854d0fae27
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/interpreterbase/interpreterbase.py
@@ -0,0 +1,666 @@
+# Copyright 2016-2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This class contains the basic functionality needed to run any interpreter
+# or an interpreter-based tool.
+from __future__ import annotations
+
+from .. import environment, mparser, mesonlib
+
+from .baseobjects import (
+    InterpreterObject,
+    MesonInterpreterObject,
+    MutableInterpreterObject,
+    InterpreterObjectTypeVar,
+    ObjectHolder,
+    IterableObject,
+    ContextManagerObject,
+
+    HoldableTypes,
+)
+
+from .exceptions import (
+    BreakRequest,
+    ContinueRequest,
+    InterpreterException,
+    InvalidArguments,
+    InvalidCode,
+    MesonException,
+    SubdirDoneRequest,
+)
+
+from .decorators import FeatureNew
+from .disabler import Disabler, is_disabled
+from .helpers import default_resolve_key, flatten, resolve_second_level_holders
+from .operator import MesonOperator
+from ._unholder import _unholder
+
+import os, copy, re, pathlib
+import typing as T
+import textwrap
+
+if T.TYPE_CHECKING:
+    from .baseobjects import SubProject, TYPE_kwargs, TYPE_var
+    from ..interpreter import Interpreter
+
+    HolderMapType = T.Dict[
+        T.Union[
+            T.Type[mesonlib.HoldableObject],
+            T.Type[int],
+            T.Type[bool],
+            T.Type[str],
+            T.Type[list],
+            T.Type[dict],
+        ],
+        # For some reason, this has to be a callable and can't just be ObjectHolder[InterpreterObjectTypeVar]
+        T.Callable[[InterpreterObjectTypeVar, 'Interpreter'], ObjectHolder[InterpreterObjectTypeVar]]
+    ]
+
+    FunctionType = T.Dict[
+        str,
+        T.Callable[[mparser.BaseNode, T.List[TYPE_var], T.Dict[str, TYPE_var]], TYPE_var]
+    ]
+
+
+class InvalidCodeOnVoid(InvalidCode):
+
+    def __init__(self, op_type: str) -> None:
+        super().__init__(f'Cannot perform {op_type!r} operation on void statement.')
+
+
+class InterpreterBase:
+    def __init__(self, source_root: str, subdir: str, subproject: 'SubProject'):
+        self.source_root = source_root
+        self.funcs: FunctionType = {}
+        self.builtin: T.Dict[str, InterpreterObject] = {}
+        # Holder maps store a mapping from a HoldableObject type to its ObjectHolder class
+        self.holder_map: HolderMapType = {}
+        self.bound_holder_map: HolderMapType = {}
+        self.subdir = subdir
+        self.root_subdir = subdir
+        self.subproject = subproject
+        self.variables: T.Dict[str, InterpreterObject] = {}
+        self.argument_depth = 0
+        self.current_lineno = -1
+        # Current node set during a function call. This can be used as location
+        # when printing a warning message during a method call.
+        self.current_node = None  # type: mparser.BaseNode
+        # This is set to `version_string` when this statement is evaluated:
+        # meson.version().compare_version(version_string)
+        # If it was part of an if-clause, it is used to temporarily override the
+        # current meson version target within that if-block.
+        self.tmp_meson_version = None # type: T.Optional[str]
+
+    def handle_meson_version_from_ast(self, strict: bool = True) -> None:
+        # do nothing in an AST interpreter
+        return
+
+    def load_root_meson_file(self) -> None:
+        mesonfile = os.path.join(self.source_root, self.subdir, environment.build_filename)
+        if not os.path.isfile(mesonfile):
+            raise InvalidArguments(f'Missing Meson file in {mesonfile}')
+        with open(mesonfile, encoding='utf-8') as mf:
+            code = mf.read()
+        if code.isspace():
+            raise InvalidCode('Builder file is empty.')
+        assert isinstance(code, str)
+        try:
+            self.ast = mparser.Parser(code, mesonfile).parse()
+            self.handle_meson_version_from_ast()
+        except mparser.ParseException as me:
+            me.file = mesonfile
+            # try to detect parser errors from new syntax added by future
+            # meson versions, and just tell the user to update meson
+            self.ast = me.ast
+            self.handle_meson_version_from_ast()
+            raise me
+
+    def parse_project(self) -> None:
+        """
+        Parses project() and initializes languages, compilers etc. Do this
+        early because we need this before we parse the rest of the AST.
+        """
+        self.evaluate_codeblock(self.ast, end=1)
+
+    def sanity_check_ast(self) -> None:
+        if not isinstance(self.ast, mparser.CodeBlockNode):
+            raise InvalidCode('AST is of invalid type. Possibly a bug in the parser.')
+        if not self.ast.lines:
+            raise InvalidCode('No statements in code.')
+        first = self.ast.lines[0]
+        if not isinstance(first, mparser.FunctionNode) or first.func_name != 'project':
+            p = pathlib.Path(self.source_root).resolve()
+            found = p
+            for parent in p.parents:
+                if (parent / 'meson.build').is_file():
+                    with open(parent / 'meson.build', encoding='utf-8') as f:
+                        if f.readline().startswith('project('):
+                            found = parent
+                            break
+                else:
+                    break
+
+            error = 'first statement must be a call to project()'
+            if found != p:
+                raise InvalidCode(f'Not the project root: {error}\n\nDid you mean to run meson from the directory: "{found}"?')
+            else:
+                raise InvalidCode(f'Invalid source tree: {error}')
+
+    def run(self) -> None:
+        # Evaluate everything after the first line, which is project(), because
+        # we already parsed that in self.parse_project()
+        try:
+            self.evaluate_codeblock(self.ast, start=1)
+        except SubdirDoneRequest:
+            pass
+
+    def evaluate_codeblock(self, node: mparser.CodeBlockNode, start: int = 0, end: T.Optional[int] = None) -> None:
+        if node is None:
+            return
+        if not isinstance(node, mparser.CodeBlockNode):
+            e = InvalidCode('Tried to execute a non-codeblock. Possibly a bug in the parser.')
+            e.lineno = node.lineno
+            e.colno = node.colno
+            raise e
+        statements = node.lines[start:end]
+        i = 0
+        while i < len(statements):
+            cur = statements[i]
+            try:
+                self.current_lineno = cur.lineno
+                self.evaluate_statement(cur)
+            except Exception as e:
+                if getattr(e, 'lineno', None) is None:
+                    # We are doing the equivalent of setattr here and mypy does not like it
+                    # NOTE: self.current_node is continually updated during processing
+                    e.lineno = self.current_node.lineno                                               # type: ignore
+                    e.colno = self.current_node.colno                                                 # type: ignore
+                    e.file = os.path.join(self.source_root, self.subdir, environment.build_filename)  # type: ignore
+                raise e
+            i += 1 # In the future, this may jump over whole blocks.
+
+    def evaluate_statement(self, cur: mparser.BaseNode) -> T.Optional[InterpreterObject]:
+        self.current_node = cur
+        if isinstance(cur, mparser.FunctionNode):
+            return self.function_call(cur)
+        elif isinstance(cur, mparser.AssignmentNode):
+            self.assignment(cur)
+        elif isinstance(cur, mparser.MethodNode):
+            return self.method_call(cur)
+        elif isinstance(cur, mparser.StringNode):
+            return self._holderify(cur.value)
+        elif isinstance(cur, mparser.BooleanNode):
+            return self._holderify(cur.value)
+        elif isinstance(cur, mparser.IfClauseNode):
+            return self.evaluate_if(cur)
+        elif isinstance(cur, mparser.IdNode):
+            return self.get_variable(cur.value)
+        elif isinstance(cur, mparser.ComparisonNode):
+            return self.evaluate_comparison(cur)
+        elif isinstance(cur, mparser.ArrayNode):
+            return self.evaluate_arraystatement(cur)
+        elif isinstance(cur, mparser.DictNode):
+            return self.evaluate_dictstatement(cur)
+        elif isinstance(cur, mparser.NumberNode):
+            return self._holderify(cur.value)
+        elif isinstance(cur, mparser.AndNode):
+            return self.evaluate_andstatement(cur)
+        elif isinstance(cur, mparser.OrNode):
+            return self.evaluate_orstatement(cur)
+        elif isinstance(cur, mparser.NotNode):
+            return self.evaluate_notstatement(cur)
+        elif isinstance(cur, mparser.UMinusNode):
+            return self.evaluate_uminusstatement(cur)
+        elif isinstance(cur, mparser.ArithmeticNode):
+            return self.evaluate_arithmeticstatement(cur)
+        elif isinstance(cur, mparser.ForeachClauseNode):
+            self.evaluate_foreach(cur)
+        elif isinstance(cur, mparser.PlusAssignmentNode):
+            self.evaluate_plusassign(cur)
+        elif isinstance(cur, mparser.IndexNode):
+            return self.evaluate_indexing(cur)
+        elif isinstance(cur, mparser.TernaryNode):
+            return self.evaluate_ternary(cur)
+        elif isinstance(cur, mparser.FormatStringNode):
+            if isinstance(cur, mparser.MultilineFormatStringNode):
+                return self.evaluate_multiline_fstring(cur)
+            else:
+                return self.evaluate_fstring(cur)
+        elif isinstance(cur, mparser.ContinueNode):
+            raise ContinueRequest()
+        elif isinstance(cur, mparser.BreakNode):
+            raise BreakRequest()
+        elif isinstance(cur, mparser.TestCaseClauseNode):
+            return self.evaluate_testcase(cur)
+        else:
+            raise InvalidCode("Unknown statement.")
+        return None
+
+    def evaluate_arraystatement(self, cur: mparser.ArrayNode) -> InterpreterObject:
+        (arguments, kwargs) = self.reduce_arguments(cur.args)
+        if len(kwargs) > 0:
+            raise InvalidCode('Keyword arguments are invalid in array construction.')
+        return self._holderify([_unholder(x) for x in arguments])
+
+    @FeatureNew('dict', '0.47.0')
+    def evaluate_dictstatement(self, cur: mparser.DictNode) -> InterpreterObject:
+        def resolve_key(key: mparser.BaseNode) -> str:
+            if not isinstance(key, mparser.StringNode):
+                FeatureNew.single_use('Dictionary entry using non literal key', '0.53.0', self.subproject)
+            key_holder = self.evaluate_statement(key)
+            if key_holder is None:
+                raise InvalidArguments('Key cannot be void.')
+            str_key = _unholder(key_holder)
+            if not isinstance(str_key, str):
+                raise InvalidArguments('Key must be a string')
+            return str_key
+        arguments, kwargs = self.reduce_arguments(cur.args, key_resolver=resolve_key, duplicate_key_error='Duplicate dictionary key: {}')
+        assert not arguments
+        return self._holderify({k: _unholder(v) for k, v in kwargs.items()})
+
+    def evaluate_notstatement(self, cur: mparser.NotNode) -> InterpreterObject:
+        v = self.evaluate_statement(cur.value)
+        if v is None:
+            raise InvalidCodeOnVoid('not')
+        if isinstance(v, Disabler):
+            return v
+        return self._holderify(v.operator_call(MesonOperator.NOT, None))
+
+    def evaluate_if(self, node: mparser.IfClauseNode) -> T.Optional[Disabler]:
+        assert isinstance(node, mparser.IfClauseNode)
+        for i in node.ifs:
+            # Reset self.tmp_meson_version to know if it gets set during this
+            # statement evaluation.
+            self.tmp_meson_version = None
+            result = self.evaluate_statement(i.condition)
+            if result is None:
+                raise InvalidCodeOnVoid('if')
+            if isinstance(result, Disabler):
+                return result
+            if not isinstance(result, InterpreterObject):
+                raise mesonlib.MesonBugException(f'Argument to if ({result}) is not an InterpreterObject but {type(result).__name__}.')
+            res = result.operator_call(MesonOperator.BOOL, None)
+            if not isinstance(res, bool):
+                raise InvalidCode(f'If clause {result!r} does not evaluate to true or false.')
+            if res:
+                prev_meson_version = mesonlib.project_meson_versions[self.subproject]
+                if self.tmp_meson_version:
+                    mesonlib.project_meson_versions[self.subproject] = self.tmp_meson_version
+                try:
+                    self.evaluate_codeblock(i.block)
+                finally:
+                    mesonlib.project_meson_versions[self.subproject] = prev_meson_version
+                return None
+        if not isinstance(node.elseblock, mparser.EmptyNode):
+            self.evaluate_codeblock(node.elseblock)
+        return None
+
+    def evaluate_testcase(self, node: mparser.TestCaseClauseNode) -> T.Optional[Disabler]:
+        result = self.evaluate_statement(node.condition)
+        if isinstance(result, Disabler):
+            return result
+        if not isinstance(result, ContextManagerObject):
+            raise InvalidCode(f'testcase clause {result!r} does not evaluate to a context manager.')
+        with result:
+            self.evaluate_codeblock(node.block)
+        return None
+
+    def evaluate_comparison(self, node: mparser.ComparisonNode) -> InterpreterObject:
+        val1 = self.evaluate_statement(node.left)
+        if val1 is None:
+            raise MesonException('Cannot compare a void statement on the left-hand side')
+        if isinstance(val1, Disabler):
+            return val1
+        val2 = self.evaluate_statement(node.right)
+        if val2 is None:
+            raise MesonException('Cannot compare a void statement on the right-hand side')
+        if isinstance(val2, Disabler):
+            return val2
+
+        # New code based on InterpreterObjects
+        operator = {
+            'in': MesonOperator.IN,
+            'notin': MesonOperator.NOT_IN,
+            '==': MesonOperator.EQUALS,
+            '!=': MesonOperator.NOT_EQUALS,
+            '>': MesonOperator.GREATER,
+            '<': MesonOperator.LESS,
+            '>=': MesonOperator.GREATER_EQUALS,
+            '<=': MesonOperator.LESS_EQUALS,
+        }[node.ctype]
+
+        # Check if the arguments should be reversed for simplicity (this essentially converts `in` to `contains`)
+        if operator in (MesonOperator.IN, MesonOperator.NOT_IN):
+            val1, val2 = val2, val1
+
+        val1.current_node = node
+        return self._holderify(val1.operator_call(operator, _unholder(val2)))
+
+    def evaluate_andstatement(self, cur: mparser.AndNode) -> InterpreterObject:
+        l = self.evaluate_statement(cur.left)
+        if l is None:
+            raise MesonException('Cannot compare a void statement on the left-hand side')
+        if isinstance(l, Disabler):
+            return l
+        l_bool = l.operator_call(MesonOperator.BOOL, None)
+        if not l_bool:
+            return self._holderify(l_bool)
+        r = self.evaluate_statement(cur.right)
+        if r is None:
+            raise MesonException('Cannot compare a void statement on the right-hand side')
+        if isinstance(r, Disabler):
+            return r
+        return self._holderify(r.operator_call(MesonOperator.BOOL, None))
+
+    def evaluate_orstatement(self, cur: mparser.OrNode) -> InterpreterObject:
+        l = self.evaluate_statement(cur.left)
+        if l is None:
+            raise MesonException('Cannot compare a void statement on the left-hand side')
+        if isinstance(l, Disabler):
+            return l
+        l_bool = l.operator_call(MesonOperator.BOOL, None)
+        if l_bool:
+            return self._holderify(l_bool)
+        r = self.evaluate_statement(cur.right)
+        if r is None:
+            raise MesonException('Cannot compare a void statement on the right-hand side')
+        if isinstance(r, Disabler):
+            return r
+        return self._holderify(r.operator_call(MesonOperator.BOOL, None))
+
+    def evaluate_uminusstatement(self, cur: mparser.UMinusNode) -> InterpreterObject:
+        v = self.evaluate_statement(cur.value)
+        if v is None:
+            raise InvalidCodeOnVoid('unary minus')
+        if isinstance(v, Disabler):
+            return v
+        v.current_node = cur
+        return self._holderify(v.operator_call(MesonOperator.UMINUS, None))
+
+    def evaluate_arithmeticstatement(self, cur: mparser.ArithmeticNode) -> InterpreterObject:
+        l = self.evaluate_statement(cur.left)
+        if isinstance(l, Disabler):
+            return l
+        r = self.evaluate_statement(cur.right)
+        if isinstance(r, Disabler):
+            return r
+        if l is None or r is None:
+            raise InvalidCodeOnVoid(cur.operation)
+
+        mapping: T.Dict[str, MesonOperator] = {
+            'add': MesonOperator.PLUS,
+            'sub': MesonOperator.MINUS,
+            'mul': MesonOperator.TIMES,
+            'div': MesonOperator.DIV,
+            'mod': MesonOperator.MOD,
+        }
+        l.current_node = cur
+        res = l.operator_call(mapping[cur.operation], _unholder(r))
+        return self._holderify(res)
+
+    def evaluate_ternary(self, node: mparser.TernaryNode) -> T.Optional[InterpreterObject]:
+        assert isinstance(node, mparser.TernaryNode)
+        result = self.evaluate_statement(node.condition)
+        if result is None:
+            raise MesonException('Cannot use a void statement as condition for ternary operator.')
+        if isinstance(result, Disabler):
+            return result
+        result.current_node = node
+        result_bool = result.operator_call(MesonOperator.BOOL, None)
+        if result_bool:
+            return self.evaluate_statement(node.trueblock)
+        else:
+            return self.evaluate_statement(node.falseblock)
+
+    @FeatureNew('multiline format strings', '0.63.0')
+    def evaluate_multiline_fstring(self, node: mparser.MultilineFormatStringNode) -> InterpreterObject:
+        return self.evaluate_fstring(node)
+
+    @FeatureNew('format strings', '0.58.0')
+    def evaluate_fstring(self, node: mparser.FormatStringNode) -> InterpreterObject:
+        assert isinstance(node, mparser.FormatStringNode)
+
+        def replace(match: T.Match[str]) -> str:
+            var = str(match.group(1))
+            try:
+                val = _unholder(self.variables[var])
+                if not isinstance(val, (str, int, float, bool)):
+                    raise InvalidCode(f'Identifier "{var}" does not name a formattable variable ' +
+                                      '(has to be an integer, a string, a floating point number or a boolean).')
+
+                return str(val)
+            except KeyError:
+                raise InvalidCode(f'Identifier "{var}" does not name a variable.')
+
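+        # Substitute every @identifier@ placeholder with the variable's value.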
+        res = re.sub(r'@([_a-zA-Z][_0-9a-zA-Z]*)@', replace, node.value)
+        return self._holderify(res)
+
+    def evaluate_foreach(self, node: mparser.ForeachClauseNode) -> None:
+        assert isinstance(node, mparser.ForeachClauseNode)
+        items = self.evaluate_statement(node.items)
+        if not isinstance(items, IterableObject):
+            raise InvalidArguments('Items of foreach loop do not support iterating')
+
+        tsize = items.iter_tuple_size()
+        if len(node.varnames) != (tsize or 1):
+            raise InvalidArguments(f'Foreach expects exactly {tsize or 1} variables for iterating over objects of type {items.display_name()}')
+
+        for i in items.iter_self():
+            if tsize is None:
+                if isinstance(i, tuple):
+                    raise mesonlib.MesonBugException(f'Iteration of {items} returned a tuple even though iter_tuple_size() is None')
+                self.set_variable(node.varnames[0], self._holderify(i))
+            else:
+                if not isinstance(i, tuple):
+                    raise mesonlib.MesonBugException(f'Iteration of {items} did not return a tuple even though iter_tuple_size() is {tsize}')
+                if len(i) != tsize:
+                    raise mesonlib.MesonBugException(f'Iteration of {items} did not return a tuple of size {tsize} even though iter_tuple_size() is {tsize}')
+                for j in range(tsize):
+                    self.set_variable(node.varnames[j], self._holderify(i[j]))
+            try:
+                self.evaluate_codeblock(node.block)
+            except ContinueRequest:
+                continue
+            except BreakRequest:
+                break
+
+    def evaluate_plusassign(self, node: mparser.PlusAssignmentNode) -> None:
+        assert isinstance(node, mparser.PlusAssignmentNode)
+        varname = node.var_name
+        addition = self.evaluate_statement(node.value)
+        if addition is None:
+            raise InvalidCodeOnVoid('plus assign')
+
+        # Remember that all variables are immutable. We must always create a
+        # full new variable and then assign it.
+        old_variable = self.get_variable(varname)
+        old_variable.current_node = node
+        new_value = self._holderify(old_variable.operator_call(MesonOperator.PLUS, _unholder(addition)))
+        self.set_variable(varname, new_value)
+
+    def evaluate_indexing(self, node: mparser.IndexNode) -> InterpreterObject:
+        assert isinstance(node, mparser.IndexNode)
+        iobject = self.evaluate_statement(node.iobject)
+        if iobject is None:
+            raise InterpreterException('Tried to evaluate indexing on void.')
+        if isinstance(iobject, Disabler):
+            return iobject
+        index_holder = self.evaluate_statement(node.index)
+        if index_holder is None:
+            raise InvalidArguments('Cannot use void statement as index.')
+        index = _unholder(index_holder)
+
+        iobject.current_node = node
+        return self._holderify(iobject.operator_call(MesonOperator.INDEX, index))
+
+    def function_call(self, node: mparser.FunctionNode) -> T.Optional[InterpreterObject]:
+        func_name = node.func_name
+        (h_posargs, h_kwargs) = self.reduce_arguments(node.args)
+        (posargs, kwargs) = self._unholder_args(h_posargs, h_kwargs)
+        if is_disabled(posargs, kwargs) and func_name not in {'get_variable', 'set_variable', 'unset_variable', 'is_disabler'}:
+            return Disabler()
+        if func_name in self.funcs:
+            func = self.funcs[func_name]
+            func_args = posargs
+            if not getattr(func, 'no-args-flattening', False):
+                func_args = flatten(posargs)
+            if not getattr(func, 'no-second-level-holder-flattening', False):
+                func_args, kwargs = resolve_second_level_holders(func_args, kwargs)
+            self.current_node = node
+            res = func(node, func_args, kwargs)
+            return self._holderify(res) if res is not None else None
+        else:
+            self.unknown_function_called(func_name)
+            return None
+
+    def method_call(self, node: mparser.MethodNode) -> T.Optional[InterpreterObject]:
+        invocable = node.source_object
+        obj: T.Optional[InterpreterObject]
+        if isinstance(invocable, mparser.IdNode):
+            object_display_name = f'variable "{invocable.value}"'
+            obj = self.get_variable(invocable.value)
+        else:
+            object_display_name = invocable.__class__.__name__
+            obj = self.evaluate_statement(invocable)
+        method_name = node.name
+        (h_args, h_kwargs) = self.reduce_arguments(node.args)
+        (args, kwargs) = self._unholder_args(h_args, h_kwargs)
+        if is_disabled(args, kwargs):
+            return Disabler()
+        if not isinstance(obj, InterpreterObject):
+            raise InvalidArguments(f'{object_display_name} is not callable.')
+        # TODO: InterpreterBase **really** shouldn't be in charge of checking this
+        if method_name == 'extract_objects':
+            if isinstance(obj, ObjectHolder):
+                self.validate_extraction(obj.held_object)
+            elif not isinstance(obj, Disabler):
+                raise InvalidArguments(f'Invalid operation "extract_objects" on {object_display_name} of type {type(obj).__name__}')
+        obj.current_node = self.current_node = node
+        res = obj.method_call(method_name, args, kwargs)
+        return self._holderify(res) if res is not None else None
+
+    def _holderify(self, res: T.Union[TYPE_var, InterpreterObject]) -> InterpreterObject:
+        if isinstance(res, HoldableTypes):
+            # Always check for an exact match first.
+            cls = self.holder_map.get(type(res), None)
+            if cls is not None:
+                # Casts to Interpreter are required here since an assertion would
+                # not work for the `ast` module.
+                return cls(res, T.cast('Interpreter', self))
+            # Try the boundary types next.
+            for typ, cls in self.bound_holder_map.items():
+                if isinstance(res, typ):
+                    return cls(res, T.cast('Interpreter', self))
+            raise mesonlib.MesonBugException(f'Object {res} of type {type(res).__name__} is neither in self.holder_map nor self.bound_holder_map.')
+        elif isinstance(res, ObjectHolder):
+            raise mesonlib.MesonBugException(f'Returned object {res} of type {type(res).__name__} is an object holder.')
+        elif isinstance(res, MesonInterpreterObject):
+            return res
+        raise mesonlib.MesonBugException(f'Unknown returned object {res} of type {type(res).__name__} in the parameters.')
+
+    def _unholder_args(self,
+                       args: T.List[InterpreterObject],
+                       kwargs: T.Dict[str, InterpreterObject]) -> T.Tuple[T.List[TYPE_var], TYPE_kwargs]:
+        return [_unholder(x) for x in args], {k: _unholder(v) for k, v in kwargs.items()}
+
+    def unknown_function_called(self, func_name: str) -> None:
+        raise InvalidCode(f'Unknown function "{func_name}".')
+
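+    # Evaluate an ArgumentNode into (positional, keyword) argument holders,
+    # enforcing that keyword arguments follow positional ones and optionally
+    # rejecting duplicate keys.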
+    def reduce_arguments(
+                self,
+                args: mparser.ArgumentNode,
+                key_resolver: T.Callable[[mparser.BaseNode], str] = default_resolve_key,
+                duplicate_key_error: T.Optional[str] = None,
+            ) -> T.Tuple[
+                T.List[InterpreterObject],
+                T.Dict[str, InterpreterObject]
+            ]:
+        assert isinstance(args, mparser.ArgumentNode)
+        if args.incorrect_order():
+            raise InvalidArguments('All keyword arguments must be after positional arguments.')
+        self.argument_depth += 1
+        reduced_pos = [self.evaluate_statement(arg) for arg in args.arguments]
+        if any(x is None for x in reduced_pos):
+            raise InvalidArguments('At least one value in the arguments is void.')
+        reduced_kw: T.Dict[str, InterpreterObject] = {}
+        for key, val in args.kwargs.items():
+            reduced_key = key_resolver(key)
+            assert isinstance(val, mparser.BaseNode)
+            reduced_val = self.evaluate_statement(val)
+            if reduced_val is None:
+                raise InvalidArguments(f'Value of key {reduced_key} is void.')
+            self.current_node = key
+            if duplicate_key_error and reduced_key in reduced_kw:
+                raise InvalidArguments(duplicate_key_error.format(reduced_key))
+            reduced_kw[reduced_key] = reduced_val
+        self.argument_depth -= 1
+        final_kw = self.expand_default_kwargs(reduced_kw)
+        return reduced_pos, final_kw
+
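+    # Expand the special `kwargs` keyword argument: entries of its dictionary
+    # value are merged into the regular keyword arguments, with duplicates
+    # rejected.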
+    def expand_default_kwargs(self, kwargs: T.Dict[str, T.Optional[InterpreterObject]]) -> T.Dict[str, T.Optional[InterpreterObject]]:
+        if 'kwargs' not in kwargs:
+            return kwargs
+        to_expand = _unholder(kwargs.pop('kwargs'))
+        if not isinstance(to_expand, dict):
+            raise InterpreterException('Value of "kwargs" must be dictionary.')
+        if 'kwargs' in to_expand:
+            raise InterpreterException('Kwargs argument must not contain a "kwargs" entry. Points for thinking meta, though. :P')
+        for k, v in to_expand.items():
+            if k in kwargs:
+                raise InterpreterException(f'Entry "{k}" defined both as a keyword argument and in a "kwarg" entry.')
+            kwargs[k] = self._holderify(v)
+        return kwargs
+
+    def assignment(self, node: mparser.AssignmentNode) -> None:
+        assert isinstance(node, mparser.AssignmentNode)
+        if self.argument_depth != 0:
+            raise InvalidArguments(textwrap.dedent('''\
+                Tried to assign values inside an argument list.
+                To specify a keyword argument, use : instead of =.
+            '''))
+        var_name = node.var_name
+        if not isinstance(var_name, str):
+            raise InvalidArguments('Tried to assign value to a non-variable.')
+        value = self.evaluate_statement(node.value)
+        # For mutable objects we need to make a copy on assignment
+        if isinstance(value, MutableInterpreterObject):
+            value = copy.deepcopy(value)
+        self.set_variable(var_name, value)
+
+    def set_variable(self, varname: str, variable: T.Union[TYPE_var, InterpreterObject], *, holderify: bool = False) -> None:
+        if variable is None:
+            raise InvalidCode('Cannot assign void to variable.')
+        if holderify:
+            variable = self._holderify(variable)
+        else:
+            # Ensure that we are always storing ObjectHolders
+            if not isinstance(variable, InterpreterObject):
+                raise mesonlib.MesonBugException(f'set_variable in InterpreterBase called with a non InterpreterObject {variable} of type {type(variable).__name__}')
+        if not isinstance(varname, str):
+            raise InvalidCode('First argument to set_variable must be a string.')
+        if re.match('[_a-zA-Z][_0-9a-zA-Z]*$', varname) is None:
+            raise InvalidCode('Invalid variable name: ' + varname)
+        if varname in self.builtin:
+            raise InvalidCode(f'Tried to overwrite internal variable "{varname}"')
+        self.variables[varname] = variable
+
+    def get_variable(self, varname: str) -> InterpreterObject:
+        if varname in self.builtin:
+            return self.builtin[varname]
+        if varname in self.variables:
+            return self.variables[varname]
+        raise InvalidCode(f'Unknown variable "{varname}".')
+
+    def validate_extraction(self, buildtarget: mesonlib.HoldableObject) -> None:
+        raise InterpreterException('validate_extraction is not implemented in this context (please file a bug)')
diff --git a/vendored-meson/meson/mesonbuild/interpreterbase/operator.py b/vendored-meson/meson/mesonbuild/interpreterbase/operator.py
new file mode 100644
index 000000000000..5dec8d0a8671
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/interpreterbase/operator.py
@@ -0,0 +1,32 @@
+# SPDX-license-identifier: Apache-2.0
+
+from enum import Enum
+
+class MesonOperator(Enum):
+    # Arithmetic
+    PLUS = '+'
+    MINUS = '-'
+    TIMES = '*'
+    DIV = '/'
+    MOD = '%'
+
+    UMINUS = 'uminus'
+
+    # Logic
+    NOT = 'not'
+
+    # Should return the boolean interpretation of the value (`'' == false` for instance)
+    BOOL = 'bool()'
+
+    # Comparison
+    EQUALS = '=='
+    NOT_EQUALS = '!='
+    GREATER = '>'
+    LESS = '<'
+    GREATER_EQUALS = '>='
+    LESS_EQUALS = '<='
+
+    # Container
+    IN = 'in'
+    NOT_IN = 'not in'
+    INDEX = '[]'
diff --git a/vendored-meson/meson/mesonbuild/linkers/__init__.py b/vendored-meson/meson/mesonbuild/linkers/__init__.py
new file mode 100644
index 000000000000..7c3569400adb
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/linkers/__init__.py
@@ -0,0 +1,32 @@
+# Copyright 2012-2021 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+from .base import ArLikeLinker, RSPFileSyntax
+from .detect import (
+    defaults,
+    guess_win_linker,
+    guess_nix_linker,
+)
+
+__all__ = [
+    # base.py
+    'ArLikeLinker',
+    'RSPFileSyntax',
+
+    # detect.py
+    'defaults',
+    'guess_win_linker',
+    'guess_nix_linker',
+]
diff --git a/vendored-meson/meson/mesonbuild/linkers/base.py b/vendored-meson/meson/mesonbuild/linkers/base.py
new file mode 100644
index 000000000000..a656bb40c0eb
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/linkers/base.py
@@ -0,0 +1,50 @@
+# Copyright 2012-2023 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     https://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Core public classes for linkers.
+from __future__ import annotations
+
+import enum
+import typing as T
+
+if T.TYPE_CHECKING:
+    from ..environment import Environment
+
+
+@enum.unique
+class RSPFileSyntax(enum.Enum):
+
+    """Which RSP file syntax the compiler supports."""
+
+    MSVC = enum.auto()
+    GCC = enum.auto()
+
+
+class ArLikeLinker:
+    # POSIX requires supporting the dash, GNU permits omitting it
+    std_args = ['-csr']
+
+    def can_linker_accept_rsp(self) -> bool:
+        # armar / AIX can't accept arguments using the @rsp syntax;
+        # in fact, only the 'ar' id can.
+        return False
+
+    def get_std_link_args(self, env: 'Environment', is_thin: bool) -> T.List[str]:
+        return self.std_args
+
+    def get_output_args(self, target: str) -> T.List[str]:
+        return [target]
+
+    def rsp_file_syntax(self) -> RSPFileSyntax:
+        return RSPFileSyntax.GCC
diff --git a/vendored-meson/meson/mesonbuild/linkers/detect.py b/vendored-meson/meson/mesonbuild/linkers/detect.py
new file mode 100644
index 000000000000..e09a28ec234f
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/linkers/detect.py
@@ -0,0 +1,234 @@
+# Copyright 2012-2022 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import annotations
+
+from .. import mlog
+from ..mesonlib import (
+    EnvironmentException,
+    Popen_safe, Popen_safe_logged, join_args, search_version
+)
+
+import re
+import shlex
+import typing as T
+
+if T.TYPE_CHECKING:
+    from .linkers import DynamicLinker, GnuDynamicLinker
+    from ..environment import Environment
+    from ..compilers import Compiler
+    from ..mesonlib import MachineChoice
+
+defaults: T.Dict[str, T.List[str]] = {}
+defaults['static_linker'] = ['ar', 'gar']
+defaults['vs_static_linker'] = ['lib']
+defaults['clang_cl_static_linker'] = ['llvm-lib']
+defaults['cuda_static_linker'] = ['nvlink']
+defaults['gcc_static_linker'] = ['gcc-ar']
+defaults['clang_static_linker'] = ['llvm-ar']
+
+def __failed_to_detect_linker(compiler: T.List[str], args: T.List[str], stdout: str, stderr: str) -> 'T.NoReturn':
+    msg = 'Unable to detect linker for compiler `{}`\nstdout: {}\nstderr: {}'.format(
+        join_args(compiler + args), stdout, stderr)
+    raise EnvironmentException(msg)
+
+
+def guess_win_linker(env: 'Environment', compiler: T.List[str], comp_class: T.Type['Compiler'],
+                     comp_version: str, for_machine: MachineChoice, *,
+                     use_linker_prefix: bool = True, invoked_directly: bool = True,
+                     extra_args: T.Optional[T.List[str]] = None) -> 'DynamicLinker':
+    from . import linkers
+    env.coredata.add_lang_args(comp_class.language, comp_class, for_machine, env)
+
+    # Explicitly pass logo here so that we can get the version of link.exe
+    if not use_linker_prefix or comp_class.LINKER_PREFIX is None:
+        check_args = ['/logo', '--version']
+    elif isinstance(comp_class.LINKER_PREFIX, str):
+        check_args = [comp_class.LINKER_PREFIX + '/logo', comp_class.LINKER_PREFIX + '--version']
+    elif isinstance(comp_class.LINKER_PREFIX, list):
+        check_args = comp_class.LINKER_PREFIX + ['/logo'] + comp_class.LINKER_PREFIX + ['--version']
+
+    check_args += env.coredata.get_external_link_args(for_machine, comp_class.language)
+
+    override = []  # type: T.List[str]
+    value = env.lookup_binary_entry(for_machine, comp_class.language + '_ld')
+    if value is not None:
+        override = comp_class.use_linker_args(value[0], comp_version)
+        check_args += override
+
+    if extra_args is not None:
+        check_args.extend(extra_args)
+
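+    # First probe: if LLD identifies itself on the first line of output we can
+    # distinguish its GNU-compatible mode from the MSVC-style lld-link mode.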
+    p, o, _ = Popen_safe(compiler + check_args)
+    if 'LLD' in o.split('\n', maxsplit=1)[0]:
+        if '(compatible with GNU linkers)' in o:
+            return linkers.LLVMDynamicLinker(
+                compiler, for_machine, comp_class.LINKER_PREFIX,
+                override, version=search_version(o))
+        elif not invoked_directly:
+            return linkers.ClangClDynamicLinker(
+                for_machine, override, exelist=compiler, prefix=comp_class.LINKER_PREFIX,
+                version=search_version(o), direct=False, machine=None)
+
+    if value is not None and invoked_directly:
+        compiler = value
+        # We've already handled the non-direct case above
+
+    p, o, e = Popen_safe(compiler + check_args)
+    if 'LLD' in o.split('\n', maxsplit=1)[0]:
+        return linkers.ClangClDynamicLinker(
+            for_machine, [],
+            prefix=comp_class.LINKER_PREFIX if use_linker_prefix else [],
+            exelist=compiler, version=search_version(o), direct=invoked_directly)
+    elif 'OPTLINK' in o:
+        # Optlink's stdout *may* begin with a \r character.
+        return linkers.OptlinkDynamicLinker(compiler, for_machine, version=search_version(o))
+    elif o.startswith('Microsoft') or e.startswith('Microsoft'):
+        out = o or e
+        match = re.search(r'.*(X86|X64|ARM|ARM64).*', out)
+        if match:
+            target = str(match.group(1))
+        else:
+            target = 'x86'
+
+        return linkers.MSVCDynamicLinker(
+            for_machine, [], machine=target, exelist=compiler,
+            prefix=comp_class.LINKER_PREFIX if use_linker_prefix else [],
+            version=search_version(out), direct=invoked_directly)
+    elif 'GNU coreutils' in o:
+        import shutil
+        fullpath = shutil.which(compiler[0])
+        raise EnvironmentException(
+            f"Found GNU link.exe instead of MSVC link.exe in {fullpath}.\n"
+            "This link.exe is not a linker.\n"
+            "You may need to reorder entries to your %PATH% variable to resolve this.")
+    __failed_to_detect_linker(compiler, check_args, o, e)
+
+def guess_nix_linker(env: 'Environment', compiler: T.List[str], comp_class: T.Type['Compiler'],
+                     comp_version: str, for_machine: MachineChoice, *,
+                     extra_args: T.Optional[T.List[str]] = None) -> 'DynamicLinker':
+    """Helper for guessing what linker to use on Unix-Like OSes.
+
+    :compiler: Invocation to use to get linker
+    :comp_class: The Compiler Type (uninstantiated)
+    :comp_version: The compiler version string
+    :for_machine: which machine this linker targets
+    :extra_args: Any additional arguments required (such as a source file)
+    """
+    from . import linkers
+    env.coredata.add_lang_args(comp_class.language, comp_class, for_machine, env)
+    extra_args = extra_args or []
+
+    ldflags = env.coredata.get_external_link_args(for_machine, comp_class.language)
+    extra_args += comp_class._unix_args_to_native(ldflags, env.machines[for_machine])
+
+    if isinstance(comp_class.LINKER_PREFIX, str):
+        check_args = [comp_class.LINKER_PREFIX + '--version'] + extra_args
+    else:
+        check_args = comp_class.LINKER_PREFIX + ['--version'] + extra_args
+
+    override = []  # type: T.List[str]
+    value = env.lookup_binary_entry(for_machine, comp_class.language + '_ld')
+    if value is not None:
+        override = comp_class.use_linker_args(value[0], comp_version)
+        check_args += override
+
+    mlog.debug('-----')
+    p, o, e = Popen_safe_logged(compiler + check_args, msg='Detecting linker via')
+
+    v = search_version(o + e)
+    linker: DynamicLinker
+    if 'LLD' in o.split('\n', maxsplit=1)[0]:
+        if isinstance(comp_class.LINKER_PREFIX, str):
+            cmd = compiler + override + [comp_class.LINKER_PREFIX + '-v'] + extra_args
+        else:
+            cmd = compiler + override + comp_class.LINKER_PREFIX + ['-v'] + extra_args
+        _, newo, newerr = Popen_safe_logged(cmd, msg='Detecting LLD linker via')
+
+        lld_cls: T.Type[DynamicLinker]
+        if 'ld64.lld' in newerr:
+            lld_cls = linkers.LLVMLD64DynamicLinker
+        else:
+            lld_cls = linkers.LLVMDynamicLinker
+
+        linker = lld_cls(
+            compiler, for_machine, comp_class.LINKER_PREFIX, override, version=v)
+    elif 'Snapdragon' in e and 'LLVM' in e:
+        linker = linkers.QualcommLLVMDynamicLinker(
+            compiler, for_machine, comp_class.LINKER_PREFIX, override, version=v)
+    elif e.startswith('lld-link: '):
+        # The LLD MinGW frontend didn't respond to --version before version 9.0.0,
+        # and produced an error message about failing to link (when no object
+        # files were specified), instead of printing the version number.
+        # Let's try to extract the linker invocation command to grab the version.
+
+        _, o, e = Popen_safe(compiler + check_args + ['-v'])
+
+        try:
+            linker_cmd = re.match(r'.*\n(.*?)\nlld-link: ', e, re.DOTALL).group(1)
+            linker_cmd = shlex.split(linker_cmd)[0]
+        except (AttributeError, IndexError, ValueError):
+            pass
+        else:
+            _, o, e = Popen_safe([linker_cmd, '--version'])
+            v = search_version(o)
+
+        linker = linkers.LLVMDynamicLinker(compiler, for_machine, comp_class.LINKER_PREFIX, override, version=v)
+    # The first might be Apple clang, the second is for real GCC, the third is ICC.
+    elif e.endswith('(use -v to see invocation)\n') or 'macosx_version' in e or 'ld: unknown option:' in e:
+        if isinstance(comp_class.LINKER_PREFIX, str):
+            cmd = compiler + [comp_class.LINKER_PREFIX + '-v'] + extra_args
+        else:
+            cmd = compiler + comp_class.LINKER_PREFIX + ['-v'] + extra_args
+        _, newo, newerr = Popen_safe_logged(cmd, msg='Detecting Apple linker via')
+
+        for line in newerr.split('\n'):
+            if 'PROJECT:ld' in line or 'PROJECT:dyld' in line:
+                v = line.split('-')[1]
+                break
+        else:
+            __failed_to_detect_linker(compiler, check_args, o, e)
+        linker = linkers.AppleDynamicLinker(compiler, for_machine, comp_class.LINKER_PREFIX, override, version=v)
+    elif 'GNU' in o or 'GNU' in e:
+        gnu_cls: T.Type[GnuDynamicLinker]
+        # this is always the only thing on stdout, except for swift
+        # which may or may not redirect the linker stdout to stderr
+        if o.startswith('GNU gold') or e.startswith('GNU gold'):
+            gnu_cls = linkers.GnuGoldDynamicLinker
+        elif o.startswith('mold') or e.startswith('mold'):
+            gnu_cls = linkers.MoldDynamicLinker
+        else:
+            gnu_cls = linkers.GnuBFDDynamicLinker
+        linker = gnu_cls(compiler, for_machine, comp_class.LINKER_PREFIX, override, version=v)
+    elif 'Solaris' in e or 'Solaris' in o:
+        for line in (o+e).split('\n'):
+            if 'ld: Software Generation Utilities' in line:
+                v = line.split(':')[2].lstrip()
+                break
+        else:
+            v = 'unknown version'
+        linker = linkers.SolarisDynamicLinker(
+            compiler, for_machine, comp_class.LINKER_PREFIX, override,
+            version=v)
+    elif 'ld: 0706-012 The -- flag is not recognized' in e:
+        if isinstance(comp_class.LINKER_PREFIX, str):
+            _, _, e = Popen_safe(compiler + [comp_class.LINKER_PREFIX + '-V'] + extra_args)
+        else:
+            _, _, e = Popen_safe(compiler + comp_class.LINKER_PREFIX + ['-V'] + extra_args)
+        linker = linkers.AIXDynamicLinker(
+            compiler, for_machine, comp_class.LINKER_PREFIX, override,
+            version=search_version(e))
+    else:
+        __failed_to_detect_linker(compiler, check_args, o, e)
+    return linker
diff --git a/vendored-meson/meson/mesonbuild/linkers/linkers.py b/vendored-meson/meson/mesonbuild/linkers/linkers.py
new file mode 100644
index 000000000000..8f413c857901
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/linkers/linkers.py
@@ -0,0 +1,1617 @@
+# Copyright 2012-2022 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+import abc
+import os
+import typing as T
+import re
+
+from .base import ArLikeLinker, RSPFileSyntax
+from .. import mesonlib
+from ..mesonlib import EnvironmentException, MesonException
+from ..arglist import CompilerArgs
+
+if T.TYPE_CHECKING:
+    from ..coredata import KeyedOptionDictType
+    from ..environment import Environment
+    from ..mesonlib import MachineChoice
+
+
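+# Base class for static linkers (archivers). The default implementations
+# return empty argument lists, which subclasses override as needed.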
+class StaticLinker:
+
+    id: str
+
+    def __init__(self, exelist: T.List[str]):
+        self.exelist = exelist
+
+    def compiler_args(self, args: T.Optional[T.Iterable[str]] = None) -> CompilerArgs:
+        return CompilerArgs(self, args)
+
+    def can_linker_accept_rsp(self) -> bool:
+        """
+        Determines whether the linker can accept arguments using the @rsp syntax.
+        """
+        return mesonlib.is_windows()
+
+    def get_base_link_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+        """Like compilers.get_base_link_args, but for the static linker."""
+        return []
+
+    def get_exelist(self) -> T.List[str]:
+        return self.exelist.copy()
+
+    def get_std_link_args(self, env: 'Environment', is_thin: bool) -> T.List[str]:
+        return []
+
+    def get_buildtype_linker_args(self, buildtype: str) -> T.List[str]:
+        return []
+
+    def get_output_args(self, target: str) -> T.List[str]:
+        return []
+
+    def get_coverage_link_args(self) -> T.List[str]:
+        return []
+
+    def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
+                         rpath_paths: T.Tuple[str, ...], build_rpath: str,
+                         install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
+        return ([], set())
+
+    def thread_link_flags(self, env: 'Environment') -> T.List[str]:
+        return []
+
+    def openmp_flags(self) -> T.List[str]:
+        return []
+
+    def get_option_link_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+        return []
+
+    @classmethod
+    def unix_args_to_native(cls, args: T.List[str]) -> T.List[str]:
+        return args[:]
+
+    @classmethod
+    def native_args_to_unix(cls, args: T.List[str]) -> T.List[str]:
+        return args[:]
+
+    def get_link_debugfile_name(self, targetfile: str) -> T.Optional[str]:
+        return None
+
+    def get_link_debugfile_args(self, targetfile: str) -> T.List[str]:
+        # Static libraries do not have PDB files
+        return []
+
+    def get_always_args(self) -> T.List[str]:
+        return []
+
+    def get_linker_always_args(self) -> T.List[str]:
+        return []
+
+    def rsp_file_syntax(self) -> RSPFileSyntax:
+        """The format of the RSP file that this compiler supports.
+
+        If `self.can_linker_accept_rsp()` returns True, then this needs to
+        be implemented
+        """
+        assert not self.can_linker_accept_rsp(), f'{self.id} linker accepts RSP, but doesn\'t provide a supported format; this is a bug'
+        raise EnvironmentException(f'{self.id} does not implement rsp format; this shouldn\'t be called')
+
+
+class VisualStudioLikeLinker:
+    always_args = ['/NOLOGO']
+
+    def __init__(self, machine: str):
+        self.machine = machine
+
+    def get_always_args(self) -> T.List[str]:
+        return self.always_args.copy()
+
+    def get_linker_always_args(self) -> T.List[str]:
+        return self.always_args.copy()
+
+    def get_output_args(self, target: str) -> T.List[str]:
+        args: T.List[str] = []
+        if self.machine:
+            args += ['/MACHINE:' + self.machine]
+        args += ['/OUT:' + target]
+        return args
+
+    @classmethod
+    def unix_args_to_native(cls, args: T.List[str]) -> T.List[str]:
+        from ..compilers.c import VisualStudioCCompiler
+        return VisualStudioCCompiler.unix_args_to_native(args)
+
+    @classmethod
+    def native_args_to_unix(cls, args: T.List[str]) -> T.List[str]:
+        from ..compilers.c import VisualStudioCCompiler
+        return VisualStudioCCompiler.native_args_to_unix(args)
+
+    def rsp_file_syntax(self) -> RSPFileSyntax:
+        return RSPFileSyntax.MSVC
+
+
+class VisualStudioLinker(VisualStudioLikeLinker, StaticLinker):
+
+    """Microsoft's lib static linker."""
+
+    def __init__(self, exelist: T.List[str], machine: str):
+        StaticLinker.__init__(self, exelist)
+        VisualStudioLikeLinker.__init__(self, machine)
+
+
+class IntelVisualStudioLinker(VisualStudioLikeLinker, StaticLinker):
+
+    """Intel's xilib static linker."""
+
+    def __init__(self, exelist: T.List[str], machine: str):
+        StaticLinker.__init__(self, exelist)
+        VisualStudioLikeLinker.__init__(self, machine)
+
+
+class ArLinker(ArLikeLinker, StaticLinker):
+    id = 'ar'
+
+    def __init__(self, for_machine: mesonlib.MachineChoice, exelist: T.List[str]):
+        super().__init__(exelist)
+        stdo = mesonlib.Popen_safe(self.exelist + ['-h'])[1]
+        # Enable deterministic builds if they are available.
+        stdargs = 'csr'
+        thinargs = ''
+        if '[D]' in stdo:
+            stdargs += 'D'
+        if '[T]' in stdo:
+            thinargs = 'T'
+        self.std_args = [stdargs]
+        self.std_thin_args = [stdargs + thinargs]
+        self.can_rsp = '@<' in stdo
+        self.for_machine = for_machine
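+
+        # Worked example (illustrative): GNU binutils ar lists the [D]
+        # (deterministic) and [T] (thin archive) modifiers in its help output
+        # and accepts @<file> response files, so there this resolves to
+        # std_args == ['csrD'], std_thin_args == ['csrDT'], can_rsp == True.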
+
+    def can_linker_accept_rsp(self) -> bool:
+        return self.can_rsp
+
+    def get_std_link_args(self, env: 'Environment', is_thin: bool) -> T.List[str]:
+        # Thin archives are a GNU extension not supported by the system linkers
+        # on Mac OS X, Solaris, or illumos, so don't build them on those OSes.
+        # OS X ld rejects with: "file built for unknown-unsupported file format"
+        # illumos/Solaris ld rejects with: "unknown file type"
+        if is_thin and not env.machines[self.for_machine].is_darwin() \
+          and not env.machines[self.for_machine].is_sunos():
+            return self.std_thin_args
+        else:
+            return self.std_args
+
+
+class AppleArLinker(ArLinker):
+
+    # Mostly used to signal that ranlib needs to be run on the archive.
+
+    id = 'applear'
+
+
+class ArmarLinker(ArLikeLinker, StaticLinker):
+    id = 'armar'
+
+
+class DLinker(StaticLinker):
+    def __init__(self, exelist: T.List[str], arch: str, *, rsp_syntax: RSPFileSyntax = RSPFileSyntax.GCC):
+        super().__init__(exelist)
+        self.id = exelist[0]
+        self.arch = arch
+        self.__rsp_syntax = rsp_syntax
+
+    def get_std_link_args(self, env: 'Environment', is_thin: bool) -> T.List[str]:
+        return ['-lib']
+
+    def get_output_args(self, target: str) -> T.List[str]:
+        return ['-of=' + target]
+
+    def get_linker_always_args(self) -> T.List[str]:
+        if mesonlib.is_windows():
+            if self.arch == 'x86_64':
+                return ['-m64']
+            elif self.arch == 'x86_mscoff' and self.id == 'dmd':
+                return ['-m32mscoff']
+            return ['-m32']
+        return []
+
+    def rsp_file_syntax(self) -> RSPFileSyntax:
+        return self.__rsp_syntax
+
+
+class CcrxLinker(StaticLinker):
+
+    def __init__(self, exelist: T.List[str]):
+        super().__init__(exelist)
+        self.id = 'rlink'
+
+    def can_linker_accept_rsp(self) -> bool:
+        return False
+
+    def get_output_args(self, target: str) -> T.List[str]:
+        return [f'-output={target}']
+
+    def get_linker_always_args(self) -> T.List[str]:
+        return ['-nologo', '-form=library']
+
+
+class Xc16Linker(StaticLinker):
+
+    def __init__(self, exelist: T.List[str]):
+        super().__init__(exelist)
+        self.id = 'xc16-ar'
+
+    def can_linker_accept_rsp(self) -> bool:
+        return False
+
+    def get_output_args(self, target: str) -> T.List[str]:
+        return [f'{target}']
+
+    def get_linker_always_args(self) -> T.List[str]:
+        return ['rcs']
+
+
+class CompCertLinker(StaticLinker):
+
+    def __init__(self, exelist: T.List[str]):
+        super().__init__(exelist)
+        self.id = 'ccomp'
+
+    def can_linker_accept_rsp(self) -> bool:
+        return False
+
+    def get_output_args(self, target: str) -> T.List[str]:
+        return [f'-o{target}']
+
+
+class TILinker(StaticLinker):
+
+    def __init__(self, exelist: T.List[str]):
+        super().__init__(exelist)
+        self.id = 'ti-ar'
+
+    def can_linker_accept_rsp(self) -> bool:
+        return False
+
+    def get_output_args(self, target: str) -> T.List[str]:
+        return [f'{target}']
+
+    def get_linker_always_args(self) -> T.List[str]:
+        return ['-r']
+
+
+class C2000Linker(TILinker):
+    # Required for backwards compat with projects created before ti-cgt support existed
+    id = 'ar2000'
+
+
+class AIXArLinker(ArLikeLinker, StaticLinker):
+    id = 'aixar'
+    std_args = ['-csr', '-Xany']
+
+
+class MetrowerksStaticLinker(StaticLinker):
+
+    def can_linker_accept_rsp(self) -> bool:
+        return True
+
+    def get_linker_always_args(self) -> T.List[str]:
+        return ['-library']
+
+    def get_output_args(self, target: str) -> T.List[str]:
+        return ['-o', target]
+
+    def rsp_file_syntax(self) -> RSPFileSyntax:
+        return RSPFileSyntax.GCC
+
+
+class MetrowerksStaticLinkerARM(MetrowerksStaticLinker):
+    id = 'mwldarm'
+
+
+class MetrowerksStaticLinkerEmbeddedPowerPC(MetrowerksStaticLinker):
+    id = 'mwldeppc'
+
+
+def prepare_rpaths(raw_rpaths: T.Tuple[str, ...], build_dir: str, from_dir: str) -> T.List[str]:
+    # The rpaths we write must be relative if they point to the build dir,
+    # because otherwise they have different length depending on the build
+    # directory. This breaks reproducible builds.
+    internal_format_rpaths = [evaluate_rpath(p, build_dir, from_dir) for p in raw_rpaths]
+    ordered_rpaths = order_rpaths(internal_format_rpaths)
+    return ordered_rpaths
+
+
+def order_rpaths(rpath_list: T.List[str]) -> T.List[str]:
+    # We want rpaths that point inside our build dir to always override
+    # those pointing to other places in the file system. This is so built
+    # binaries prefer our libraries to the ones that may lie somewhere
+    # in the file system, such as /lib/x86_64-linux-gnu.
+    #
+    # The correct thing to do here would be C++'s std::stable_partition.
+    # Python standard library does not have it, so replicate it with
+    # sort, which is guaranteed to be stable.
+    return sorted(rpath_list, key=os.path.isabs)
+
+
+def evaluate_rpath(p: str, build_dir: str, from_dir: str) -> str:
+    if p == from_dir:
+        return '' # relpath errors out in this case
+    elif os.path.isabs(p):
+        return p # These can be outside of build dir.
+    else:
+        return os.path.relpath(os.path.join(build_dir, p), os.path.join(build_dir, from_dir))
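+
+# Worked example (illustrative, not from upstream): for
+# prepare_rpaths(('sub/lib', '/opt/lib'), '/b', 'sub/app'):
+#   evaluate_rpath('sub/lib', '/b', 'sub/app')  -> '../lib'   (made relative)
+#   evaluate_rpath('/opt/lib', '/b', 'sub/app') -> '/opt/lib' (kept absolute)
+# order_rpaths() then stable-sorts on os.path.isabs, so the build-dir
+# entry '../lib' ends up ahead of the absolute '/opt/lib'.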
+
+
+class DynamicLinker(metaclass=abc.ABCMeta):
+
+    """Base class for dynamic linkers."""
+
+    _BUILDTYPE_ARGS: T.Dict[str, T.List[str]] = {
+        'plain': [],
+        'debug': [],
+        'debugoptimized': [],
+        'release': [],
+        'minsize': [],
+        'custom': [],
+    }
+
+    @abc.abstractproperty
+    def id(self) -> str:
+        pass
+
+    def _apply_prefix(self, arg: T.Union[str, T.List[str]]) -> T.List[str]:
+        args = [arg] if isinstance(arg, str) else arg
+        if self.prefix_arg is None:
+            return args
+        elif isinstance(self.prefix_arg, str):
+            return [self.prefix_arg + arg for arg in args]
+        ret: T.List[str] = []
+        for arg in args:
+            ret += self.prefix_arg + [arg]
+        return ret
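+
+    # Worked example (illustrative): with a string prefix each argument is
+    # concatenated onto it, while a list prefix becomes a separate argv entry:
+    #   prefix_arg '-Wl,':       _apply_prefix('--as-needed')    -> ['-Wl,--as-needed']
+    #   prefix_arg '-Wl,':       _apply_prefix(['-z', 'origin']) -> ['-Wl,-z', '-Wl,origin']
+    #   prefix_arg ['-Xlinker']: _apply_prefix('--as-needed')    -> ['-Xlinker', '--as-needed']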
+
+    def __init__(self, exelist: T.List[str],
+                 for_machine: mesonlib.MachineChoice, prefix_arg: T.Union[str, T.List[str]],
+                 always_args: T.List[str], *, version: str = 'unknown version'):
+        self.exelist = exelist
+        self.for_machine = for_machine
+        self.version = version
+        self.prefix_arg = prefix_arg
+        self.always_args = always_args
+        self.machine: T.Optional[str] = None
+
+    def __repr__(self) -> str:
+        return '<{}: v{} `{}`>'.format(type(self).__name__, self.version, ' '.join(self.exelist))
+
+    def get_id(self) -> str:
+        return self.id
+
+    def get_version_string(self) -> str:
+        return f'({self.id} {self.version})'
+
+    def get_exelist(self) -> T.List[str]:
+        return self.exelist.copy()
+
+    def get_accepts_rsp(self) -> bool:
+        # rsp files are only used when building on Windows because we want to
+        # avoid issues with quoting and max argument length
+        return mesonlib.is_windows()
+
+    def rsp_file_syntax(self) -> RSPFileSyntax:
+        """The format of the RSP file that this compiler supports.
+
+        If `self.can_linker_accept_rsp()` returns True, then this needs to
+        be implemented
+        """
+        return RSPFileSyntax.GCC
+
+    def get_always_args(self) -> T.List[str]:
+        return self.always_args.copy()
+
+    def get_lib_prefix(self) -> str:
+        return ''
+
+    # XXX: is use_ldflags a compiler or a linker attribute?
+
+    def get_option_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+        return []
+
+    def has_multi_arguments(self, args: T.List[str], env: 'Environment') -> T.Tuple[bool, bool]:
+        raise EnvironmentException(f'Language {self.id} does not support has_multi_link_arguments.')
+
+    def get_debugfile_name(self, targetfile: str) -> T.Optional[str]:
+        '''Name of debug file written out (see below)'''
+        return None
+
+    def get_debugfile_args(self, targetfile: str) -> T.List[str]:
+        """Some compilers (MSVC) write debug into a separate file.
+
+        This method takes the target object path and returns a list of
+        commands to append to the linker invocation to control where that
+        file is written.
+        """
+        return []
+
+    def get_std_shared_lib_args(self) -> T.List[str]:
+        return []
+
+    def get_std_shared_module_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+        return self.get_std_shared_lib_args()
+
+    def get_pie_args(self) -> T.List[str]:
+        # TODO: this really needs to take a boolean and return the args to
+        # disable pie, otherwise it only acts to enable pie if pie *isn't* the
+        # default.
+        raise EnvironmentException(f'Linker {self.id} does not support position-independent executable')
+
+    def get_lto_args(self) -> T.List[str]:
+        return []
+
+    def get_thinlto_cache_args(self, path: str) -> T.List[str]:
+        return []
+
+    def sanitizer_args(self, value: str) -> T.List[str]:
+        return []
+
+    def get_buildtype_args(self, buildtype: str) -> T.List[str]:
+        # We can override these in children by just overriding the
+        # _BUILDTYPE_ARGS value.
+        return self._BUILDTYPE_ARGS[buildtype]
+
+    def get_asneeded_args(self) -> T.List[str]:
+        return []
+
+    def get_link_whole_for(self, args: T.List[str]) -> T.List[str]:
+        raise EnvironmentException(
+            f'Linker {self.id} does not support link_whole')
+
+    def get_allow_undefined_args(self) -> T.List[str]:
+        raise EnvironmentException(
+            f'Linker {self.id} does not support allow undefined')
+
+    @abc.abstractmethod
+    def get_output_args(self, outputname: str) -> T.List[str]:
+        pass
+
+    def get_coverage_args(self) -> T.List[str]:
+        raise EnvironmentException(f"Linker {self.id} doesn't implement coverage data generation.")
+
+    @abc.abstractmethod
+    def get_search_args(self, dirname: str) -> T.List[str]:
+        pass
+
+    def export_dynamic_args(self, env: 'Environment') -> T.List[str]:
+        return []
+
+    def import_library_args(self, implibname: str) -> T.List[str]:
+        """The name of the outputted import library.
+
+        This implementation is used only on Windows by compilers that use GNU ld
+        """
+        return []
+
+    def thread_flags(self, env: 'Environment') -> T.List[str]:
+        return []
+
+    def no_undefined_args(self) -> T.List[str]:
+        """Arguments to error if there are any undefined symbols at link time.
+
+        This is the inverse of get_allow_undefined_args().
+
+        TODO: A future cleanup might merge this and
+              get_allow_undefined_args() into a single method taking a
+              boolean
+        """
+        return []
+
+    def fatal_warnings(self) -> T.List[str]:
+        """Arguments to make all warnings errors."""
+        return []
+
+    def headerpad_args(self) -> T.List[str]:
+        # Only used by the Apple linker
+        return []
+
+    def get_gui_app_args(self, value: bool) -> T.List[str]:
+        # Only used by VisualStudioLikeLinkers
+        return []
+
+    def get_win_subsystem_args(self, value: str) -> T.List[str]:
+        # Only used if supported by the dynamic linker and
+        # only when targeting Windows
+        return []
+
+    def bitcode_args(self) -> T.List[str]:
+        raise MesonException('This linker does not support bitcode bundles')
+
+    def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
+                         rpath_paths: T.Tuple[str, ...], build_rpath: str,
+                         install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
+        return ([], set())
+
+    def get_soname_args(self, env: 'Environment', prefix: str, shlib_name: str,
+                        suffix: str, soversion: str, darwin_versions: T.Tuple[str, str]) -> T.List[str]:
+        return []
+
+    def get_archive_name(self, filename: str) -> str:
+        # Only used by AIX.
+        return str()
+
+    def get_command_to_archive_shlib(self) -> T.List[str]:
+        # Only used by AIX.
+        return []
+
+
+class PosixDynamicLinkerMixin:
+
+    """Mixin class for POSIX-ish linkers.
+
+    This is obviously a pretty small subset of the linker interface, but
+    enough dynamic linkers that meson supports are POSIX-like but not
+    GNU-like that it makes sense to split this out.
+    """
+
+    def get_output_args(self, outputname: str) -> T.List[str]:
+        return ['-o', outputname]
+
+    def get_std_shared_lib_args(self) -> T.List[str]:
+        return ['-shared']
+
+    def get_search_args(self, dirname: str) -> T.List[str]:
+        return ['-L' + dirname]
+
+
+class GnuLikeDynamicLinkerMixin:
+
+    """Mixin class for dynamic linkers that provides gnu-like interface.
+
+    This acts as a base for the GNU linkers (bfd and gold), LLVM's lld, and
+    other linkers that accept a GNU-ld-compatible interface.
+    """
+
+    if T.TYPE_CHECKING:
+        for_machine = MachineChoice.HOST
+        def _apply_prefix(self, arg: T.Union[str, T.List[str]]) -> T.List[str]: ...
+
+    _BUILDTYPE_ARGS: T.Dict[str, T.List[str]] = {
+        'plain': [],
+        'debug': [],
+        'debugoptimized': [],
+        'release': ['-O1'],
+        'minsize': [],
+        'custom': [],
+    }
+
+    _SUBSYSTEMS: T.Dict[str, str] = {
+        "native": "1",
+        "windows": "windows",
+        "console": "console",
+        "posix": "7",
+        "efi_application": "10",
+        "efi_boot_service_driver": "11",
+        "efi_runtime_driver": "12",
+        "efi_rom": "13",
+        "boot_application": "16",
+    }
+
+    def get_buildtype_args(self, buildtype: str) -> T.List[str]:
+        # We can override these in children by just overriding the
+        # _BUILDTYPE_ARGS value.
+        return mesonlib.listify([self._apply_prefix(a) for a in self._BUILDTYPE_ARGS[buildtype]])
+
+    def get_pie_args(self) -> T.List[str]:
+        return ['-pie']
+
+    def get_asneeded_args(self) -> T.List[str]:
+        return self._apply_prefix('--as-needed')
+
+    def get_link_whole_for(self, args: T.List[str]) -> T.List[str]:
+        if not args:
+            return args
+        return self._apply_prefix('--whole-archive') + args + self._apply_prefix('--no-whole-archive')
+
+    def get_allow_undefined_args(self) -> T.List[str]:
+        return self._apply_prefix('--allow-shlib-undefined')
+
+    def get_lto_args(self) -> T.List[str]:
+        return ['-flto']
+
+    def sanitizer_args(self, value: str) -> T.List[str]:
+        if value == 'none':
+            return []
+        return ['-fsanitize=' + value]
+
+    def get_coverage_args(self) -> T.List[str]:
+        return ['--coverage']
+
+    def export_dynamic_args(self, env: 'Environment') -> T.List[str]:
+        m = env.machines[self.for_machine]
+        if m.is_windows() or m.is_cygwin():
+            return self._apply_prefix('--export-all-symbols')
+        return self._apply_prefix('-export-dynamic')
+
+    def import_library_args(self, implibname: str) -> T.List[str]:
+        return self._apply_prefix('--out-implib=' + implibname)
+
+    def thread_flags(self, env: 'Environment') -> T.List[str]:
+        if env.machines[self.for_machine].is_haiku():
+            return []
+        return ['-pthread']
+
+    def no_undefined_args(self) -> T.List[str]:
+        return self._apply_prefix('--no-undefined')
+
+    def fatal_warnings(self) -> T.List[str]:
+        return self._apply_prefix('--fatal-warnings')
+
+    def get_soname_args(self, env: 'Environment', prefix: str, shlib_name: str,
+                        suffix: str, soversion: str, darwin_versions: T.Tuple[str, str]) -> T.List[str]:
+        m = env.machines[self.for_machine]
+        if m.is_windows() or m.is_cygwin():
+            # For PE/COFF the soname argument has no effect
+            return []
+        sostr = '' if soversion is None else '.' + soversion
+        return self._apply_prefix(f'-soname,{prefix}{shlib_name}.{suffix}{sostr}')
+
+    def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
+                         rpath_paths: T.Tuple[str, ...], build_rpath: str,
+                         install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
+        m = env.machines[self.for_machine]
+        if m.is_windows() or m.is_cygwin():
+            return ([], set())
+        if not rpath_paths and not install_rpath and not build_rpath:
+            return ([], set())
+        args: T.List[str] = []
+        origin_placeholder = '$ORIGIN'
+        processed_rpaths = prepare_rpaths(rpath_paths, build_dir, from_dir)
+        # Need to deduplicate rpaths, as macOS's install_name_tool
+        # is *very* allergic to duplicate -delete_rpath arguments
+        # when calling depfixer on installation.
+        all_paths = mesonlib.OrderedSet([os.path.join(origin_placeholder, p) for p in processed_rpaths])
+        rpath_dirs_to_remove: T.Set[bytes] = set()
+        for p in all_paths:
+            rpath_dirs_to_remove.add(p.encode('utf8'))
+        # Build_rpath is used as-is (it is usually absolute).
+        if build_rpath != '':
+            all_paths.add(build_rpath)
+            for p in build_rpath.split(':'):
+                rpath_dirs_to_remove.add(p.encode('utf8'))
+
+        # TODO: should this actually be "for (dragonfly|open)bsd"?
+        if mesonlib.is_dragonflybsd() or mesonlib.is_openbsd():
+            # This argument instructs the compiler to record the value of
+            # ORIGIN in the .dynamic section of the elf. On Linux this is done
+            # by default, but not on dragonfly/openbsd for some reason. Without
+            # it, $ORIGIN in the runtime path will be undefined and any binaries
+            # linked against local libraries will fail to resolve them.
+            args.extend(self._apply_prefix('-z,origin'))
+
+        # In order to avoid relinking for RPATH removal, the binary needs to contain just
+        # enough space in the ELF header to hold the final installation RPATH.
+        paths = ':'.join(all_paths)
+        if len(paths) < len(install_rpath):
+            padding = 'X' * (len(install_rpath) - len(paths))
+            if not paths:
+                paths = padding
+            else:
+                paths = paths + ':' + padding
+        args.extend(self._apply_prefix('-rpath,' + paths))
+
+        # TODO: should this actually be "for solaris/sunos"?
+        if mesonlib.is_sunos():
+            return (args, rpath_dirs_to_remove)
+
+        # Rpaths to use while linking must be absolute. These are not
+        # written to the binary. Needed only with GNU ld:
+        # https://sourceware.org/bugzilla/show_bug.cgi?id=16936
+        # Not needed on Windows or other platforms that don't use RPATH
+        # https://github.com/mesonbuild/meson/issues/1897
+        #
+        # In addition, this linker option tends to be quite long and some
+        # compilers have trouble dealing with it. That's why we will include
+        # one option per folder, like this:
+        #
+        #   -Wl,-rpath-link,/path/to/folder1 -Wl,-rpath,/path/to/folder2 ...
+        #
+        # ...instead of just one single looooong option, like this:
+        #
+        #   -Wl,-rpath-link,/path/to/folder1:/path/to/folder2:...
+        for p in rpath_paths:
+            args.extend(self._apply_prefix('-rpath-link,' + os.path.join(build_dir, p)))
+
+        return (args, rpath_dirs_to_remove)
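+
+    # Worked example (illustrative): with a build-time rpath of
+    # '$ORIGIN/../lib' (14 chars) and an install_rpath of
+    # '/usr/local/lib/myapp' (20 chars), six 'X' bytes of padding are
+    # appended, giving '-Wl,-rpath,$ORIGIN/../lib:XXXXXX', so the install
+    # step can rewrite the rpath in place without relinking.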
+
+    def get_win_subsystem_args(self, value: str) -> T.List[str]:
+        # MinGW only directly supports a couple of the possible
+        # PE application types. The raw integer works as an argument
+        # as well, and is always accepted, so we manually map the
+        # other types here. List of all types:
+        # https://github.com/wine-mirror/wine/blob/3ded60bd1654dc689d24a23305f4a93acce3a6f2/include/winnt.h#L2492-L2507
+        versionsuffix = None
+        if ',' in value:
+            value, versionsuffix = value.split(',', 1)
+        newvalue = self._SUBSYSTEMS.get(value)
+        if newvalue is not None:
+            if versionsuffix is not None:
+                newvalue += f':{versionsuffix}'
+            args = [f'--subsystem,{newvalue}']
+        else:
+            raise mesonlib.MesonBugException(f'win_subsystem: {value!r} not handled in MinGW linker. This should not be possible.')
+
+        return self._apply_prefix(args)
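+
+    # Worked example (illustrative): 'windows,6.0' splits into subsystem and
+    # version and yields ['-Wl,--subsystem,windows:6.0'], while
+    # 'efi_application' maps through _SUBSYSTEMS to ['-Wl,--subsystem,10'].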
+
+
+class AppleDynamicLinker(PosixDynamicLinkerMixin, DynamicLinker):
+
+    """Apple's ld implementation."""
+
+    id = 'ld64'
+
+    def get_asneeded_args(self) -> T.List[str]:
+        return self._apply_prefix('-dead_strip_dylibs')
+
+    def get_allow_undefined_args(self) -> T.List[str]:
+        return self._apply_prefix('-undefined,dynamic_lookup')
+
+    def get_std_shared_module_args(self, options: 'KeyedOptionDictType') -> T.List[str]:
+        return ['-bundle'] + self._apply_prefix('-undefined,dynamic_lookup')
+
+    def get_pie_args(self) -> T.List[str]:
+        return []
+
+    def get_link_whole_for(self, args: T.List[str]) -> T.List[str]:
+        result: T.List[str] = []
+        for a in args:
+            result.extend(self._apply_prefix('-force_load'))
+            result.append(a)
+        return result
+
+    def get_coverage_args(self) -> T.List[str]:
+        return ['--coverage']
+
+    def sanitizer_args(self, value: str) -> T.List[str]:
+        if value == 'none':
+            return []
+        return ['-fsanitize=' + value]
+
+    def no_undefined_args(self) -> T.List[str]:
+        return self._apply_prefix('-undefined,error')
+
+    def headerpad_args(self) -> T.List[str]:
+        return self._apply_prefix('-headerpad_max_install_names')
+
+    def bitcode_args(self) -> T.List[str]:
+        return self._apply_prefix('-bitcode_bundle')
+
+    def fatal_warnings(self) -> T.List[str]:
+        return self._apply_prefix('-fatal_warnings')
+
+    def get_soname_args(self, env: 'Environment', prefix: str, shlib_name: str,
+                        suffix: str, soversion: str, darwin_versions: T.Tuple[str, str]) -> T.List[str]:
+        install_name = ['@rpath/', prefix, shlib_name]
+        if soversion is not None:
+            install_name.append('.' + soversion)
+        install_name.append('.dylib')
+        args = ['-install_name', ''.join(install_name)]
+        if darwin_versions:
+            args.extend(['-compatibility_version', darwin_versions[0],
+                         '-current_version', darwin_versions[1]])
+        return args
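+
+    # Worked example (illustrative):
+    #   get_soname_args(env, 'lib', 'foo', 'dylib', '0', ('1.0.0', '1.2.0'))
+    # returns ['-install_name', '@rpath/libfoo.0.dylib',
+    #          '-compatibility_version', '1.0.0', '-current_version', '1.2.0'].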
+
+    def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
+                         rpath_paths: T.Tuple[str, ...], build_rpath: str,
+                         install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
+        if not rpath_paths and not install_rpath and not build_rpath:
+            return ([], set())
+        args: T.List[str] = []
+        # @loader_path is the equivalent of $ORIGIN on macOS
+        # https://stackoverflow.com/q/26280738
+        origin_placeholder = '@loader_path'
+        processed_rpaths = prepare_rpaths(rpath_paths, build_dir, from_dir)
+        all_paths = mesonlib.OrderedSet([os.path.join(origin_placeholder, p) for p in processed_rpaths])
+        if build_rpath != '':
+            all_paths.add(build_rpath)
+        for rp in all_paths:
+            args.extend(self._apply_prefix('-rpath,' + rp))
+
+        return (args, set())
+
+    def get_thinlto_cache_args(self, path: str) -> T.List[str]:
+        return ["-Wl,-cache_path_lto," + path]
+
+
+class LLVMLD64DynamicLinker(AppleDynamicLinker):
+
+    id = 'ld64.lld'
+
+
+class GnuDynamicLinker(GnuLikeDynamicLinkerMixin, PosixDynamicLinkerMixin, DynamicLinker):
+
+    """Representation of GNU ld.bfd and ld.gold."""
+
+    def get_accepts_rsp(self) -> bool:
+        return True
+
+
+class GnuGoldDynamicLinker(GnuDynamicLinker):
+
+    id = 'ld.gold'
+
+    def get_thinlto_cache_args(self, path: str) -> T.List[str]:
+        return ['-Wl,-plugin-opt,cache-dir=' + path]
+
+
+class GnuBFDDynamicLinker(GnuDynamicLinker):
+
+    id = 'ld.bfd'
+
+
+class MoldDynamicLinker(GnuDynamicLinker):
+
+    id = 'ld.mold'
+
+    def get_thinlto_cache_args(self, path: str) -> T.List[str]:
+        return ['-Wl,--thinlto-cache-dir=' + path]
+
+
+class LLVMDynamicLinker(GnuLikeDynamicLinkerMixin, PosixDynamicLinkerMixin, DynamicLinker):
+
+    """Representation of LLVM's ld.lld linker.
+
+    This is only the gnu-like linker, not the apple like or link.exe like
+    linkers.
+    """
+
+    id = 'ld.lld'
+
+    def __init__(self, exelist: T.List[str],
+                 for_machine: mesonlib.MachineChoice, prefix_arg: T.Union[str, T.List[str]],
+                 always_args: T.List[str], *, version: str = 'unknown version'):
+        super().__init__(exelist, for_machine, prefix_arg, always_args, version=version)
+
+        # Some targets don't seem to support this argument (windows, wasm, ...)
+        _, _, e = mesonlib.Popen_safe(self.exelist + always_args + self._apply_prefix('--allow-shlib-undefined'))
+        # Versions < 9 do not have a quoted argument
+        self.has_allow_shlib_undefined = ('unknown argument: --allow-shlib-undefined' not in e) and ("unknown argument: '--allow-shlib-undefined'" not in e)
+
+    def get_allow_undefined_args(self) -> T.List[str]:
+        if self.has_allow_shlib_undefined:
+            return self._apply_prefix('--allow-shlib-undefined')
+        return []
+
+    def get_thinlto_cache_args(self, path: str) -> T.List[str]:
+        return ['-Wl,--thinlto-cache-dir=' + path]
+
+    def get_win_subsystem_args(self, value: str) -> T.List[str]:
+        # lld does not support a numeric subsystem value
+        version = None
+        if ',' in value:
+            value, version = value.split(',', 1)
+        if value in self._SUBSYSTEMS:
+            if version is not None:
+                value += f':{version}'
+            return self._apply_prefix([f'--subsystem,{value}'])
+        else:
+            raise mesonlib.MesonBugException(f'win_subsystem: {value} not handled in lld linker. This should not be possible.')
+
+
+class WASMDynamicLinker(GnuLikeDynamicLinkerMixin, PosixDynamicLinkerMixin, DynamicLinker):
+
+    """Emscripten's wasm-ld."""
+
+    id = 'ld.wasm'
+
+    def get_allow_undefined_args(self) -> T.List[str]:
+        return ['-sERROR_ON_UNDEFINED_SYMBOLS=0']
+
+    def no_undefined_args(self) -> T.List[str]:
+        return ['-sERROR_ON_UNDEFINED_SYMBOLS=1']
+
+    def get_soname_args(self, env: 'Environment', prefix: str, shlib_name: str,
+                        suffix: str, soversion: str, darwin_versions: T.Tuple[str, str]) -> T.List[str]:
+        raise MesonException(f'{self.id} does not support shared libraries.')
+
+    def get_asneeded_args(self) -> T.List[str]:
+        return []
+
+    def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
+                         rpath_paths: T.Tuple[str, ...], build_rpath: str,
+                         install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
+        return ([], set())
+
+
+class CcrxDynamicLinker(DynamicLinker):
+
+    """Linker for Renesas CCrx compiler."""
+
+    id = 'rlink'
+
+    def __init__(self, for_machine: mesonlib.MachineChoice,
+                 *, version: str = 'unknown version'):
+        super().__init__(['rlink.exe'], for_machine, '', [],
+                         version=version)
+
+    def get_accepts_rsp(self) -> bool:
+        return False
+
+    def get_lib_prefix(self) -> str:
+        return '-lib='
+
+    def get_std_shared_lib_args(self) -> T.List[str]:
+        return []
+
+    def get_output_args(self, outputname: str) -> T.List[str]:
+        return [f'-output={outputname}']
+
+    def get_search_args(self, dirname: str) -> 'T.NoReturn':
+        raise OSError('rlink.exe does not have a search dir argument')
+
+    def get_allow_undefined_args(self) -> T.List[str]:
+        return []
+
+    def get_soname_args(self, env: 'Environment', prefix: str, shlib_name: str,
+                        suffix: str, soversion: str, darwin_versions: T.Tuple[str, str]) -> T.List[str]:
+        return []
+
+
+class Xc16DynamicLinker(DynamicLinker):
+
+    """Linker for Microchip XC16 compiler."""
+
+    id = 'xc16-gcc'
+
+    def __init__(self, for_machine: mesonlib.MachineChoice,
+                 *, version: str = 'unknown version'):
+        super().__init__(['xc16-gcc'], for_machine, '', [],
+                         version=version)
+
+    def get_link_whole_for(self, args: T.List[str]) -> T.List[str]:
+        if not args:
+            return args
+        return self._apply_prefix('--start-group') + args + self._apply_prefix('--end-group')
+
+    def get_accepts_rsp(self) -> bool:
+        return False
+
+    def get_lib_prefix(self) -> str:
+        return ''
+
+    def get_std_shared_lib_args(self) -> T.List[str]:
+        return []
+
+    def get_output_args(self, outputname: str) -> T.List[str]:
+        return [f'-o{outputname}']
+
+    def get_search_args(self, dirname: str) -> 'T.NoReturn':
+        raise OSError('xc16-gcc does not have a search dir argument')
+
+    def get_allow_undefined_args(self) -> T.List[str]:
+        return []
+
+    def get_soname_args(self, env: 'Environment', prefix: str, shlib_name: str,
+                        suffix: str, soversion: str, darwin_versions: T.Tuple[str, str]) -> T.List[str]:
+        return []
+
+    def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
+                         rpath_paths: T.Tuple[str, ...], build_rpath: str,
+                         install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
+        return ([], set())
+
+
+class CompCertDynamicLinker(DynamicLinker):
+
+    """Linker for CompCert C compiler."""
+
+    id = 'ccomp'
+
+    def __init__(self, for_machine: mesonlib.MachineChoice,
+                 *, version: str = 'unknown version'):
+        super().__init__(['ccomp'], for_machine, '', [],
+                         version=version)
+
+    def get_link_whole_for(self, args: T.List[str]) -> T.List[str]:
+        if not args:
+            return args
+        return self._apply_prefix('-Wl,--whole-archive') + args + self._apply_prefix('-Wl,--no-whole-archive')
+
+    def get_accepts_rsp(self) -> bool:
+        return False
+
+    def get_lib_prefix(self) -> str:
+        return ''
+
+    def get_std_shared_lib_args(self) -> T.List[str]:
+        return []
+
+    def get_output_args(self, outputname: str) -> T.List[str]:
+        return [f'-o{outputname}']
+
+    def get_search_args(self, dirname: str) -> T.List[str]:
+        return [f'-L{dirname}']
+
+    def get_allow_undefined_args(self) -> T.List[str]:
+        return []
+
+    def get_soname_args(self, env: 'Environment', prefix: str, shlib_name: str,
+                        suffix: str, soversion: str, darwin_versions: T.Tuple[str, str]) -> T.List[str]:
+        raise MesonException(f'{self.id} does not support shared libraries.')
+
+    def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
+                         rpath_paths: T.Tuple[str, ...], build_rpath: str,
+                         install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
+        return ([], set())
+
+
+class TIDynamicLinker(DynamicLinker):
+
+    """Linker for Texas Instruments compiler family."""
+
+    id = 'ti'
+
+    def __init__(self, exelist: T.List[str], for_machine: mesonlib.MachineChoice,
+                 *, version: str = 'unknown version'):
+        super().__init__(exelist, for_machine, '', [],
+                         version=version)
+
+    def get_link_whole_for(self, args: T.List[str]) -> T.List[str]:
+        if not args:
+            return args
+        return self._apply_prefix('--start-group') + args + self._apply_prefix('--end-group')
+
+    def get_accepts_rsp(self) -> bool:
+        return False
+
+    def get_lib_prefix(self) -> str:
+        return '-l='
+
+    def get_std_shared_lib_args(self) -> T.List[str]:
+        return []
+
+    def get_output_args(self, outputname: str) -> T.List[str]:
+        return ['-z', f'--output_file={outputname}']
+
+    def get_search_args(self, dirname: str) -> 'T.NoReturn':
+        raise OSError('TI compilers do not have a search dir argument')
+
+    def get_allow_undefined_args(self) -> T.List[str]:
+        return []
+
+    def get_always_args(self) -> T.List[str]:
+        return []
+
+
+class C2000DynamicLinker(TIDynamicLinker):
+    # Required for backwards compat with projects created before ti-cgt support existed
+    id = 'cl2000'
+
+
+class ArmDynamicLinker(PosixDynamicLinkerMixin, DynamicLinker):
+
+    """Linker for the ARM compiler."""
+
+    id = 'armlink'
+
+    def __init__(self, for_machine: mesonlib.MachineChoice,
+                 *, version: str = 'unknown version'):
+        super().__init__(['armlink'], for_machine, '', [],
+                         version=version)
+
+    def get_accepts_rsp(self) -> bool:
+        return False
+
+    def get_std_shared_lib_args(self) -> 'T.NoReturn':
+        raise MesonException('The Arm Linkers do not support shared libraries')
+
+    def get_allow_undefined_args(self) -> T.List[str]:
+        return []
+
+
+class ArmClangDynamicLinker(ArmDynamicLinker):
+
+    """Linker used with ARM's clang fork.
+
+    The interface is similar enough to the old ARM ld that it inherits and
+    extends a few things as needed.
+    """
+
+    def export_dynamic_args(self, env: 'Environment') -> T.List[str]:
+        return ['--export_dynamic']
+
+    def import_library_args(self, implibname: str) -> T.List[str]:
+        return ['--symdefs=' + implibname]
+
+
+class QualcommLLVMDynamicLinker(LLVMDynamicLinker):
+
+    """ARM Linker from Snapdragon LLVM ARM Compiler."""
+
+    id = 'ld.qcld'
+
+
+class NAGDynamicLinker(PosixDynamicLinkerMixin, DynamicLinker):
+
+    """NAG Fortran linker, ld via gcc indirection.
+
+    Using nagfor -Wl,foo passes option foo to a backend gcc invocation.
+    (This linking gathers the correct objects needed from the nagfor runtime
+    system.)
+    To pass gcc -Wl,foo options (i.e., to ld) one must apply indirection
+    again: nagfor -Wl,-Wl,,foo
+    """
+
+    id = 'nag'
+
+    def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
+                         rpath_paths: T.Tuple[str, ...], build_rpath: str,
+                         install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
+        if not rpath_paths and not install_rpath and not build_rpath:
+            return ([], set())
+        args: T.List[str] = []
+        origin_placeholder = '$ORIGIN'
+        processed_rpaths = prepare_rpaths(rpath_paths, build_dir, from_dir)
+        all_paths = mesonlib.OrderedSet([os.path.join(origin_placeholder, p) for p in processed_rpaths])
+        if build_rpath != '':
+            all_paths.add(build_rpath)
+        for rp in all_paths:
+            args.extend(self._apply_prefix('-Wl,-Wl,,-rpath,,' + rp))
+
+        return (args, set())
+
+    def get_allow_undefined_args(self) -> T.List[str]:
+        return []
+
+    def get_std_shared_lib_args(self) -> T.List[str]:
+        from ..compilers.fortran import NAGFortranCompiler
+        return NAGFortranCompiler.get_nagfor_quiet(self.version) + ['-Wl,-shared']
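+
+    # Worked example (illustrative): each driver level consumes one '-Wl,'
+    # and un-escapes ',,' to ',', so `nagfor -Wl,-Wl,,-rpath,,/opt/lib`
+    # hands gcc '-Wl,-rpath,/opt/lib', which passes '-rpath /opt/lib' to ld.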
+
+
+class PGIDynamicLinker(PosixDynamicLinkerMixin, DynamicLinker):
+
+    """PGI linker."""
+
+    id = 'pgi'
+
+    def get_allow_undefined_args(self) -> T.List[str]:
+        return []
+
+    def get_soname_args(self, env: 'Environment', prefix: str, shlib_name: str,
+                        suffix: str, soversion: str, darwin_versions: T.Tuple[str, str]) -> T.List[str]:
+        return []
+
+    def get_std_shared_lib_args(self) -> T.List[str]:
+        # PGI -shared is Linux only.
+        if mesonlib.is_windows():
+            return ['-Bdynamic', '-Mmakedll']
+        elif mesonlib.is_linux():
+            return ['-shared']
+        return []
+
+    def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
+                         rpath_paths: T.Tuple[str, ...], build_rpath: str,
+                         install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
+        if not env.machines[self.for_machine].is_windows():
+            return (['-R' + os.path.join(build_dir, p) for p in rpath_paths], set())
+        return ([], set())
+
+
+NvidiaHPC_DynamicLinker = PGIDynamicLinker
+
+
+class PGIStaticLinker(StaticLinker):
+    def __init__(self, exelist: T.List[str]):
+        super().__init__(exelist)
+        self.id = 'ar'
+        self.std_args = ['-r']
+
+    def get_std_link_args(self, env: 'Environment', is_thin: bool) -> T.List[str]:
+        return self.std_args
+
+    def get_output_args(self, target: str) -> T.List[str]:
+        return [target]
+
+
+NvidiaHPC_StaticLinker = PGIStaticLinker
+
+
+class VisualStudioLikeLinkerMixin:
+
+    """Mixin class for dynamic linkers that act like Microsoft's link.exe."""
+
+    if T.TYPE_CHECKING:
+        for_machine = MachineChoice.HOST
+        def _apply_prefix(self, arg: T.Union[str, T.List[str]]) -> T.List[str]: ...
+
+    _BUILDTYPE_ARGS: T.Dict[str, T.List[str]] = {
+        'plain': [],
+        'debug': [],
+        'debugoptimized': [],
+        # The otherwise implicit REF and ICF linker optimisations are disabled by
+        # /DEBUG. REF implies ICF.
+        'release': ['/OPT:REF'],
+        'minsize': ['/INCREMENTAL:NO', '/OPT:REF'],
+        'custom': [],
+    }
+
+    def __init__(self, exelist: T.List[str], for_machine: mesonlib.MachineChoice,
+                 prefix_arg: T.Union[str, T.List[str]], always_args: T.List[str], *,
+                 version: str = 'unknown version', direct: bool = True, machine: str = 'x86'):
+        # There's no way I can find to make mypy understand what's going on here
+        super().__init__(exelist, for_machine, prefix_arg, always_args, version=version)  # type: ignore
+        self.machine = machine
+        self.direct = direct
+
+    def get_buildtype_args(self, buildtype: str) -> T.List[str]:
+        return mesonlib.listify([self._apply_prefix(a) for a in self._BUILDTYPE_ARGS[buildtype]])
+
+    def invoked_by_compiler(self) -> bool:
+        return not self.direct
+
+    def get_output_args(self, outputname: str) -> T.List[str]:
+        return self._apply_prefix(['/MACHINE:' + self.machine, '/OUT:' + outputname])
+
+    def get_always_args(self) -> T.List[str]:
+        parent = super().get_always_args() # type: ignore
+        return self._apply_prefix('/nologo') + T.cast('T.List[str]', parent)
+
+    def get_search_args(self, dirname: str) -> T.List[str]:
+        return self._apply_prefix('/LIBPATH:' + dirname)
+
+    def get_std_shared_lib_args(self) -> T.List[str]:
+        return self._apply_prefix('/DLL')
+
+    def get_debugfile_name(self, targetfile: str) -> str:
+        basename = targetfile.rsplit('.', maxsplit=1)[0]
+        return basename + '.pdb'
+
+    def get_debugfile_args(self, targetfile: str) -> T.List[str]:
+        return self._apply_prefix(['/DEBUG', '/PDB:' + self.get_debugfile_name(targetfile)])
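+
+    # Worked example (illustrative): for targetfile 'foo.dll' the debug file
+    # is named 'foo.pdb' and, with an empty linker prefix, the extra link
+    # arguments are ['/DEBUG', '/PDB:foo.pdb'].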
+
+    def get_link_whole_for(self, args: T.List[str]) -> T.List[str]:
+        # Only since VS2015
+        args = mesonlib.listify(args)
+        l: T.List[str] = []
+        for a in args:
+            l.extend(self._apply_prefix('/WHOLEARCHIVE:' + a))
+        return l
+
+    def get_allow_undefined_args(self) -> T.List[str]:
+        return []
+
+    def get_soname_args(self, env: 'Environment', prefix: str, shlib_name: str,
+                        suffix: str, soversion: str, darwin_versions: T.Tuple[str, str]) -> T.List[str]:
+        return []
+
+    def import_library_args(self, implibname: str) -> T.List[str]:
+        """The command to generate the import library."""
+        return self._apply_prefix(['/IMPLIB:' + implibname])
+
+    def rsp_file_syntax(self) -> RSPFileSyntax:
+        return RSPFileSyntax.MSVC
+
+
+class MSVCDynamicLinker(VisualStudioLikeLinkerMixin, DynamicLinker):
+
+    """Microsoft's Link.exe."""
+
+    id = 'link'
+
+    def __init__(self, for_machine: mesonlib.MachineChoice, always_args: T.List[str], *,
+                 exelist: T.Optional[T.List[str]] = None,
+                 prefix: T.Union[str, T.List[str]] = '',
+                 machine: str = 'x86', version: str = 'unknown version',
+                 direct: bool = True):
+        super().__init__(exelist or ['link.exe'], for_machine,
+                         prefix, always_args, machine=machine, version=version, direct=direct)
+
+    def get_always_args(self) -> T.List[str]:
+        return self._apply_prefix(['/nologo', '/release']) + super().get_always_args()
+
+    def get_gui_app_args(self, value: bool) -> T.List[str]:
+        return self.get_win_subsystem_args("windows" if value else "console")
+
+    def get_win_subsystem_args(self, value: str) -> T.List[str]:
+        return self._apply_prefix([f'/SUBSYSTEM:{value.upper()}'])
+
+
+class ClangClDynamicLinker(VisualStudioLikeLinkerMixin, DynamicLinker):
+
+    """Clang's lld-link.exe."""
+
+    id = 'lld-link'
+
+    def __init__(self, for_machine: mesonlib.MachineChoice, always_args: T.List[str], *,
+                 exelist: T.Optional[T.List[str]] = None,
+                 prefix: T.Union[str, T.List[str]] = '',
+                 machine: str = 'x86', version: str = 'unknown version',
+                 direct: bool = True):
+        super().__init__(exelist or ['lld-link.exe'], for_machine,
+                         prefix, always_args, machine=machine, version=version, direct=direct)
+
+    def get_output_args(self, outputname: str) -> T.List[str]:
+        # If we're being driven indirectly by clang, just skip /MACHINE,
+        # as clang's target triple will handle the machine selection.
+        if self.machine is None:
+            return self._apply_prefix([f"/OUT:{outputname}"])
+
+        return super().get_output_args(outputname)
+
+    def get_gui_app_args(self, value: bool) -> T.List[str]:
+        return self.get_win_subsystem_args("windows" if value else "console")
+
+    def get_win_subsystem_args(self, value: str) -> T.List[str]:
+        return self._apply_prefix([f'/SUBSYSTEM:{value.upper()}'])
+
+    def get_thinlto_cache_args(self, path: str) -> T.List[str]:
+        return ["/lldltocache:" + path]
+
+
+class XilinkDynamicLinker(VisualStudioLikeLinkerMixin, DynamicLinker):
+
+    """Intel's Xilink.exe."""
+
+    id = 'xilink'
+
+    def __init__(self, for_machine: mesonlib.MachineChoice, always_args: T.List[str], *,
+                 exelist: T.Optional[T.List[str]] = None,
+                 prefix: T.Union[str, T.List[str]] = '',
+                 machine: str = 'x86', version: str = 'unknown version',
+                 direct: bool = True):
+        # exelist, prefix, machine and direct are accepted for interface
+        # compatibility but ignored: xilink is always invoked as xilink.exe.
+        super().__init__(['xilink.exe'], for_machine, '', always_args, version=version)
+
+    def get_gui_app_args(self, value: bool) -> T.List[str]:
+        return self.get_win_subsystem_args("windows" if value else "console")
+
+    def get_win_subsystem_args(self, value: str) -> T.List[str]:
+        return self._apply_prefix([f'/SUBSYSTEM:{value.upper()}'])
+
+
+class SolarisDynamicLinker(PosixDynamicLinkerMixin, DynamicLinker):
+
+    """Sys-V derived linker used on Solaris and OpenSolaris."""
+
+    id = 'ld.solaris'
+
+    def get_link_whole_for(self, args: T.List[str]) -> T.List[str]:
+        if not args:
+            return args
+        return self._apply_prefix('--whole-archive') + args + self._apply_prefix('--no-whole-archive')
+
+    def get_pie_args(self) -> T.List[str]:
+        # Available in Solaris 11.2 and later
+        pc, stdo, stde = mesonlib.Popen_safe(self.exelist + self._apply_prefix('-zhelp'))
+        for line in (stdo + stde).split('\n'):
+            if '-z type' in line:
+                if 'pie' in line:
+                    return ['-z', 'type=pie']
+                break
+        return []
+
+    def get_asneeded_args(self) -> T.List[str]:
+        return self._apply_prefix(['-z', 'ignore'])
+
+    def no_undefined_args(self) -> T.List[str]:
+        return ['-z', 'defs']
+
+    def get_allow_undefined_args(self) -> T.List[str]:
+        return ['-z', 'nodefs']
+
+    def fatal_warnings(self) -> T.List[str]:
+        return ['-z', 'fatal-warnings']
+
+    def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
+                         rpath_paths: T.Tuple[str, ...], build_rpath: str,
+                         install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
+        if not rpath_paths and not install_rpath and not build_rpath:
+            return ([], set())
+        processed_rpaths = prepare_rpaths(rpath_paths, build_dir, from_dir)
+        all_paths = mesonlib.OrderedSet([os.path.join('$ORIGIN', p) for p in processed_rpaths])
+        rpath_dirs_to_remove: T.Set[bytes] = set()
+        for p in all_paths:
+            rpath_dirs_to_remove.add(p.encode('utf8'))
+        if build_rpath != '':
+            all_paths.add(build_rpath)
+            for p in build_rpath.split(':'):
+                rpath_dirs_to_remove.add(p.encode('utf8'))
+
+        # In order to avoid relinking for RPATH removal, the binary needs to contain just
+        # enough space in the ELF header to hold the final installation RPATH.
+        paths = ':'.join(all_paths)
+        if len(paths) < len(install_rpath):
+            padding = 'X' * (len(install_rpath) - len(paths))
+            if not paths:
+                paths = padding
+            else:
+                paths = paths + ':' + padding
+        return (self._apply_prefix(f'-rpath,{paths}'), rpath_dirs_to_remove)
+
+    def get_soname_args(self, env: 'Environment', prefix: str, shlib_name: str,
+                        suffix: str, soversion: str, darwin_versions: T.Tuple[str, str]) -> T.List[str]:
+        sostr = '' if soversion is None else '.' + soversion
+        return self._apply_prefix(f'-soname,{prefix}{shlib_name}.{suffix}{sostr}')
+
+
+class AIXDynamicLinker(PosixDynamicLinkerMixin, DynamicLinker):
+
+    """Sys-V derived linker used on AIX"""
+
+    id = 'ld.aix'
+
+    def get_always_args(self) -> T.List[str]:
+        return self._apply_prefix(['-bnoipath', '-bbigtoc']) + super().get_always_args()
+
+    def no_undefined_args(self) -> T.List[str]:
+        return self._apply_prefix(['-bernotok'])
+
+    def get_allow_undefined_args(self) -> T.List[str]:
+        return self._apply_prefix(['-berok'])
+
+    def get_archive_name(self, filename: str) -> str:
+        # On AIX the shared library name may carry the lt_version and
+        # so_version, but the archive name must end in plain .a. For example,
+        # the shared object libgio.so.0.7200.1 lives in an archive named
+        # libgio.a, i.e. libgio.a(libgio.so.0.7200.1). This regular
+        # expression strips the version components accordingly.
+        filename = re.sub('[.][a]([.]?([0-9]+))*([.]?([a-z]+))*', '.a', filename.replace('.so', '.a'))
+        return filename
+
+    def get_command_to_archive_shlib(self) -> T.List[str]:
+        # Archive shared library object and remove the shared library object,
+        # since it already exists in the archive.
+        command = ['ar', '-q', '-v', '$out', '$in', '&&', 'rm', '-f', '$in']
+        return command
+
+    def get_link_whole_for(self, args: T.List[str]) -> T.List[str]:
+        # AIX's linker always links the whole archive: "The ld command
+        # processes all input files in the same manner, whether they are
+        # archives or not."
+        return args
+
+    def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
+                         rpath_paths: T.Tuple[str, ...], build_rpath: str,
+                         install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
+        all_paths: mesonlib.OrderedSet[str] = mesonlib.OrderedSet()
+        # install_rpath first, followed by other paths, and the system path last
+        if install_rpath != '':
+            all_paths.add(install_rpath)
+        if build_rpath != '':
+            all_paths.add(build_rpath)
+        for p in rpath_paths:
+            all_paths.add(os.path.join(build_dir, p))
+        # We should consider allowing the $LIBPATH environment variable
+        # to override sys_path.
+        sys_path = env.get_compiler_system_lib_dirs(self.for_machine)
+        if len(sys_path) == 0:
+            # get_compiler_system_lib_dirs doesn't support our compiler.
+            # Use the default system library path
+            all_paths.update(['/usr/lib', '/lib'])
+        else:
+            # Include the compiler's default library paths, but filter out paths that don't exist
+            for p in sys_path:
+                if os.path.isdir(p):
+                    all_paths.add(p)
+        return (self._apply_prefix('-blibpath:' + ':'.join(all_paths)), set())
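+
+    # Worked example (illustrative): with install_rpath '/usr/local/lib',
+    # rpath_paths ('sub',) and no usable compiler search dirs, this returns
+    # roughly ['-Wl,-blibpath:/usr/local/lib:<build_dir>/sub:/usr/lib:/lib'].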
+
+    def thread_flags(self, env: 'Environment') -> T.List[str]:
+        return ['-pthread']
+
+
+class OptlinkDynamicLinker(VisualStudioLikeLinkerMixin, DynamicLinker):
+
+    """Digital Mars dynamic linker for windows."""
+
+    id = 'optlink'
+
+    def __init__(self, exelist: T.List[str], for_machine: mesonlib.MachineChoice,
+                 *, version: str = 'unknown version'):
+        # Use optlink instead of link so we don't interfere with other link.exe
+        # implementations.
+        super().__init__(exelist, for_machine, '', [], version=version)
+
+    def get_allow_undefined_args(self) -> T.List[str]:
+        return []
+
+    def get_debugfile_args(self, targetfile: str) -> T.List[str]:
+        # Optlink does not generate pdb files.
+        return []
+
+    def get_always_args(self) -> T.List[str]:
+        return []
+
+
+class CudaLinker(PosixDynamicLinkerMixin, DynamicLinker):
+    """Cuda linker (nvlink)"""
+
+    id = 'nvlink'
+
+    @staticmethod
+    def parse_version() -> str:
+        version_cmd = ['nvlink', '--version']
+        try:
+            _, out, _ = mesonlib.Popen_safe(version_cmd)
+        except OSError:
+            return 'unknown version'
+        # Output example:
+        # nvlink: NVIDIA (R) Cuda linker
+        # Copyright (c) 2005-2018 NVIDIA Corporation
+        # Built on Sun_Sep_30_21:09:22_CDT_2018
+        # Cuda compilation tools, release 10.0, V10.0.166
+        # We want the most detailed version string, which conveniently follows the final 'V'.
+        return out.strip().rsplit('V', maxsplit=1)[-1]
+
+    def get_accepts_rsp(self) -> bool:
+        # nvcc does not support response files
+        return False
+
+    def get_lib_prefix(self) -> str:
+        # nvcc doesn't recognize Meson's default .a extension for static libraries on
+        # Windows and passes it to cl as an object file, resulting in 'warning D9024 :
+        # unrecognized source file type 'xxx.a', object file assumed'.
+        #
+        # nvcc's --library= option doesn't help: it takes the library name without the
+        # extension and assumes that the extension on Windows is .lib; prefixing the
+        # library with -Xlinker= seems to work.
+        #
+        # On Linux, we have to rely on -Xlinker= too, since nvcc/nvlink chokes on
+        # versioned shared libraries:
+        #
+        #   nvcc fatal : Don't know what to do with 'subprojects/foo/libbar.so.0.1.2'
+        #
+        from ..compilers.cuda import CudaCompiler
+        return CudaCompiler.LINKER_PREFIX
+
+    def fatal_warnings(self) -> T.List[str]:
+        return ['--warning-as-error']
+
+    def get_allow_undefined_args(self) -> T.List[str]:
+        return []
+
+    def get_soname_args(self, env: 'Environment', prefix: str, shlib_name: str,
+                        suffix: str, soversion: str, darwin_versions: T.Tuple[str, str]) -> T.List[str]:
+        return []
+
+
+class MetrowerksLinker(DynamicLinker):
+
+    def __init__(self, exelist: T.List[str], for_machine: mesonlib.MachineChoice,
+                 *, version: str = 'unknown version'):
+        super().__init__(exelist, for_machine, '', [],
+                         version=version)
+
+    def fatal_warnings(self) -> T.List[str]:
+        return ['-w', 'error']
+
+    def get_allow_undefined_args(self) -> T.List[str]:
+        return []
+
+    def get_accepts_rsp(self) -> bool:
+        return True
+
+    def get_lib_prefix(self) -> str:
+        return ""
+
+    def get_linker_always_args(self) -> T.List[str]:
+        return []
+
+    def get_output_args(self, outputname: str) -> T.List[str]:
+        return ['-o', outputname]
+
+    def get_search_args(self, dirname: str) -> T.List[str]:
+        return self._apply_prefix('-L' + dirname)
+
+    def invoked_by_compiler(self) -> bool:
+        return False
+
+    def rsp_file_syntax(self) -> RSPFileSyntax:
+        return RSPFileSyntax.GCC
+
+
+class MetrowerksLinkerARM(MetrowerksLinker):
+    id = 'mwldarm'
+
+
+class MetrowerksLinkerEmbeddedPowerPC(MetrowerksLinker):
+    id = 'mwldeppc'
diff --git a/vendored-meson/meson/mesonbuild/mcompile.py b/vendored-meson/meson/mesonbuild/mcompile.py
new file mode 100644
index 000000000000..4e46702f04fd
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/mcompile.py
@@ -0,0 +1,359 @@
+# Copyright 2020 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Entrypoint script for backend agnostic compile."""
+
+from __future__ import annotations
+
+import os
+import json
+import re
+import sys
+import shutil
+import typing as T
+from collections import defaultdict
+from pathlib import Path
+
+from . import mlog
+from . import mesonlib
+from .mesonlib import MesonException, RealPathAction, join_args, setup_vsenv
+from mesonbuild.environment import detect_ninja
+from mesonbuild.coredata import UserArrayOption
+from mesonbuild import build
+
+if T.TYPE_CHECKING:
+    import argparse
+
+def array_arg(value: str) -> T.List[str]:
+    return UserArrayOption(None, value, allow_dups=True, user_input=True).value
+
+def validate_builddir(builddir: Path) -> None:
+    if not (builddir / 'meson-private' / 'coredata.dat').is_file():
+        raise MesonException(f'Current directory is not a meson build directory: `{builddir}`.\n'
+                             'Please specify a valid build dir or change the working directory to it.\n'
+                             'It is also possible that the build directory was generated with an old\n'
+                             'meson version. Please regenerate it in this case.')
+
+def parse_introspect_data(builddir: Path) -> T.Dict[str, T.List[dict]]:
+    """
+    Converts the introspected list of targets into a mapping from target name
+    to a list of target dicts (since names are not unique)
+    """
+    path_to_intro = builddir / 'meson-info' / 'intro-targets.json'
+    if not path_to_intro.exists():
+        raise MesonException(f'`{path_to_intro.name}` is missing! Directory is not configured yet?')
+    with path_to_intro.open(encoding='utf-8') as f:
+        schema = json.load(f)
+
+    parsed_data = defaultdict(list) # type: T.Dict[str, T.List[dict]]
+    for target in schema:
+        parsed_data[target['name']] += [target]
+    return parsed_data
+
+class ParsedTargetName:
+    full_name = ''
+    name = ''
+    type = ''
+    path = ''
+
+    def __init__(self, target: str):
+        self.full_name = target
+        split = target.rsplit(':', 1)
+        if len(split) > 1:
+            self.type = split[1]
+            if not self._is_valid_type(self.type):
+                raise MesonException(f'Can\'t invoke target `{target}`: unknown target type: `{self.type}`')
+
+        split = split[0].rsplit('/', 1)
+        if len(split) > 1:
+            self.path = split[0]
+            self.name = split[1]
+        else:
+            self.name = split[0]
+
+    @staticmethod
+    def _is_valid_type(type: str) -> bool:
+        # Amend docs in Commands.md when editing this list
+        allowed_types = {
+            'executable',
+            'static_library',
+            'shared_library',
+            'shared_module',
+            'custom',
+            'alias',
+            'run',
+            'jar',
+        }
+        return type in allowed_types
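+
+# Worked example (editor's illustration): parsing follows the
+# [PATH_TO_TARGET/]TARGET_NAME[:TARGET_TYPE] convention described in the
+# argument help below, so ParsedTargetName('subdir/foo:shared_library')
+# yields path='subdir', name='foo', type='shared_library', while a bare
+# ParsedTargetName('foo') leaves path and type empty.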
+
+def get_target_from_intro_data(target: ParsedTargetName, builddir: Path, introspect_data: T.Dict[str, T.Any]) -> T.Dict[str, T.Any]:
+    if target.name not in introspect_data:
+        raise MesonException(f'Can\'t invoke target `{target.full_name}`: target not found')
+
+    intro_targets = introspect_data[target.name]
+    found_targets = []  # type: T.List[T.Dict[str, T.Any]]
+
+    resolved_bdir = builddir.resolve()
+
+    if not target.type and not target.path:
+        found_targets = intro_targets
+    else:
+        for intro_target in intro_targets:
+            if ((target.type and target.type != intro_target['type'].replace(' ', '_')) or
+                (target.path and intro_target['filename'] != 'no_name' and
+                 Path(target.path) != Path(intro_target['filename'][0]).relative_to(resolved_bdir).parent)):
+                continue
+            found_targets += [intro_target]
+
+    if not found_targets:
+        raise MesonException(f'Can\'t invoke target `{target.full_name}`: target not found')
+    elif len(found_targets) > 1:
+        suggestions: T.List[str] = []
+        for i in found_targets:
+            p = Path(i['filename'][0]).relative_to(resolved_bdir).parent / i['name']
+            t = i['type'].replace(' ', '_')
+            suggestions.append(f'- ./{p}:{t}')
+        suggestions_str = '\n'.join(suggestions)
+        raise MesonException(f'Can\'t invoke target `{target.full_name}`: ambiguous name. '
+                             f'Add target type and/or path:\n{suggestions_str}')
+
+    return found_targets[0]
+
+def generate_target_names_ninja(target: ParsedTargetName, builddir: Path, introspect_data: dict) -> T.List[str]:
+    intro_target = get_target_from_intro_data(target, builddir, introspect_data)
+
+    if intro_target['type'] in {'alias', 'run'}:
+        return [target.name]
+    else:
+        return [str(Path(out_file).relative_to(builddir.resolve())) for out_file in intro_target['filename']]
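+
+# Editor's note: for `alias`/`run` targets ninja is invoked with the target
+# name itself; for everything else it is invoked with the output file paths
+# from the introspection data, so a target `foo` might resolve to something
+# like ['src/libfoo.so'] (hypothetical path).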
+
+def get_parsed_args_ninja(options: 'argparse.Namespace', builddir: Path) -> T.Tuple[T.List[str], T.Optional[T.Dict[str, str]]]:
+    runner = detect_ninja()
+    if runner is None:
+        raise MesonException('Cannot find ninja.')
+
+    cmd = runner
+    if not builddir.samefile('.'):
+        cmd.extend(['-C', builddir.as_posix()])
+
+    # If the value is set to < 1 then don't set anything, which lets
+    # ninja/samu decide what to do.
+    if options.jobs > 0:
+        cmd.extend(['-j', str(options.jobs)])
+    if options.load_average > 0:
+        cmd.extend(['-l', str(options.load_average)])
+
+    if options.verbose:
+        cmd.append('-v')
+
+    cmd += options.ninja_args
+
+    # operands must be processed after options/option-arguments
+    if options.targets:
+        intro_data = parse_introspect_data(builddir)
+        for t in options.targets:
+            cmd.extend(generate_target_names_ninja(ParsedTargetName(t), builddir, intro_data))
+    if options.clean:
+        cmd.append('clean')
+
+    return cmd, None
+
+def generate_target_name_vs(target: ParsedTargetName, builddir: Path, introspect_data: dict) -> str:
+    intro_target = get_target_from_intro_data(target, builddir, introspect_data)
+
+    assert intro_target['type'] not in {'alias', 'run'}, 'Should not reach here: `run` targets must be handled above'
+
+    # Normalize project name
+    # Source: https://docs.microsoft.com/en-us/visualstudio/msbuild/how-to-build-specific-targets-in-solutions-by-using-msbuild-exe
+    target_name = re.sub(r"[\%\$\@\;\.\(\)']", '_', intro_target['id'])  # type: str
+    rel_path = Path(intro_target['filename'][0]).relative_to(builddir.resolve()).parent
+    if rel_path != Path('.'):
+        target_name = str(rel_path / target_name)
+    return target_name
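+
+# Editor's illustration (hypothetical id): since msbuild rejects %$@;.()' in
+# target names, an introspected id such as 'foo@exe' would be normalized to
+# 'foo_exe', and prefixed with its relative path when the target lives in a
+# subdirectory, e.g. 'subdir/foo_exe'.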
+
+def get_parsed_args_vs(options: 'argparse.Namespace', builddir: Path) -> T.Tuple[T.List[str], T.Optional[T.Dict[str, str]]]:
+    slns = list(builddir.glob('*.sln'))
+    assert len(slns) == 1, 'More than one solution in a project?'
+    sln = slns[0]
+
+    cmd = ['msbuild']
+
+    if options.targets:
+        intro_data = parse_introspect_data(builddir)
+        has_run_target = any(
+            get_target_from_intro_data(ParsedTargetName(t), builddir, intro_data)['type'] in {'alias', 'run'}
+            for t in options.targets)
+
+        if has_run_target:
+            # `run` targets can't be used the same way as other targets on the `vs`
+            # backend. They are defined as disabled projects, which can't be invoked
+            # as `.sln` targets and have to be invoked directly as projects instead.
+            # Issue: https://github.com/microsoft/msbuild/issues/4772
+
+            if len(options.targets) > 1:
+                raise MesonException('Only one target may be specified when `run` target type is used on this backend.')
+            intro_target = get_target_from_intro_data(ParsedTargetName(options.targets[0]), builddir, intro_data)
+            proj_dir = Path(intro_target['filename'][0]).parent
+            proj = proj_dir/'{}.vcxproj'.format(intro_target['id'])
+            cmd += [str(proj.resolve())]
+        else:
+            cmd += [str(sln.resolve())]
+            cmd.extend(['-target:{}'.format(generate_target_name_vs(ParsedTargetName(t), builddir, intro_data)) for t in options.targets])
+    else:
+        cmd += [str(sln.resolve())]
+
+    if options.clean:
+        cmd.extend(['-target:Clean'])
+
+    # In msbuild, `-maxCpuCount` with no number means "detect cpus"; the default is `-maxCpuCount:1`
+    if options.jobs > 0:
+        cmd.append(f'-maxCpuCount:{options.jobs}')
+    else:
+        cmd.append('-maxCpuCount')
+
+    if options.load_average:
+        mlog.warning('Msbuild does not have a load-average switch, ignoring.')
+
+    if not options.verbose:
+        cmd.append('-verbosity:minimal')
+
+    cmd += options.vs_args
+
+    # Remove platform from env if set so that msbuild does not
+    # pick x86 platform when solution platform is Win32
+    env = os.environ.copy()
+    env.pop('PLATFORM', None)
+
+    return cmd, env
+
+def get_parsed_args_xcode(options: 'argparse.Namespace', builddir: Path) -> T.Tuple[T.List[str], T.Optional[T.Dict[str, str]]]:
+    runner = 'xcodebuild'
+    if not shutil.which(runner):
+        raise MesonException('Cannot find xcodebuild, did you install XCode?')
+
+    # xcodebuild has no argument for switching the working directory
+    os.chdir(str(builddir))
+
+    cmd = [runner, '-parallelizeTargets']
+
+    if options.targets:
+        for t in options.targets:
+            cmd += ['-target', t]
+
+    if options.clean:
+        if options.targets:
+            cmd += ['clean']
+        else:
+            cmd += ['-alltargets', 'clean']
+        # Otherwise xcodebuild tries to delete the builddir and fails
+        cmd += ['-UseNewBuildSystem=FALSE']
+
+    if options.jobs > 0:
+        cmd.extend(['-jobs', str(options.jobs)])
+
+    if options.load_average > 0:
+        mlog.warning('xcodebuild does not have a load-average switch, ignoring')
+
+    if options.verbose:
+        # xcodebuild is already quite verbose, and -quiet doesn't print any
+        # status messages
+        pass
+
+    cmd += options.xcode_args
+    return cmd, None
+
+def add_arguments(parser: 'argparse.ArgumentParser') -> None:
+    """Add compile specific arguments."""
+    parser.add_argument(
+        'targets',
+        metavar='TARGET',
+        nargs='*',
+        default=None,
+        help='Targets to build. Target has the following format: [PATH_TO_TARGET/]TARGET_NAME[:TARGET_TYPE].')
+    parser.add_argument(
+        '--clean',
+        action='store_true',
+        help='Clean the build directory.'
+    )
+    parser.add_argument('-C', dest='wd', action=RealPathAction,
+                        help='directory to cd into before running')
+
+    parser.add_argument(
+        '-j', '--jobs',
+        action='store',
+        default=0,
+        type=int,
+        help='The number of worker jobs to run (if supported). If the value is less than 1 the build program will guess.'
+    )
+    parser.add_argument(
+        '-l', '--load-average',
+        action='store',
+        default=0,
+        type=float,
+        help='The system load average to try to maintain (if supported).'
+    )
+    parser.add_argument(
+        '-v', '--verbose',
+        action='store_true',
+        help='Show more verbose output.'
+    )
+    parser.add_argument(
+        '--ninja-args',
+        type=array_arg,
+        default=[],
+        help='Arguments to pass to `ninja` (applied only on `ninja` backend).'
+    )
+    parser.add_argument(
+        '--vs-args',
+        type=array_arg,
+        default=[],
+        help='Arguments to pass to `msbuild` (applied only on `vs` backend).'
+    )
+    parser.add_argument(
+        '--xcode-args',
+        type=array_arg,
+        default=[],
+        help='Arguments to pass to `xcodebuild` (applied only on `xcode` backend).'
+    )
+
+def run(options: 'argparse.Namespace') -> int:
+    bdir = Path(options.wd)
+    validate_builddir(bdir)
+    if options.targets and options.clean:
+        raise MesonException('`TARGET` and `--clean` can\'t be used simultaneously')
+
+    b = build.load(options.wd)
+    cdata = b.environment.coredata
+    need_vsenv = T.cast('bool', cdata.get_option(mesonlib.OptionKey('vsenv')))
+    if setup_vsenv(need_vsenv):
+        mlog.log(mlog.green('INFO:'), 'automatically activated MSVC compiler environment')
+
+    cmd = []    # type: T.List[str]
+    env = None  # type: T.Optional[T.Dict[str, str]]
+
+    backend = cdata.get_option(mesonlib.OptionKey('backend'))
+    assert isinstance(backend, str)
+    mlog.log(mlog.green('INFO:'), 'autodetecting backend as', backend)
+    if backend == 'ninja':
+        cmd, env = get_parsed_args_ninja(options, bdir)
+    elif backend.startswith('vs'):
+        cmd, env = get_parsed_args_vs(options, bdir)
+    elif backend == 'xcode':
+        cmd, env = get_parsed_args_xcode(options, bdir)
+    else:
+        raise MesonException(
+            f'Backend `{backend}` is not yet supported by `compile`. Use generated project files directly instead.')
+
+    mlog.log(mlog.green('INFO:'), 'calculating backend command to run:', join_args(cmd))
+    p, *_ = mesonlib.Popen_safe(cmd, stdout=sys.stdout.buffer, stderr=sys.stderr.buffer, env=env)
+
+    return p.returncode
diff --git a/vendored-meson/meson/mesonbuild/mconf.py b/vendored-meson/meson/mesonbuild/mconf.py
new file mode 100644
index 000000000000..ddd2ee2d7ff8
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/mconf.py
@@ -0,0 +1,336 @@
+# Copyright 2014-2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+import itertools
+import shutil
+import os
+import textwrap
+import typing as T
+import collections
+
+from . import build
+from . import coredata
+from . import environment
+from . import mesonlib
+from . import mintro
+from . import mlog
+from .ast import AstIDGenerator
+from .mesonlib import MachineChoice, OptionKey
+
+if T.TYPE_CHECKING:
+    import argparse
+
+    # cannot be TV_Loggable, because non-AnsiDecorator values are concatenated directly as strings
+    LOGLINE = T.Union[str, mlog.AnsiDecorator]
+
+def add_arguments(parser: 'argparse.ArgumentParser') -> None:
+    coredata.register_builtin_arguments(parser)
+    parser.add_argument('builddir', nargs='?', default='.')
+    parser.add_argument('--clearcache', action='store_true', default=False,
+                        help='Clear cached state (e.g. found dependencies)')
+    parser.add_argument('--no-pager', action='store_false', dest='pager',
+                        help='Do not redirect output to a pager')
+
+def stringify(val: T.Any) -> str:
+    if isinstance(val, bool):
+        return str(val).lower()
+    elif isinstance(val, list):
+        s = ', '.join(stringify(i) for i in val)
+        return f'[{s}]'
+    elif val is None:
+        return ''
+    else:
+        return str(val)
+
+
+class ConfException(mesonlib.MesonException):
+    pass
+
+
+class Conf:
+    def __init__(self, build_dir: str):
+        self.build_dir = os.path.abspath(os.path.realpath(build_dir))
+        if 'meson.build' in [os.path.basename(self.build_dir), self.build_dir]:
+            self.build_dir = os.path.dirname(self.build_dir)
+        self.build = None
+        self.max_choices_line_length = 60
+        self.name_col: T.List[LOGLINE] = []
+        self.value_col: T.List[LOGLINE] = []
+        self.choices_col: T.List[LOGLINE] = []
+        self.descr_col: T.List[LOGLINE] = []
+        self.all_subprojects: T.Set[str] = set()
+
+        if os.path.isdir(os.path.join(self.build_dir, 'meson-private')):
+            self.build = build.load(self.build_dir)
+            self.source_dir = self.build.environment.get_source_dir()
+            self.coredata = self.build.environment.coredata
+            self.default_values_only = False
+        elif os.path.isfile(os.path.join(self.build_dir, environment.build_filename)):
+            # Make sure that log entries in other parts of meson don't interfere with the JSON output
+            with mlog.no_logging():
+                self.source_dir = os.path.abspath(os.path.realpath(self.build_dir))
+                intr = mintro.IntrospectionInterpreter(self.source_dir, '', 'ninja', visitors = [AstIDGenerator()])
+                intr.analyze()
+            self.coredata = intr.coredata
+            self.default_values_only = True
+        else:
+            raise ConfException(f'Directory {build_dir} is neither a Meson build directory nor a project source directory.')
+
+    def clear_cache(self) -> None:
+        self.coredata.clear_cache()
+
+    def set_options(self, options: T.Dict[OptionKey, str]) -> bool:
+        return self.coredata.set_options(options)
+
+    def save(self) -> None:
+        # Do nothing when using introspection
+        if self.default_values_only:
+            return
+        coredata.save(self.coredata, self.build_dir)
+        # We don't write the build file because any changes to it
+        # are erased when Meson is executed the next time, i.e. when
+        # Ninja is run.
+
+    def print_aligned(self) -> None:
+        """Do the actual printing.
+
+        This prints the generated output in an aligned, pretty form. It aims
+        for a total width of 160 characters, but will use whatever width the
+        tty reports. Though this is much wider than the standard 80 characters
+        of terminals, and even than the newer 120, compressing the output to
+        those lengths makes it hard to read.
+
+        Each column will have a specific width, and will be line wrapped.
+        """
+        total_width = shutil.get_terminal_size(fallback=(160, 0))[0]
+        _col = max(total_width // 5, 20)
+        last_column = total_width - (3 * _col) - 3
+        four_column = (_col, _col, _col, last_column if last_column > 1 else _col)
+
+        for line in zip(self.name_col, self.value_col, self.choices_col, self.descr_col):
+            if not any(line):
+                mlog.log('')
+                continue
+
+            # This is a header, like `Subproject foo:`,
+            # We just want to print that and get on with it
+            if line[0] and not any(line[1:]):
+                mlog.log(line[0])
+                continue
+
+            def wrap_text(text: LOGLINE, width: int) -> mlog.TV_LoggableList:
+                raw = text.text if isinstance(text, mlog.AnsiDecorator) else text
+                indent = ' ' if raw.startswith('[') else ''
+                wrapped_ = textwrap.wrap(raw, width, subsequent_indent=indent)
+                # We cast this because of https://github.com/python/mypy/issues/1965:
+                # mlog.TV_LoggableList does not provide __len__ for the string protocol
+                if isinstance(text, mlog.AnsiDecorator):
+                    wrapped = T.cast('T.List[LOGLINE]', [mlog.AnsiDecorator(i, text.code) for i in wrapped_])
+                else:
+                    wrapped = T.cast('T.List[LOGLINE]', wrapped_)
+                # Add padding here to get even rows, as `textwrap.wrap()` will
+                # only shorten, not lengthen each item
+                return [str(i) + ' ' * (width - len(i)) for i in wrapped]
+
+            # wrap will take a long string, and create a list of strings no
+            # longer than the size given. Then that list can be zipped into, to
+            # print each line of the output, such that the columns are printed
+            # to the right width, row by row.
+            name = wrap_text(line[0], four_column[0])
+            val = wrap_text(line[1], four_column[1])
+            choice = wrap_text(line[2], four_column[2])
+            desc = wrap_text(line[3], four_column[3])
+            for l in itertools.zip_longest(name, val, choice, desc, fillvalue=''):
+                items = [l[i] if l[i] else ' ' * four_column[i] for i in range(4)]
+                mlog.log(*items)
+
+    def split_options_per_subproject(self, options: 'coredata.KeyedOptionDictType') -> T.Dict[str, 'coredata.MutableKeyedOptionDictType']:
+        result: T.Dict[str, 'coredata.MutableKeyedOptionDictType'] = {}
+        for k, o in options.items():
+            if k.subproject:
+                self.all_subprojects.add(k.subproject)
+            result.setdefault(k.subproject, {})[k] = o
+        return result
+
+    def _add_line(self, name: LOGLINE, value: LOGLINE, choices: LOGLINE, descr: LOGLINE) -> None:
+        if isinstance(name, mlog.AnsiDecorator):
+            name.text = ' ' * self.print_margin + name.text
+        else:
+            name = ' ' * self.print_margin + name
+        self.name_col.append(name)
+        self.value_col.append(value)
+        self.choices_col.append(choices)
+        self.descr_col.append(descr)
+
+    def add_option(self, name: str, descr: str, value: T.Any, choices: T.Any) -> None:
+        value = stringify(value)
+        choices = stringify(choices)
+        self._add_line(mlog.green(name), mlog.yellow(value), mlog.blue(choices), descr)
+
+    def add_title(self, title: str) -> None:
+        newtitle = mlog.cyan(title)
+        descr = mlog.cyan('Description')
+        value = mlog.cyan('Default Value' if self.default_values_only else 'Current Value')
+        choices = mlog.cyan('Possible Values')
+        self._add_line('', '', '', '')
+        self._add_line(newtitle, value, choices, descr)
+        self._add_line('-' * len(newtitle), '-' * len(value), '-' * len(choices), '-' * len(descr))
+
+    def add_section(self, section: str) -> None:
+        self.print_margin = 0
+        self._add_line('', '', '', '')
+        self._add_line(mlog.normal_yellow(section + ':'), '', '', '')
+        self.print_margin = 2
+
+    def print_options(self, title: str, options: 'coredata.KeyedOptionDictType') -> None:
+        if not options:
+            return
+        if title:
+            self.add_title(title)
+        auto = T.cast('coredata.UserFeatureOption', self.coredata.options[OptionKey('auto_features')])
+        for k, o in sorted(options.items()):
+            printable_value = o.printable_value()
+            root = k.as_root()
+            if o.yielding and k.subproject and root in self.coredata.options:
+                printable_value = ''
+            if isinstance(o, coredata.UserFeatureOption) and o.is_auto():
+                printable_value = auto.printable_value()
+            self.add_option(str(root), o.description, printable_value, o.choices)
+
+    def print_conf(self, pager: bool) -> None:
+        if pager:
+            mlog.start_pager()
+
+        def print_default_values_warning() -> None:
+            mlog.warning('The source directory instead of the build directory was specified.')
+            mlog.warning('Only the default values for the project are printed.')
+
+        if self.default_values_only:
+            print_default_values_warning()
+            mlog.log('')
+
+        mlog.log('Core properties:')
+        mlog.log('  Source dir', self.source_dir)
+        if not self.default_values_only:
+            mlog.log('  Build dir ', self.build_dir)
+
+        dir_option_names = set(coredata.BUILTIN_DIR_OPTIONS)
+        test_option_names = {OptionKey('errorlogs'),
+                             OptionKey('stdsplit')}
+
+        dir_options: 'coredata.MutableKeyedOptionDictType' = {}
+        test_options: 'coredata.MutableKeyedOptionDictType' = {}
+        core_options: 'coredata.MutableKeyedOptionDictType' = {}
+        module_options: T.Dict[str, 'coredata.MutableKeyedOptionDictType'] = collections.defaultdict(dict)
+        for k, v in self.coredata.options.items():
+            if k in dir_option_names:
+                dir_options[k] = v
+            elif k in test_option_names:
+                test_options[k] = v
+            elif k.module:
+                # Ignore module options if we did not use that module during
+                # configuration.
+                if self.build and k.module not in self.build.modules:
+                    continue
+                module_options[k.module][k] = v
+            elif k.is_builtin():
+                core_options[k] = v
+
+        host_core_options = self.split_options_per_subproject({k: v for k, v in core_options.items() if k.machine is MachineChoice.HOST})
+        build_core_options = self.split_options_per_subproject({k: v for k, v in core_options.items() if k.machine is MachineChoice.BUILD})
+        host_compiler_options = self.split_options_per_subproject({k: v for k, v in self.coredata.options.items() if k.is_compiler() and k.machine is MachineChoice.HOST})
+        build_compiler_options = self.split_options_per_subproject({k: v for k, v in self.coredata.options.items() if k.is_compiler() and k.machine is MachineChoice.BUILD})
+        project_options = self.split_options_per_subproject({k: v for k, v in self.coredata.options.items() if k.is_project()})
+        show_build_options = self.default_values_only or self.build.environment.is_cross_build()
+
+        self.add_section('Main project options')
+        self.print_options('Core options', host_core_options[''])
+        if show_build_options:
+            self.print_options('', build_core_options[''])
+        self.print_options('Backend options', {k: v for k, v in self.coredata.options.items() if k.is_backend()})
+        self.print_options('Base options', {k: v for k, v in self.coredata.options.items() if k.is_base()})
+        self.print_options('Compiler options', host_compiler_options.get('', {}))
+        if show_build_options:
+            self.print_options('', build_compiler_options.get('', {}))
+        for mod, mod_options in module_options.items():
+            self.print_options(f'{mod} module options', mod_options)
+        self.print_options('Directories', dir_options)
+        self.print_options('Testing options', test_options)
+        self.print_options('Project options', project_options.get('', {}))
+        for subproject in sorted(self.all_subprojects):
+            if subproject == '':
+                continue
+            self.add_section('Subproject ' + subproject)
+            if subproject in host_core_options:
+                self.print_options('Core options', host_core_options[subproject])
+            if subproject in build_core_options and show_build_options:
+                self.print_options('', build_core_options[subproject])
+            if subproject in host_compiler_options:
+                self.print_options('Compiler options', host_compiler_options[subproject])
+            if subproject in build_compiler_options and show_build_options:
+                self.print_options('', build_compiler_options[subproject])
+            if subproject in project_options:
+                self.print_options('Project options', project_options[subproject])
+        self.print_aligned()
+
+        # Print the warning twice so that the user can't miss it
+        if self.default_values_only:
+            mlog.log('')
+            print_default_values_warning()
+
+        self.print_nondefault_buildtype_options()
+
+    def print_nondefault_buildtype_options(self) -> None:
+        mismatching = self.coredata.get_nondefault_buildtype_args()
+        if not mismatching:
+            return
+        mlog.log("\nThe following option(s) have a different value than the build type default\n")
+        mlog.log('               current   default')
+        for m in mismatching:
+            mlog.log(f'{m[0]:21}{m[1]:10}{m[2]:10}')
+
+def run(options: argparse.Namespace) -> int:
+    coredata.parse_cmd_line_options(options)
+    builddir = os.path.abspath(os.path.realpath(options.builddir))
+    print_only = not options.cmd_line_options and not options.clearcache
+    c = None
+    try:
+        c = Conf(builddir)
+        if c.default_values_only and not print_only:
+            raise mesonlib.MesonException('No valid build directory found, cannot modify options.')
+        if c.default_values_only or print_only:
+            c.print_conf(options.pager)
+            return 0
+
+        save = False
+        if options.cmd_line_options:
+            save = c.set_options(options.cmd_line_options)
+            coredata.update_cmd_line_file(builddir, options)
+        if options.clearcache:
+            c.clear_cache()
+            save = True
+        if save:
+            c.save()
+            mintro.update_build_options(c.coredata, c.build.environment.info_dir)
+            mintro.write_meson_info_file(c.build, [])
+    except ConfException as e:
+        mlog.log('Meson configurator encountered an error:')
+        if c is not None and c.build is not None:
+            mintro.write_meson_info_file(c.build, [e])
+        raise e
+    except BrokenPipeError:
+        # Pager quit before we wrote everything.
+        pass
+    return 0
diff --git a/vendored-meson/meson/mesonbuild/mdevenv.py b/vendored-meson/meson/mesonbuild/mdevenv.py
new file mode 100644
index 000000000000..9c298478d87d
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/mdevenv.py
@@ -0,0 +1,231 @@
+from __future__ import annotations
+
+import os, subprocess
+import argparse
+import tempfile
+import shutil
+import itertools
+
+from pathlib import Path
+from . import build, minstall
+from .mesonlib import (MesonException, is_windows, setup_vsenv, OptionKey,
+                       get_wine_shortpath, MachineChoice)
+from . import mlog
+
+import typing as T
+if T.TYPE_CHECKING:
+    from .backends import InstallData
+
+POWERSHELL_EXES = {'pwsh.exe', 'powershell.exe'}
+
+def add_arguments(parser: argparse.ArgumentParser) -> None:
+    parser.add_argument('-C', dest='builddir', type=Path, default='.',
+                        help='Path to build directory')
+    parser.add_argument('--workdir', '-w', type=Path, default=None,
+                        help='Directory to cd into before running (default: builddir, Since 1.0.0)')
+    parser.add_argument('--dump', nargs='?', const=True,
+                        help='Only print required environment (Since 0.62.0). ' +
+                             'Takes an optional file path (Since 1.1.0)')
+    parser.add_argument('--dump-format', default='export',
+                        choices=['sh', 'export', 'vscode'],
+                        help='Format used with --dump (Since 1.1.0)')
+    parser.add_argument('devcmd', nargs=argparse.REMAINDER, metavar='command',
+                        help='Command to run in developer environment (default: interactive shell)')
+
+def get_windows_shell() -> T.Optional[str]:
+    mesonbuild = Path(__file__).parent
+    script = mesonbuild / 'scripts' / 'cmd_or_ps.ps1'
+    for shell in POWERSHELL_EXES:
+        try:
+            command = [shell, '-noprofile', '-executionpolicy', 'bypass', '-file', str(script)]
+            result = subprocess.check_output(command)
+            return result.decode().strip()
+        except (subprocess.CalledProcessError, OSError):
+            pass
+    return None
+
+def reduce_winepath(env: T.Dict[str, str]) -> None:
+    winepath = env.get('WINEPATH')
+    if not winepath:
+        return
+    winecmd = shutil.which('wine64') or shutil.which('wine')
+    if not winecmd:
+        return
+    env['WINEPATH'] = get_wine_shortpath([winecmd], winepath.split(';'))
+    mlog.log('Meson detected wine and has set WINEPATH accordingly')
+
+def get_env(b: build.Build, dump_fmt: T.Optional[str]) -> T.Tuple[T.Dict[str, str], T.Set[str]]:
+    extra_env = build.EnvironmentVariables()
+    extra_env.set('MESON_DEVENV', ['1'])
+    extra_env.set('MESON_PROJECT_NAME', [b.project_name])
+
+    sysroot = b.environment.properties[MachineChoice.HOST].get_sys_root()
+    if sysroot:
+        extra_env.set('QEMU_LD_PREFIX', [sysroot])
+
+    env = {} if dump_fmt else os.environ.copy()
+    default_fmt = '${0}' if dump_fmt in {'sh', 'export'} else None
+    varnames = set()
+    for i in itertools.chain(b.devenv, {extra_env}):
+        env = i.get_env(env, default_fmt)
+        varnames |= i.get_names()
+
+    reduce_winepath(env)
+
+    return env, varnames
+
+def bash_completion_files(b: build.Build, install_data: 'InstallData') -> T.List[str]:
+    from .dependencies.pkgconfig import PkgConfigDependency
+    result = []
+    dep = PkgConfigDependency('bash-completion', b.environment,
+                              {'required': False, 'silent': True, 'version': '>=2.10'})
+    if dep.found():
+        prefix = b.environment.coredata.get_option(OptionKey('prefix'))
+        assert isinstance(prefix, str), 'for mypy'
+        datadir = b.environment.coredata.get_option(OptionKey('datadir'))
+        assert isinstance(datadir, str), 'for mypy'
+        datadir_abs = os.path.join(prefix, datadir)
+        completionsdir = dep.get_variable(pkgconfig='completionsdir', pkgconfig_define=['datadir', datadir_abs])
+        assert isinstance(completionsdir, str), 'for mypy'
+        completionsdir_path = Path(completionsdir)
+        for f in install_data.data:
+            if completionsdir_path in Path(f.install_path).parents:
+                result.append(f.path)
+    return result
+
+def add_gdb_auto_load(autoload_path: Path, gdb_helper: str, fname: Path) -> None:
+    # Copy or symlink the GDB helper into our private directory tree
+    destdir = autoload_path / fname.parent
+    destdir.mkdir(parents=True, exist_ok=True)
+    try:
+        if is_windows():
+            shutil.copy(gdb_helper, str(destdir / os.path.basename(gdb_helper)))
+        else:
+            os.symlink(gdb_helper, str(destdir / os.path.basename(gdb_helper)))
+    except (FileExistsError, shutil.SameFileError):
+        pass
+
+def write_gdb_script(privatedir: Path, install_data: 'InstallData', workdir: Path) -> None:
+    if not shutil.which('gdb'):
+        return
+    bdir = privatedir.parent
+    autoload_basedir = privatedir / 'gdb-auto-load'
+    autoload_path = Path(autoload_basedir, *bdir.parts[1:])
+    have_gdb_helpers = False
+    for d in install_data.data:
+        if d.path.endswith('-gdb.py') or d.path.endswith('-gdb.gdb') or d.path.endswith('-gdb.scm'):
+            # This GDB helper is made for a specific shared library, search if
+            # we have it in our builddir.
+            libname = Path(d.path).name.rsplit('-', 1)[0]
+            for t in install_data.targets:
+                path = Path(t.fname)
+                if path.name == libname:
+                    add_gdb_auto_load(autoload_path, d.path, path)
+                    have_gdb_helpers = True
+    if have_gdb_helpers:
+        gdbinit_line = f'add-auto-load-scripts-directory {autoload_basedir}\n'
+        gdbinit_path = bdir / '.gdbinit'
+        first_time = False
+        try:
+            with gdbinit_path.open('r+', encoding='utf-8') as f:
+                if gdbinit_line not in f.readlines():
+                    f.write(gdbinit_line)
+                    first_time = True
+        except FileNotFoundError:
+            gdbinit_path.write_text(gdbinit_line, encoding='utf-8')
+            first_time = True
+        if first_time:
+            gdbinit_path = gdbinit_path.resolve()
+            workdir_path = workdir.resolve()
+            rel_path = gdbinit_path.relative_to(workdir_path)
+            mlog.log('Meson detected GDB helpers and added config in', mlog.bold(str(rel_path)))
+            mlog.log('To load it automatically you might need to:')
+            mlog.log(' - Add', mlog.bold(f'add-auto-load-safe-path {gdbinit_path.parent}'),
+                     'in', mlog.bold('~/.gdbinit'))
+            if gdbinit_path.parent != workdir_path:
+                mlog.log(' - Change current workdir to', mlog.bold(str(rel_path.parent)),
+                         'or use', mlog.bold(f'--init-command {rel_path}'))
+
+def dump(devenv: T.Dict[str, str], varnames: T.Set[str], dump_format: T.Optional[str], output: T.Optional[T.TextIO] = None) -> None:
+    for name in varnames:
+        print(f'{name}="{devenv[name]}"', file=output)
+        if dump_format == 'export':
+            print(f'export {name}', file=output)
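+
+# Editor's sketch of the output, assuming a PATH entry in the devenv: with
+# dump_format='export' this prints
+#   PATH="/builddir/foo:$PATH"
+#   export PATH
+# while 'sh' omits the `export` line; 'vscode' dumps literal values because
+# get_env() passes no '${0}' reference format for it.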
+
+def run(options: argparse.Namespace) -> int:
+    privatedir = Path(options.builddir) / 'meson-private'
+    buildfile = privatedir / 'build.dat'
+    if not buildfile.is_file():
+        raise MesonException(f'Directory {options.builddir!r} does not seem to be a Meson build directory.')
+    b = build.load(options.builddir)
+    workdir = options.workdir or options.builddir
+
+    need_vsenv = T.cast('bool', b.environment.coredata.get_option(OptionKey('vsenv')))
+    setup_vsenv(need_vsenv) # Call it before get_env to get vsenv vars as well
+    dump_fmt = options.dump_format if options.dump else None
+    devenv, varnames = get_env(b, dump_fmt)
+    if options.dump:
+        if options.devcmd:
+            raise MesonException('The --dump option does not allow running another command.')
+        if options.dump is True:
+            dump(devenv, varnames, dump_fmt)
+        else:
+            with open(options.dump, "w", encoding='utf-8') as output:
+                dump(devenv, varnames, dump_fmt, output)
+        return 0
+
+    if b.environment.need_exe_wrapper():
+        m = 'An executable wrapper may be required'
+        exe_wrapper = b.environment.get_exe_wrapper()
+        if exe_wrapper:
+            cmd = ' '.join(exe_wrapper.get_command())
+            m += f': {cmd}'
+        mlog.log(m)
+
+    install_data = minstall.load_install_data(str(privatedir / 'install.dat'))
+    write_gdb_script(privatedir, install_data, workdir)
+
+    args = options.devcmd
+    if not args:
+        prompt_prefix = f'[{b.project_name}]'
+        shell_env = os.environ.get("SHELL")
+        # Prefer $SHELL in an MSYS2 bash despite it being Windows
+        if shell_env and os.path.exists(shell_env):
+            args = [shell_env]
+        elif is_windows():
+            shell = get_windows_shell()
+            if not shell:
+                mlog.warning('Failed to determine Windows shell, falling back to cmd.exe')
+            if shell in POWERSHELL_EXES:
+                args = [shell, '-NoLogo', '-NoExit']
+                prompt = f'function global:prompt {{  "{prompt_prefix} PS " + $PWD + "> "}}'
+                args += ['-Command', prompt]
+            else:
+                args = [os.environ.get("COMSPEC", r"C:\WINDOWS\system32\cmd.exe")]
+                args += ['/k', f'prompt {prompt_prefix} $P$G']
+        else:
+            args = [os.environ.get("SHELL", os.path.realpath("/bin/sh"))]
+        if "bash" in args[0]:
+            # Let the GC remove the tmp file
+            tmprc = tempfile.NamedTemporaryFile(mode='w')
+            tmprc.write('[ -e ~/.bashrc ] && . ~/.bashrc\n')
+            if not os.environ.get("MESON_DISABLE_PS1_OVERRIDE"):
+                tmprc.write(f'export PS1="{prompt_prefix} $PS1"\n')
+            for f in bash_completion_files(b, install_data):
+                tmprc.write(f'. "{f}"\n')
+            tmprc.flush()
+            args.append("--rcfile")
+            args.append(tmprc.name)
+    else:
+        # Try to resolve executable using devenv's PATH
+        abs_path = shutil.which(args[0], path=devenv.get('PATH', None))
+        args[0] = abs_path or args[0]
+
+    try:
+        return subprocess.call(args, close_fds=False,
+                               env=devenv,
+                               cwd=workdir)
+    except subprocess.CalledProcessError as e:
+        return e.returncode
+    except FileNotFoundError:
+        raise MesonException(f'Command not found: {args[0]}')
diff --git a/vendored-meson/meson/mesonbuild/mdist.py b/vendored-meson/meson/mesonbuild/mdist.py
new file mode 100644
index 000000000000..089056b35a79
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/mdist.py
@@ -0,0 +1,383 @@
+# Copyright 2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+
+import abc
+import argparse
+import gzip
+import os
+import sys
+import shlex
+import shutil
+import subprocess
+import tarfile
+import tempfile
+import hashlib
+import typing as T
+
+from dataclasses import dataclass
+from glob import glob
+from pathlib import Path
+from mesonbuild.environment import detect_ninja
+from mesonbuild.mesonlib import (MesonException, RealPathAction, quiet_git,
+                                 windows_proof_rmtree, setup_vsenv, OptionKey)
+from mesonbuild.msetup import add_arguments as msetup_argparse
+from mesonbuild.wrap import wrap
+from mesonbuild import mlog, build, coredata
+from .scripts.meson_exe import run_exe
+
+if T.TYPE_CHECKING:
+    from ._typing import ImmutableListProtocol
+    from .mesonlib import ExecutableSerialisation
+
+archive_choices = ['gztar', 'xztar', 'zip']
+
+archive_extension = {'gztar': '.tar.gz',
+                     'xztar': '.tar.xz',
+                     'zip': '.zip'}
+
+def add_arguments(parser: argparse.ArgumentParser) -> None:
+    parser.add_argument('-C', dest='wd', action=RealPathAction,
+                        help='directory to cd into before running')
+    parser.add_argument('--allow-dirty', action='store_true',
+                        help='Allow even when repository contains uncommitted changes.')
+    parser.add_argument('--formats', default='xztar',
+                        help='Comma separated list of archive types to create. Supports xztar (default), gztar, and zip.')
+    parser.add_argument('--include-subprojects', action='store_true',
+                        help='Include source code of subprojects that have been used for the build.')
+    parser.add_argument('--no-tests', action='store_true',
+                        help='Do not build and test generated packages.')
+
+
+def create_hash(fname: str) -> None:
+    hashname = fname + '.sha256sum'
+    m = hashlib.sha256()
+    m.update(open(fname, 'rb').read())
+    with open(hashname, 'w', encoding='utf-8') as f:
+        # A space and an asterisk because that is the format defined by GNU coreutils
+        # and accepted by busybox and the Perl shasum tool.
+        f.write('{} *{}\n'.format(m.hexdigest(), os.path.basename(fname)))
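+
+# Editor's illustration: the resulting .sha256sum file contains one line in
+# GNU coreutils format, e.g. (hypothetical digest)
+#   3a7bd3e2...dd4f1b *foo-1.0.tar.xz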
+
+
+msg_uncommitted_changes = 'Repository has uncommitted changes that will not be included in the dist tarball'
+
+def handle_dirty_opt(msg: str, allow_dirty: bool) -> None:
+    if allow_dirty:
+        mlog.warning(msg)
+    else:
+        mlog.error(msg + '\n' + 'Use --allow-dirty to ignore the warning and proceed anyway')
+        sys.exit(1)
+
+def is_git(src_root: str) -> bool:
+    '''
+    Checks whether the meson.build file at the root of the source directory is
+    tracked by git. The source directory could itself be a subproject tracked
+    inside the parent project's git repository.
+    '''
+    return quiet_git(['ls-files', '--error-unmatch', 'meson.build'], src_root)[0]
+
+def is_hg(src_root: str) -> bool:
+    return os.path.isdir(os.path.join(src_root, '.hg'))
+
+
+@dataclass
+class Dist(metaclass=abc.ABCMeta):
+    dist_name: str
+    src_root: str
+    bld_root: str
+    dist_scripts: T.List[ExecutableSerialisation]
+    subprojects: T.Dict[str, str]
+    options: argparse.Namespace
+
+    def __post_init__(self) -> None:
+        self.dist_sub = os.path.join(self.bld_root, 'meson-dist')
+        self.distdir = os.path.join(self.dist_sub, self.dist_name)
+
+    @abc.abstractmethod
+    def create_dist(self, archives: T.List[str]) -> T.List[str]:
+        pass
+
+    def run_dist_scripts(self) -> None:
+        assert os.path.isabs(self.distdir)
+        env = {}
+        env['MESON_DIST_ROOT'] = self.distdir
+        env['MESON_SOURCE_ROOT'] = self.src_root
+        env['MESON_BUILD_ROOT'] = self.bld_root
+        for d in self.dist_scripts:
+            if d.subproject and d.subproject not in self.subprojects:
+                continue
+            subdir = self.subprojects.get(d.subproject, '')
+            env['MESON_PROJECT_DIST_ROOT'] = os.path.join(self.distdir, subdir)
+            env['MESON_PROJECT_SOURCE_ROOT'] = os.path.join(self.src_root, subdir)
+            env['MESON_PROJECT_BUILD_ROOT'] = os.path.join(self.bld_root, subdir)
+            name = ' '.join(d.cmd_args)
+            print(f'Running custom dist script {name!r}')
+            try:
+                rc = run_exe(d, env)
+                if rc != 0:
+                    sys.exit('Dist script errored out')
+            except OSError:
+                print(f'Failed to run dist script {name!r}')
+                sys.exit(1)
+
+
+class GitDist(Dist):
+    def git_root(self, dir_: str) -> Path:
+        # Cannot use --show-toplevel here because git in our CI prints cygwin paths
+        # that python cannot resolve. Work around this by taking the parent of src_root.
+        prefix = quiet_git(['rev-parse', '--show-prefix'], dir_, check=True)[1].strip()
+        if not prefix:
+            return Path(dir_)
+        prefix_level = len(Path(prefix).parents)
+        return Path(dir_).parents[prefix_level - 1]
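+
+# Editor's worked example: if `git rev-parse --show-prefix` run in
+# /src/proj/subprojects/foo prints 'subprojects/foo/', that prefix has two
+# path components, so the repository root resolves to /src/proj.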
+
+    def have_dirty_index(self) -> bool:
+        '''Check whether there are uncommitted changes in git'''
+        ret = subprocess.call(['git', '-C', self.src_root, 'diff-index', '--quiet', 'HEAD'])
+        return ret == 1
+
+    def copy_git(self, src: T.Union[str, os.PathLike], distdir: str, revision: str = 'HEAD',
+                 prefix: T.Optional[str] = None, subdir: T.Optional[str] = None) -> None:
+        cmd = ['git', 'archive', '--format', 'tar', revision]
+        if prefix is not None:
+            cmd.insert(2, f'--prefix={prefix}/')
+        if subdir is not None:
+            cmd.extend(['--', subdir])
+        with tempfile.TemporaryFile() as f:
+            subprocess.check_call(cmd, cwd=src, stdout=f)
+            f.seek(0)
+            t = tarfile.open(fileobj=f) # [ignore encoding]
+            t.extractall(path=distdir)
+
+    def process_git_project(self, src_root: str, distdir: str) -> None:
+        if self.have_dirty_index():
+            handle_dirty_opt(msg_uncommitted_changes, self.options.allow_dirty)
+        if os.path.exists(distdir):
+            windows_proof_rmtree(distdir)
+        repo_root = self.git_root(src_root)
+        if repo_root.samefile(src_root):
+            os.makedirs(distdir)
+            self.copy_git(src_root, distdir)
+        else:
+            subdir = Path(src_root).relative_to(repo_root)
+            tmp_distdir = distdir + '-tmp'
+            if os.path.exists(tmp_distdir):
+                windows_proof_rmtree(tmp_distdir)
+            os.makedirs(tmp_distdir)
+            self.copy_git(repo_root, tmp_distdir, subdir=str(subdir))
+            Path(tmp_distdir, subdir).rename(distdir)
+            windows_proof_rmtree(tmp_distdir)
+        self.process_submodules(src_root, distdir)
+
+    def process_submodules(self, src: str, distdir: str) -> None:
+        module_file = os.path.join(src, '.gitmodules')
+        if not os.path.exists(module_file):
+            return
+        cmd = ['git', 'submodule', 'status', '--cached', '--recursive']
+        modlist = subprocess.check_output(cmd, cwd=src, universal_newlines=True).splitlines()
+        for submodule in modlist:
+            status = submodule[:1]
+            sha1, rest = submodule[1:].split(' ', 1)
+            subpath = rest.rsplit(' ', 1)[0]
+
+            if status == '-':
+                mlog.warning(f'Submodule {subpath!r} is not checked out and cannot be added to the dist')
+                continue
+            elif status in {'+', 'U'}:
+                handle_dirty_opt(f'Submodule {subpath!r} has uncommitted changes that will not be included in the dist tarball', self.options.allow_dirty)
+
+            self.copy_git(os.path.join(src, subpath), distdir, revision=sha1, prefix=subpath)
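+
+# Editor's note: `git submodule status --cached --recursive` emits lines of
+# the form '<status><sha1> <path> (<ref>)', where status '-' means not checked
+# out and '+'/'U' mean the checkout differs from the recorded commit; the
+# parsing above relies on exactly that layout.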
+
+    def create_dist(self, archives: T.List[str]) -> T.List[str]:
+        self.process_git_project(self.src_root, self.distdir)
+        for path in self.subprojects.values():
+            sub_src_root = os.path.join(self.src_root, path)
+            sub_distdir = os.path.join(self.distdir, path)
+            if os.path.exists(sub_distdir):
+                continue
+            if is_git(sub_src_root):
+                self.process_git_project(sub_src_root, sub_distdir)
+            else:
+                shutil.copytree(sub_src_root, sub_distdir)
+        self.run_dist_scripts()
+        output_names = []
+        for a in archives:
+            compressed_name = self.distdir + archive_extension[a]
+            shutil.make_archive(self.distdir, a, root_dir=self.dist_sub, base_dir=self.dist_name)
+            output_names.append(compressed_name)
+        windows_proof_rmtree(self.distdir)
+        return output_names
+
+
+class HgDist(Dist):
+    def have_dirty_index(self) -> bool:
+        '''Check whether there are uncommitted changes in hg'''
+        out = subprocess.check_output(['hg', '-R', self.src_root, 'summary'])
+        return b'commit: (clean)' not in out
+
+    def create_dist(self, archives: T.List[str]) -> T.List[str]:
+        if self.have_dirty_index():
+            handle_dirty_opt(msg_uncommitted_changes, self.options.allow_dirty)
+        if self.dist_scripts:
+            mlog.warning('dist scripts are not supported in Mercurial projects')
+
+        os.makedirs(self.dist_sub, exist_ok=True)
+        tarname = os.path.join(self.dist_sub, self.dist_name + '.tar')
+        xzname = tarname + '.xz'
+        gzname = tarname + '.gz'
+        zipname = os.path.join(self.dist_sub, self.dist_name + '.zip')
+        # Note that -X interprets relative paths using the current working
+        # directory, not the repository root, so this must be an absolute path:
+        # https://bz.mercurial-scm.org/show_bug.cgi?id=6267
+        #
+        # .hg[a-z]* is used instead of .hg* to keep .hg_archival.txt, which may
+        # be useful to link the tarball to the Mercurial revision for either
+        # manual inspection or in case any code interprets it for a --version or
+        # similar.
+        subprocess.check_call(['hg', 'archive', '-R', self.src_root, '-S', '-t', 'tar',
+                               '-X', self.src_root + '/.hg[a-z]*', tarname])
+        output_names = []
+        if 'xztar' in archives:
+            import lzma
+            with lzma.open(xzname, 'wb') as xf, open(tarname, 'rb') as tf:
+                shutil.copyfileobj(tf, xf)
+            output_names.append(xzname)
+        if 'gztar' in archives:
+            with gzip.open(gzname, 'wb') as zf, open(tarname, 'rb') as tf:
+                shutil.copyfileobj(tf, zf)
+            output_names.append(gzname)
+        os.unlink(tarname)
+        if 'zip' in archives:
+            subprocess.check_call(['hg', 'archive', '-R', self.src_root, '-S', '-t', 'zip', zipname])
+            output_names.append(zipname)
+        return output_names
+
+
+def run_dist_steps(meson_command: T.List[str], unpacked_src_dir: str, builddir: str, installdir: str, ninja_args: T.List[str]) -> int:
+    if subprocess.call(meson_command + ['--backend=ninja', unpacked_src_dir, builddir]) != 0:
+        print('Running Meson on distribution package failed')
+        return 1
+    if subprocess.call(ninja_args, cwd=builddir) != 0:
+        print('Compiling the distribution package failed')
+        return 1
+    if subprocess.call(ninja_args + ['test'], cwd=builddir) != 0:
+        print('Running unit tests on the distribution package failed')
+        return 1
+    myenv = os.environ.copy()
+    myenv['DESTDIR'] = installdir
+    if subprocess.call(ninja_args + ['install'], cwd=builddir, env=myenv) != 0:
+        print('Installing the distribution package failed')
+        return 1
+    return 0
+
+def check_dist(packagename: str, meson_command: ImmutableListProtocol[str], extra_meson_args: T.List[str], bld_root: str, privdir: str) -> int:
+    print(f'Testing distribution package {packagename}')
+    unpackdir = os.path.join(privdir, 'dist-unpack')
+    builddir = os.path.join(privdir, 'dist-build')
+    installdir = os.path.join(privdir, 'dist-install')
+    for p in (unpackdir, builddir, installdir):
+        if os.path.exists(p):
+            windows_proof_rmtree(p)
+        os.mkdir(p)
+    ninja_args = detect_ninja()
+    shutil.unpack_archive(packagename, unpackdir)
+    unpacked_files = glob(os.path.join(unpackdir, '*'))
+    assert len(unpacked_files) == 1
+    unpacked_src_dir = unpacked_files[0]
+    meson_command += ['setup']
+    meson_command += create_cmdline_args(bld_root)
+    meson_command += extra_meson_args
+
+    ret = run_dist_steps(meson_command, unpacked_src_dir, builddir, installdir, ninja_args)
+    if ret > 0:
+        print(f'Dist check build directory was {builddir}')
+    else:
+        windows_proof_rmtree(unpackdir)
+        windows_proof_rmtree(builddir)
+        windows_proof_rmtree(installdir)
+        print(f'Distribution package {packagename} tested')
+    return ret
+
+def create_cmdline_args(bld_root: str) -> T.List[str]:
+    parser = argparse.ArgumentParser()
+    msetup_argparse(parser)
+    args = parser.parse_args([])
+    coredata.parse_cmd_line_options(args)
+    coredata.read_cmd_line_file(bld_root, args)
+    args.cmd_line_options.pop(OptionKey('backend'), '')
+    return shlex.split(coredata.format_cmd_line_options(args))
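+
+# Editor's note: this reconstructs the original `meson setup` options from the
+# command line saved in the build directory (minus the backend), so the dist
+# check below can configure the unpacked tarball the same way; it might return
+# e.g. ['-Dbuildtype=release'] (hypothetical).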
+
+def determine_archives_to_generate(options: argparse.Namespace) -> T.List[str]:
+    result = []
+    for i in options.formats.split(','):
+        if i not in archive_choices:
+            sys.exit(f'Value "{i}" not one of permitted values {archive_choices}.')
+        result.append(i)
+    if len(result) == 0:
+        sys.exit('No archive types specified.')
+    return result
+
+def run(options: argparse.Namespace) -> int:
+    buildfile = Path(options.wd) / 'meson-private' / 'build.dat'
+    if not buildfile.is_file():
+        raise MesonException(f'Directory {options.wd!r} does not seem to be a Meson build directory.')
+    b = build.load(options.wd)
+    need_vsenv = T.cast('bool', b.environment.coredata.get_option(OptionKey('vsenv')))
+    setup_vsenv(need_vsenv)
+    # This import must be delayed until here, otherwise get_meson_command()
+    # would still return its default value of None.
+    from mesonbuild.mesonlib import get_meson_command
+    src_root = b.environment.source_dir
+    bld_root = b.environment.build_dir
+    priv_dir = os.path.join(bld_root, 'meson-private')
+
+    dist_name = b.project_name + '-' + b.project_version
+
+    archives = determine_archives_to_generate(options)
+
+    subprojects = {}
+    extra_meson_args = []
+    if options.include_subprojects:
+        subproject_dir = os.path.join(src_root, b.subproject_dir)
+        for sub in b.subprojects:
+            directory = wrap.get_directory(subproject_dir, sub)
+            subprojects[sub] = os.path.join(b.subproject_dir, directory)
+        extra_meson_args.append('-Dwrap_mode=nodownload')
+
+    cls: T.Type[Dist]
+    if is_git(src_root):
+        cls = GitDist
+    elif is_hg(src_root):
+        if subprojects:
+            print('--include-subprojects option currently not supported with Mercurial')
+            return 1
+        cls = HgDist
+    else:
+        print('Dist currently only works with Git or Mercurial repos')
+        return 1
+
+    project = cls(dist_name, src_root, bld_root, b.dist_scripts, subprojects, options)
+    names = project.create_dist(archives)
+
+    if names is None:
+        return 1
+    rc = 0
+    if not options.no_tests:
+        # Check only one.
+        rc = check_dist(names[0], get_meson_command(), extra_meson_args, bld_root, priv_dir)
+    if rc == 0:
+        for name in names:
+            create_hash(name)
+            print('Created', name)
+    return rc
diff --git a/vendored-meson/meson/mesonbuild/mesondata.py b/vendored-meson/meson/mesonbuild/mesondata.py
new file mode 100644
index 000000000000..da641fda7593
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/mesondata.py
@@ -0,0 +1,48 @@
+# Copyright 2021 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+
+import importlib.resources
+from pathlib import PurePosixPath, Path
+import typing as T
+
+if T.TYPE_CHECKING:
+    from .environment import Environment
+
+class DataFile:
+    def __init__(self, path: str) -> None:
+        self.path = PurePosixPath(path)
+
+    def write_once(self, path: Path) -> None:
+        if not path.exists():
+            data = importlib.resources.read_text( # [ignore encoding] it's on the next lines, Mr. Lint
+                    ('mesonbuild' / self.path.parent).as_posix().replace('/', '.'),
+                    self.path.name,
+                    encoding='utf-8')
+            path.write_text(data, encoding='utf-8')
+
+    def write_to_private(self, env: 'Environment') -> Path:
+        try:
+            resource = importlib.resources.files('mesonbuild') / self.path
+            if isinstance(resource, Path):
+                return resource
+        except AttributeError:
+            # fall through to python 3.7 compatible code
+            pass
+
+        out_file = Path(env.scratch_dir) / 'data' / self.path.name
+        out_file.parent.mkdir(exist_ok=True)
+        self.write_once(out_file)
+        return out_file
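+
+
+# Editor's illustrative sketch (not part of the vendored sources): the resource
+# path below is hypothetical; any data file shipped inside the mesonbuild
+# package works the same way.
+def _example_datafile(env: 'Environment') -> Path:
+    df = DataFile('dependencies/data/CMakeLists.txt')  # hypothetical resource
+    # write_to_private returns the packaged file directly when importlib.resources
+    # yields a real filesystem Path, otherwise copies it under <scratch_dir>/data/.
+    return df.write_to_private(env)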
diff --git a/vendored-meson/meson/mesonbuild/mesonlib.py b/vendored-meson/meson/mesonbuild/mesonlib.py
new file mode 100644
index 000000000000..be69a1271098
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/mesonlib.py
@@ -0,0 +1,35 @@
+# SPDX-license-identifier: Apache-2.0
+# Copyright 2012-2021 The Meson development team
+# Copyright © 2021 Intel Corporation
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# pylint: skip-file
+
+"""Helper functions and classes."""
+
+import os
+
+from .utils.core import *
+from .utils.vsenv import *
+
+from .utils.universal import *
+
+# Here we import either the posix implementations, the windows implementations,
+# or a generic no-op implementation
+if os.name == 'posix':
+    from .utils.posix import *
+elif os.name == 'nt':
+    from .utils.win32 import *
+else:
+    from .utils.platform import *
diff --git a/vendored-meson/meson/mesonbuild/mesonmain.py b/vendored-meson/meson/mesonbuild/mesonmain.py
new file mode 100644
index 000000000000..72a7ab945d43
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/mesonmain.py
@@ -0,0 +1,297 @@
+# Copyright 2012-2021 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+# Work around some pathlib bugs...
+
+from . import _pathlib
+import sys
+sys.modules['pathlib'] = _pathlib
+
+# This file is an entry point for all commands, including scripts. Import only
+# the strict minimum of Python modules, for performance reasons.
+import os.path
+import platform
+import importlib
+import argparse
+
+from .utils.core import MesonException, MesonBugException
+from . import mlog
+
+def errorhandler(e, command):
+    import traceback
+    if isinstance(e, MesonException):
+        mlog.exception(e)
+        logfile = mlog.shutdown()
+        if logfile is not None:
+            mlog.log("\nA full log can be found at", mlog.bold(logfile))
+        if os.environ.get('MESON_FORCE_BACKTRACE'):
+            raise e
+        return 1
+    else:
+        # We assume most unhandled exceptions are Meson logic bugs, most
+        # particularly anything coming from the interpreter during `setup`.
+        # Some things definitely aren't:
+        # - PermissionError is always a problem in the user environment
+        # - runpython doesn't run Meson's own code, even though it is
+        #   dispatched by our run()
+        if os.environ.get('MESON_FORCE_BACKTRACE'):
+            raise e
+        traceback.print_exc()
+
+        if command == 'runpython':
+            return 2
+        elif isinstance(e, OSError):
+            mlog.exception("Unhandled python OSError. This is probably not a Meson bug, "
+                           "but an issue with your build environment.")
+            return e.errno
+        else: # Exception
+            msg = 'Unhandled python exception'
+            if all(getattr(e, a, None) is not None for a in ['file', 'lineno', 'colno']):
+                e = MesonBugException(msg, e.file, e.lineno, e.colno) # type: ignore
+            else:
+                e = MesonBugException(msg)
+            mlog.exception(e)
+        return 2
+
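+# Editor's illustrative sketch (not part of the vendored sources): the exit
+# codes errorhandler produces for the two broad classes of failure.
+def _example_errorhandler() -> int:
+    try:
+        raise MesonException('invalid meson.build')  # a user/project error
+    except Exception as e:
+        # Logged via mlog and mapped to exit code 1; non-Meson exceptions
+        # print a traceback and yield 2 (or the errno for an OSError).
+        return errorhandler(e, 'setup')
+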
+# Note: when adding arguments, please also add them to the completion
+# scripts in $MESONSRC/data/shell-completions/
+class CommandLineParser:
+    def __init__(self):
+        # only import these once we do full argparse processing
+        from . import mconf, mdist, minit, minstall, mintro, msetup, mtest, rewriter, msubprojects, munstable_coredata, mcompile, mdevenv
+        from .scripts import env2mfile
+        from .wrap import wraptool
+        import shutil
+
+        self.term_width = shutil.get_terminal_size().columns
+        self.formatter = lambda prog: argparse.HelpFormatter(prog, max_help_position=int(self.term_width / 2), width=self.term_width)
+
+        self.commands = {}
+        self.hidden_commands = []
+        self.parser = argparse.ArgumentParser(prog='meson', formatter_class=self.formatter)
+        self.subparsers = self.parser.add_subparsers(title='Commands', dest='command',
+                                                     description='If no command is specified it defaults to setup command.')
+        self.add_command('setup', msetup.add_arguments, msetup.run,
+                         help_msg='Configure the project')
+        self.add_command('configure', mconf.add_arguments, mconf.run,
+                         help_msg='Change project options',)
+        self.add_command('dist', mdist.add_arguments, mdist.run,
+                         help_msg='Generate release archive',)
+        self.add_command('install', minstall.add_arguments, minstall.run,
+                         help_msg='Install the project')
+        self.add_command('introspect', mintro.add_arguments, mintro.run,
+                         help_msg='Introspect project')
+        self.add_command('init', minit.add_arguments, minit.run,
+                         help_msg='Create a new project')
+        self.add_command('test', mtest.add_arguments, mtest.run,
+                         help_msg='Run tests')
+        self.add_command('wrap', wraptool.add_arguments, wraptool.run,
+                         help_msg='Wrap tools')
+        self.add_command('subprojects', msubprojects.add_arguments, msubprojects.run,
+                         help_msg='Manage subprojects')
+        self.add_command('rewrite', lambda parser: rewriter.add_arguments(parser, self.formatter), rewriter.run,
+                         help_msg='Modify the project definition')
+        self.add_command('compile', mcompile.add_arguments, mcompile.run,
+                         help_msg='Build the project')
+        self.add_command('devenv', mdevenv.add_arguments, mdevenv.run,
+                         help_msg='Run commands in developer environment')
+        self.add_command('env2mfile', env2mfile.add_arguments, env2mfile.run,
+                         help_msg='Convert current environment to a cross or native file')
+        # Add new commands above this line to list them in help command
+        self.add_command('help', self.add_help_arguments, self.run_help_command,
+                         help_msg='Print help of a subcommand')
+
+        # Hidden commands
+        self.add_command('runpython', self.add_runpython_arguments, self.run_runpython_command,
+                         help_msg=argparse.SUPPRESS)
+        self.add_command('unstable-coredata', munstable_coredata.add_arguments, munstable_coredata.run,
+                         help_msg=argparse.SUPPRESS)
+
+    def add_command(self, name, add_arguments_func, run_func, help_msg, aliases=None):
+        aliases = aliases or []
+        # FIXME: Cannot have hidden subparser:
+        # https://bugs.python.org/issue22848
+        if help_msg == argparse.SUPPRESS:
+            p = argparse.ArgumentParser(prog='meson ' + name, formatter_class=self.formatter)
+            self.hidden_commands.append(name)
+        else:
+            p = self.subparsers.add_parser(name, help=help_msg, aliases=aliases, formatter_class=self.formatter)
+        add_arguments_func(p)
+        p.set_defaults(run_func=run_func)
+        for i in [name] + aliases:
+            self.commands[i] = p
+
+    def add_runpython_arguments(self, parser: argparse.ArgumentParser):
+        parser.add_argument('-c', action='store_true', dest='eval_arg', default=False)
+        parser.add_argument('--version', action='version', version=platform.python_version())
+        parser.add_argument('script_file')
+        parser.add_argument('script_args', nargs=argparse.REMAINDER)
+
+    def run_runpython_command(self, options):
+        sys.argv[1:] = options.script_args
+        if options.eval_arg:
+            exec(options.script_file)
+        else:
+            import runpy
+            sys.path.insert(0, os.path.dirname(options.script_file))
+            runpy.run_path(options.script_file, run_name='__main__')
+        return 0
+
+    def add_help_arguments(self, parser):
+        parser.add_argument('command', nargs='?', choices=list(self.commands.keys()))
+
+    def run_help_command(self, options):
+        if options.command:
+            self.commands[options.command].print_help()
+        else:
+            self.parser.print_help()
+        return 0
+
+    def run(self, args):
+        implicit_setup_command_notice = False
+        # If first arg is not a known command, assume user wants to run the setup
+        # command.
+        known_commands = list(self.commands.keys()) + ['-h', '--help']
+        if not args or args[0] not in known_commands:
+            implicit_setup_command_notice = True
+            args = ['setup'] + args
+
+        # Hidden commands have their own parser instead of using the global one
+        if args[0] in self.hidden_commands:
+            command = args[0]
+            parser = self.commands[command]
+            args = args[1:]
+        else:
+            parser = self.parser
+            command = None
+
+        from . import mesonlib
+        args = mesonlib.expand_arguments(args)
+        options = parser.parse_args(args)
+
+        if command is None:
+            command = options.command
+
+        # Bump the version here in order to add a pre-exit warning that we are phasing out
+        # support for old python. If this is already the oldest supported version, then
+        # this can never be true and does nothing.
+        pending_python_deprecation_notice = \
+            command in {'setup', 'compile', 'test', 'install'} and sys.version_info < (3, 7)
+
+        try:
+            return options.run_func(options)
+        except Exception as e:
+            return errorhandler(e, command)
+        finally:
+            if implicit_setup_command_notice:
+                mlog.warning('Running the setup command as `meson [options]` instead of '
+                             '`meson setup [options]` is ambiguous and deprecated.', fatal=False)
+            if pending_python_deprecation_notice:
+                mlog.notice('You are using Python 3.6 which is EOL. Starting with v0.62.0, '
+                            'Meson will require Python 3.7 or newer', fatal=False)
+            mlog.shutdown()
+
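+# Editor's illustrative sketch (not part of the vendored sources): because
+# unknown first arguments fall through to 'setup' in CommandLineParser.run,
+# `meson builddir` parses the same as `meson setup builddir`, plus a
+# deprecation warning. The build directory name here is hypothetical.
+def _example_implicit_setup() -> int:
+    return CommandLineParser().run(['setup', 'builddir'])
+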
+def run_script_command(script_name, script_args):
+    # Map script names to module names for those that don't match
+    script_map = {'exe': 'meson_exe',
+                  'install': 'meson_install',
+                  'delsuffix': 'delwithsuffix',
+                  'gtkdoc': 'gtkdochelper',
+                  'hotdoc': 'hotdochelper',
+                  'regencheck': 'regen_checker'}
+    module_name = script_map.get(script_name, script_name)
+
+    try:
+        module = importlib.import_module('mesonbuild.scripts.' + module_name)
+    except ModuleNotFoundError as e:
+        mlog.exception(e)
+        return 1
+
+    try:
+        return module.run(script_args)
+    except MesonException as e:
+        mlog.error(f'Error in {script_name} helper script:')
+        mlog.exception(e)
+        return 1
+
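+# Editor's illustrative sketch (not part of the vendored sources): with the
+# mapping above, the internal 'exe' helper dispatches to
+# mesonbuild.scripts.meson_exe, while unmapped names import a module of the
+# same name. The argument list shown is hypothetical.
+def _example_script_dispatch() -> int:
+    return run_script_command('exe', ['--unpickle', 'exe.dat'])
+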
+def ensure_stdout_accepts_unicode():
+    if sys.stdout.encoding and not sys.stdout.encoding.upper().startswith('UTF-'):
+        sys.stdout.reconfigure(errors='surrogateescape')
+
+def set_meson_command(mainfile):
+    # Set the meson command that will be used to run scripts and so on
+    from . import mesonlib
+    mesonlib.set_meson_command(mainfile)
+
+def run(original_args, mainfile):
+    if os.environ.get('MESON_SHOW_DEPRECATIONS'):
+        # workaround for https://bugs.python.org/issue34624
+        import warnings
+        for typ in [DeprecationWarning, SyntaxWarning, FutureWarning, PendingDeprecationWarning]:
+            warnings.filterwarnings('error', category=typ, module='mesonbuild')
+        warnings.filterwarnings('ignore', message=".*importlib-resources.*")
+
+    if sys.version_info >= (3, 10) and os.environ.get('MESON_RUNNING_IN_PROJECT_TESTS'):
+        # workaround for https://bugs.python.org/issue34624
+        import warnings
+        warnings.filterwarnings('error', category=EncodingWarning, module='mesonbuild')
+        # python 3.11 adds a warning that in 3.15, UTF-8 mode will be default.
+        # This is fantastic news, we'd love that. Less fantastic: this warning is silly,
+        # we *want* these checks to be affected. Plus, the recommended alternative API
+        # would (in addition to warning people when UTF-8 mode removed the problem) also
+        # require using a minimum python version of 3.11 (in which the warning was added)
+        # or add verbose if/else soup.
+        warnings.filterwarnings('ignore', message="UTF-8 Mode affects .*getpreferredencoding", category=EncodingWarning)
+
+    # Meson gets confused if stdout can't output Unicode, so if the
+    # locale isn't Unicode, just force stdout to accept it. This tries
+    # to emulate enough of PEP 540 to work elsewhere.
+    ensure_stdout_accepts_unicode()
+
+    # https://github.com/mesonbuild/meson/issues/3653
+    if sys.platform == 'cygwin' and os.environ.get('MSYSTEM', '') not in ['MSYS', '']:
+        mlog.error('This python3 seems to be msys/python on MSYS2 Windows, but you are in a MinGW environment')
+        mlog.error('Please install and use mingw-w64-x86_64-python3 and/or mingw-w64-x86_64-meson with Pacman')
+        return 2
+
+    args = original_args[:]
+
+    # Special handling of internal commands called from backends, they don't
+    # need to go through argparse.
+    if len(args) >= 2 and args[0] == '--internal':
+        if args[1] == 'regenerate':
+            set_meson_command(mainfile)
+            from . import msetup
+            try:
+                return msetup.run(['--reconfigure'] + args[2:])
+            except Exception as e:
+                return errorhandler(e, 'setup')
+        else:
+            return run_script_command(args[1], args[2:])
+
+    set_meson_command(mainfile)
+    return CommandLineParser().run(args)
+
+def main():
+    # Always resolve the command path so Ninja can find it for regen, tests, etc.
+    if 'meson.exe' in sys.executable:
+        assert os.path.isabs(sys.executable)
+        launcher = sys.executable
+    else:
+        launcher = os.path.abspath(sys.argv[0])
+    return run(sys.argv[1:], launcher)
+
+if __name__ == '__main__':
+    sys.exit(main())
diff --git a/vendored-meson/meson/mesonbuild/minit.py b/vendored-meson/meson/mesonbuild/minit.py
new file mode 100644
index 000000000000..7cca9cfc1fe9
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/minit.py
@@ -0,0 +1,199 @@
+# Copyright 2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+"""Code that creates simple startup projects."""
+
+from pathlib import Path
+from enum import Enum
+import subprocess
+import shutil
+import sys
+import os
+import re
+from glob import glob
+from mesonbuild import build, mesonlib, mlog
+from mesonbuild.coredata import FORBIDDEN_TARGET_NAMES
+from mesonbuild.environment import detect_ninja
+from mesonbuild.templates.samplefactory import sameple_generator  # sic: upstream spelling
+import typing as T
+
+if T.TYPE_CHECKING:
+    import argparse
+
+'''
+We currently have only one meson.build template.
+'''
+from mesonbuild.templates.mesontemplates import create_meson_build
+
+FORTRAN_SUFFIXES = {'.f', '.for', '.F', '.f90', '.F90'}
+LANG_SUFFIXES = {'.c', '.cc', '.cpp', '.cs', '.cu', '.d', '.m', '.mm', '.rs', '.java', '.vala'} | FORTRAN_SUFFIXES
+LANG_SUPPORTED = {'c', 'cpp', 'cs', 'cuda', 'd', 'fortran', 'java', 'rust', 'objc', 'objcpp', 'vala'}
+
+DEFAULT_PROJECT = 'executable'
+DEFAULT_VERSION = '0.1'
+class DEFAULT_TYPES(Enum):
+    EXE = 'executable'
+    LIB = 'library'
+
+INFO_MESSAGE = '''Sample project created. To build it run the
+following commands:
+
+meson setup builddir
+meson compile -C builddir
+'''
+
+
+def create_sample(options: 'argparse.Namespace') -> None:
+    '''
+    Based on the arguments passed, find the matching language and project
+    type, then create a new Meson sample project.
+    '''
+    sample_gen = sameple_generator(options)
+    if options.type == DEFAULT_TYPES['EXE'].value:
+        sample_gen.create_executable()
+    elif options.type == DEFAULT_TYPES['LIB'].value:
+        sample_gen.create_library()
+    else:
+        raise RuntimeError('Unreachable code')
+    print(INFO_MESSAGE)
+
+def autodetect_options(options: 'argparse.Namespace', sample: bool = False) -> None:
+    '''
+    Autodetect options for any arguments that were not passed in, so the
+    user doesn't have to think about them.
+    '''
+    if not options.name:
+        options.name = Path().resolve().stem
+        if not re.match('[a-zA-Z_][a-zA-Z0-9]*', options.name) and sample:
+            raise SystemExit(f'Name of current directory "{options.name}" is not usable as a sample project name.\n'
+                             'Specify a project name with --name.')
+        print(f'Using "{options.name}" (name of current directory) as project name.')
+    if not options.executable:
+        options.executable = options.name
+        print(f'Using "{options.executable}" (project name) as name of executable to build.')
+    if options.executable in FORBIDDEN_TARGET_NAMES:
+        raise mesonlib.MesonException(f'Executable name {options.executable!r} is reserved for Meson internal use. '
+                                      'Refusing to init an invalid project.')
+    if sample:
+        # The rest of the autodetection is not applicable to generating sample projects.
+        return
+    if not options.srcfiles:
+        srcfiles = []
+        for f in (f for f in Path().iterdir() if f.is_file()):
+            if f.suffix in LANG_SUFFIXES:
+                srcfiles.append(f)
+        if not srcfiles:
+            raise SystemExit('No recognizable source files found.\n'
+                             'Run meson init in an empty directory to create a sample project.')
+        options.srcfiles = srcfiles
+        print("Detected source files: " + ' '.join(str(s) for s in srcfiles))
+    options.srcfiles = [Path(f) for f in options.srcfiles]
+    if not options.language:
+        for f in options.srcfiles:
+            if f.suffix == '.c':
+                options.language = 'c'
+                break
+            if f.suffix in {'.cc', '.cpp'}:
+                options.language = 'cpp'
+                break
+            if f.suffix == '.cs':
+                options.language = 'cs'
+                break
+            if f.suffix == '.cu':
+                options.language = 'cuda'
+                break
+            if f.suffix == '.d':
+                options.language = 'd'
+                break
+            if f.suffix in FORTRAN_SUFFIXES:
+                options.language = 'fortran'
+                break
+            if f.suffix == '.rs':
+                options.language = 'rust'
+                break
+            if f.suffix == '.m':
+                options.language = 'objc'
+                break
+            if f.suffix == '.mm':
+                options.language = 'objcpp'
+                break
+            if f.suffix == '.java':
+                options.language = 'java'
+                break
+            if f.suffix == '.vala':
+                options.language = 'vala'
+                break
+        if not options.language:
+            raise SystemExit("Can't autodetect language, please specify it with -l.")
+        print("Detected language: " + options.language)
+
+def add_arguments(parser: 'argparse.ArgumentParser') -> None:
+    '''
+    Add the arguments that the user can pass when creating a new Meson
+    project.
+    '''
+    parser.add_argument("srcfiles", metavar="sourcefile", nargs="*", help="source files. default: all recognized files in current directory")
+    parser.add_argument('-C', dest='wd', action=mesonlib.RealPathAction,
+                        help='directory to cd into before running')
+    parser.add_argument("-n", "--name", help="project name. default: name of current directory")
+    parser.add_argument("-e", "--executable", help="executable name. default: project name")
+    parser.add_argument("-d", "--deps", help="dependencies, comma-separated")
+    parser.add_argument("-l", "--language", choices=sorted(LANG_SUPPORTED), help="project language. default: autodetected based on source files")
+    parser.add_argument("-b", "--build", action='store_true', help="build after generation")
+    parser.add_argument("--builddir", default='build', help="directory for build")
+    parser.add_argument("-f", "--force", action="store_true", help="force overwrite of existing files and directories.")
+    parser.add_argument('--type', default=DEFAULT_PROJECT, choices=('executable', 'library'), help=f"project type. default: {DEFAULT_PROJECT} based project")
+    parser.add_argument('--version', default=DEFAULT_VERSION, help=f"project version. default: {DEFAULT_VERSION}")
+
+def run(options: 'argparse.Namespace') -> int:
+    '''
+    Generate the new Meson project: a sample project if the current
+    directory is empty, otherwise a meson.build for the existing sources.
+    '''
+    if not Path(options.wd).exists():
+        sys.exit('Project source root directory not found. Run this command in source directory root.')
+    os.chdir(options.wd)
+
+    if not glob('*'):
+        autodetect_options(options, sample=True)
+        if not options.language:
+            print('Defaulting to generating a C language project.')
+            options.language = 'c'
+        create_sample(options)
+    else:
+        autodetect_options(options)
+        if Path('meson.build').is_file() and not options.force:
+            raise SystemExit('meson.build already exists. Use --force to overwrite.')
+        create_meson_build(options)
+    if options.build:
+        if Path(options.builddir).is_dir() and options.force:
+            print('Build directory already exists, deleting it.')
+            shutil.rmtree(options.builddir)
+        print('Building...')
+        cmd = mesonlib.get_meson_command() + ['setup', options.builddir]
+        ret = subprocess.run(cmd)
+        if ret.returncode:
+            raise SystemExit(ret.returncode)
+
+        b = build.load(options.builddir)
+        need_vsenv = T.cast('bool', b.environment.coredata.get_option(mesonlib.OptionKey('vsenv')))
+        vsenv_active = mesonlib.setup_vsenv(need_vsenv)
+        if vsenv_active:
+            mlog.log(mlog.green('INFO:'), 'automatically activated MSVC compiler environment')
+
+        cmd = detect_ninja() + ['-C', options.builddir]
+        ret = subprocess.run(cmd)
+        if ret.returncode:
+            raise SystemExit(ret.returncode)
+    return 0
diff --git a/vendored-meson/meson/mesonbuild/minstall.py b/vendored-meson/meson/mesonbuild/minstall.py
new file mode 100644
index 000000000000..49006917e914
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/minstall.py
@@ -0,0 +1,864 @@
+# Copyright 2013-2014 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+from glob import glob
+import argparse
+import errno
+import os
+import selectors
+import shlex
+import shutil
+import subprocess
+import sys
+import typing as T
+import re
+
+from . import build, coredata, environment
+from .backend.backends import InstallData
+from .mesonlib import (MesonException, Popen_safe, RealPathAction, is_windows,
+                       is_aix, setup_vsenv, pickle_load, is_osx, OptionKey)
+from .scripts import depfixer, destdir_join
+from .scripts.meson_exe import run_exe
+try:
+    from __main__ import __file__ as main_file
+except ImportError:
+    # Happens when running as meson.exe which is native Windows.
+    # This is only used for pkexec which is not, so this is fine.
+    main_file = None
+
+if T.TYPE_CHECKING:
+    from .backend.backends import (
+            ExecutableSerialisation, InstallDataBase, InstallEmptyDir,
+            InstallSymlinkData, TargetInstallData
+    )
+    from .mesonlib import FileMode, EnvironOrDict
+
+    try:
+        from typing import Protocol
+    except ImportError:
+        from typing_extensions import Protocol  # type: ignore
+
+    class ArgumentType(Protocol):
+        """Typing information for the object returned by argparse."""
+        no_rebuild: bool
+        only_changed: bool
+        profile: bool
+        quiet: bool
+        wd: str
+        destdir: str
+        dry_run: bool
+        skip_subprojects: str
+        tags: str
+        strip: bool
+
+
+symlink_warning = '''Warning: trying to copy a symlink that points to a file. This currently copies the file,
+but will be changed in a future version of Meson to copy the symlink as is. Please update your
+build definitions so that they will not break when the change happens.'''
+
+selinux_updates: T.List[str] = []
+
+def add_arguments(parser: argparse.ArgumentParser) -> None:
+    parser.add_argument('-C', dest='wd', action=RealPathAction,
+                        help='directory to cd into before running')
+    parser.add_argument('--profile-self', action='store_true', dest='profile',
+                        help=argparse.SUPPRESS)
+    parser.add_argument('--no-rebuild', default=False, action='store_true',
+                        help='Do not rebuild before installing.')
+    parser.add_argument('--only-changed', default=False, action='store_true',
+                        help='Only overwrite files that are older than the copied file.')
+    parser.add_argument('--quiet', default=False, action='store_true',
+                        help='Do not print every file that was installed.')
+    parser.add_argument('--destdir', default=None,
+                        help='Sets or overrides DESTDIR environment. (Since 0.57.0)')
+    parser.add_argument('--dry-run', '-n', action='store_true',
+                        help='Doesn\'t actually install, but print logs. (Since 0.57.0)')
+    parser.add_argument('--skip-subprojects', nargs='?', const='*', default='',
+                        help='Do not install files from given subprojects. (Since 0.58.0)')
+    parser.add_argument('--tags', default=None,
+                        help='Install only targets having one of the given tags. (Since 0.60.0)')
+    parser.add_argument('--strip', action='store_true',
+                        help='Strip targets even if strip option was not set during configure. (Since 0.62.0)')
+
+class DirMaker:
+    def __init__(self, lf: T.TextIO, makedirs: T.Callable[..., None]):
+        self.lf = lf
+        self.dirs: T.List[str] = []
+        self.all_dirs: T.Set[str] = set()
+        self.makedirs_impl = makedirs
+
+    def makedirs(self, path: str, exist_ok: bool = False) -> None:
+        dirname = os.path.normpath(path)
+        self.all_dirs.add(dirname)
+        dirs = []
+        while dirname != os.path.dirname(dirname):
+            if dirname in self.dirs:
+                # In dry-run mode the directory does not exist but we would have
+                # created it with all its parents otherwise.
+                break
+            if not os.path.exists(dirname):
+                dirs.append(dirname)
+            dirname = os.path.dirname(dirname)
+        self.makedirs_impl(path, exist_ok=exist_ok)
+
+        # store the directories in creation order, with the parent directory
+        # before the child directories. Future calls of makedir() will not
+        # create the parent directories, so the last element in the list is
+        # the last one to be created. That is the first one to be removed on
+        # __exit__
+        dirs.reverse()
+        self.dirs += dirs
+
+    def __enter__(self) -> 'DirMaker':
+        return self
+
+    def __exit__(self, exception_type: T.Type[Exception], value: T.Any, traceback: T.Any) -> None:
+        self.dirs.reverse()
+        for d in self.dirs:
+            append_to_log(self.lf, d)
+
+
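+# Editor's illustrative sketch (not part of the vendored sources): DirMaker
+# records every directory it creates and, on exit, logs them deepest-first so
+# that an uninstaller can remove them in reverse creation order. The path is
+# hypothetical.
+def _example_dirmaker(lf: T.TextIO) -> None:
+    with DirMaker(lf, os.makedirs) as dm:
+        dm.makedirs('staging/usr/share/demo', exist_ok=True)
+    # lf now lists .../demo before .../share before .../usr before staging
+
+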
+def load_install_data(fname: str) -> InstallData:
+    return pickle_load(fname, 'InstallData', InstallData)
+
+def is_executable(path: str, follow_symlinks: bool = False) -> bool:
+    '''Checks whether any of the "x" bits are set in the source file mode.'''
+    return bool(os.stat(path, follow_symlinks=follow_symlinks).st_mode & 0o111)
+
+
+def append_to_log(lf: T.TextIO, line: str) -> None:
+    lf.write(line)
+    if not line.endswith('\n'):
+        lf.write('\n')
+    lf.flush()
+
+
+def set_chown(path: str, user: T.Union[str, int, None] = None,
+              group: T.Union[str, int, None] = None,
+              dir_fd: T.Optional[int] = None, follow_symlinks: bool = True) -> None:
+    # shutil.chown calls os.chown without passing all the parameters,
+    # in particular follow_symlinks, so we temporarily replace it with a
+    # wrapper that forwards every parameter, ensuring follow_symlinks is
+    # actually passed through.
+    # Not nice, but better than actually rewriting shutil.chown until
+    # this python bug is fixed: https://bugs.python.org/issue18108
+    real_os_chown = os.chown
+
+    def chown(path: T.Union[int, str, 'os.PathLike[str]', bytes, 'os.PathLike[bytes]'],
+              uid: int, gid: int, *, dir_fd: T.Optional[int] = dir_fd,
+              follow_symlinks: bool = follow_symlinks) -> None:
+        """Override the default behavior of os.chown
+
+        Use a real function rather than a lambda to help mypy out. Also real
+        functions are faster.
+        """
+        real_os_chown(path, uid, gid, dir_fd=dir_fd, follow_symlinks=follow_symlinks)
+
+    try:
+        os.chown = chown
+        shutil.chown(path, user, group)
+    finally:
+        os.chown = real_os_chown
+
+
+def set_chmod(path: str, mode: int, dir_fd: T.Optional[int] = None,
+              follow_symlinks: bool = True) -> None:
+    try:
+        os.chmod(path, mode, dir_fd=dir_fd, follow_symlinks=follow_symlinks)
+    except (NotImplementedError, OSError, SystemError):
+        if not os.path.islink(path):
+            os.chmod(path, mode, dir_fd=dir_fd)
+
+
+def sanitize_permissions(path: str, umask: T.Union[str, int]) -> None:
+    # TODO: with python 3.8 or typing_extensions we could replace this with
+    # `umask: T.Union[T.Literal['preserve'], int]`, which would be more correct
+    if umask == 'preserve':
+        return
+    assert isinstance(umask, int), 'umask should only be "preserve" or an integer'
+    new_perms = 0o777 if is_executable(path, follow_symlinks=False) else 0o666
+    new_perms &= ~umask
+    try:
+        set_chmod(path, new_perms, follow_symlinks=False)
+    except PermissionError as e:
+        print(f'{path!r}: Unable to set permissions {new_perms!r}: {e.strerror}, ignoring...')
+
+
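+# Editor's illustrative sketch (not part of the vendored sources): the
+# permission arithmetic sanitize_permissions applies, for the common umask 0o022.
+def _example_umask_arithmetic() -> None:
+    umask = 0o022
+    assert 0o666 & ~umask == 0o644  # plain file
+    assert 0o777 & ~umask == 0o755  # file with any executable bit set
+
+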
+def set_mode(path: str, mode: T.Optional['FileMode'], default_umask: T.Union[str, int]) -> None:
+    if mode is None or all(m is None for m in [mode.perms_s, mode.owner, mode.group]):
+        # Just sanitize permissions with the default umask
+        sanitize_permissions(path, default_umask)
+        return
+    # No chown() on Windows, and must set one of owner/group
+    if not is_windows() and (mode.owner is not None or mode.group is not None):
+        try:
+            set_chown(path, mode.owner, mode.group, follow_symlinks=False)
+        except PermissionError as e:
+            print(f'{path!r}: Unable to set owner {mode.owner!r} and group {mode.group!r}: {e.strerror}, ignoring...')
+        except LookupError:
+            print(f'{path!r}: Nonexistent owner {mode.owner!r} or group {mode.group!r}: ignoring...')
+        except OSError as e:
+            if e.errno == errno.EINVAL:
+                print(f'{path!r}: Nonexistent numeric owner {mode.owner!r} or group {mode.group!r}: ignoring...')
+            else:
+                raise
+    # Must set permissions *after* setting owner/group otherwise the
+    # setuid/setgid bits will get wiped by chmod
+    # NOTE: On Windows you can set read/write perms; the rest are ignored
+    if mode.perms_s is not None:
+        try:
+            set_chmod(path, mode.perms, follow_symlinks=False)
+        except PermissionError as e:
+            print(f'{path!r}: Unable to set permissions {mode.perms_s!r}: {e.strerror}, ignoring...')
+    else:
+        sanitize_permissions(path, default_umask)
+
+
+def restore_selinux_contexts() -> None:
+    '''
+    Restores the SELinux context for files in @selinux_updates
+
+    If $DESTDIR is set, do not warn if the call fails.
+    '''
+    try:
+        subprocess.check_call(['selinuxenabled'])
+    except (FileNotFoundError, NotADirectoryError, OSError, PermissionError, subprocess.CalledProcessError):
+        # If we don't have selinux or selinuxenabled returned 1, failure
+        # is ignored quietly.
+        return
+
+    if not shutil.which('restorecon'):
+        # If we don't have restorecon, failure is ignored quietly.
+        return
+
+    if not selinux_updates:
+        # If the list of files is empty, do not try to call restorecon.
+        return
+
+    proc, out, err = Popen_safe(['restorecon', '-F', '-f-', '-0'], '\0'.join(selinux_updates) + '\0')
+    if proc.returncode != 0:
+        print('Failed to restore SELinux context of installed files...',
+              'Standard output:', out,
+              'Standard error:', err, sep='\n')
+
+def get_destdir_path(destdir: str, fullprefix: str, path: str) -> str:
+    if os.path.isabs(path):
+        output = destdir_join(destdir, path)
+    else:
+        output = os.path.join(fullprefix, path)
+    return output
+
+
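+# Editor's illustrative sketch (not part of the vendored sources): on a POSIX
+# system, absolute install paths are re-rooted under DESTDIR while relative
+# ones resolve against the already re-rooted prefix.
+def _example_destdir_paths() -> None:
+    assert get_destdir_path('/staging', '/staging/usr', '/usr/lib') == '/staging/usr/lib'
+    assert get_destdir_path('/staging', '/staging/usr', 'lib') == '/staging/usr/lib'
+
+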
+def check_for_stampfile(fname: str) -> str:
+    '''Some languages e.g. Rust have output files
+    whose names are not known at configure time.
+    Check if this is the case and return the real
+    file instead.'''
+    if fname.endswith('.so') or fname.endswith('.dll'):
+        if os.stat(fname).st_size == 0:
+            (base, suffix) = os.path.splitext(fname)
+            files = glob(base + '-*' + suffix)
+            if len(files) > 1:
+                print("Stale dynamic library files in build dir. Can't install.")
+                sys.exit(1)
+            if len(files) == 1:
+                return files[0]
+    elif fname.endswith('.a') or fname.endswith('.lib'):
+        if os.stat(fname).st_size == 0:
+            (base, suffix) = os.path.splitext(fname)
+            files = glob(base + '-*.rlib')
+            if len(files) > 1:
+                print("Stale static library files in build dir. Can't install.")
+                sys.exit(1)
+            if len(files) == 1:
+                return files[0]
+    return fname
+
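+# Editor's illustrative sketch (not part of the vendored sources): for a Rust
+# shared library whose real name is only known at build time, the configure-time
+# name may be a zero-size stamp; the real artifact is then found by globbing.
+def _example_stampfile() -> str:
+    # hypothetical: if libfoo.so is an empty stamp, libfoo-<suffix>.so is returned
+    return check_for_stampfile('libfoo.so')
+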
+
+class Installer:
+
+    def __init__(self, options: 'ArgumentType', lf: T.TextIO):
+        self.did_install_something = False
+        self.printed_symlink_error = False
+        self.options = options
+        self.lf = lf
+        self.preserved_file_count = 0
+        self.dry_run = options.dry_run
+        # [''] means skip none,
+        # ['*'] means skip all,
+        # ['sub1', ...] means skip only those.
+        self.skip_subprojects = [i.strip() for i in options.skip_subprojects.split(',')]
+        self.tags = [i.strip() for i in options.tags.split(',')] if options.tags else None
+
+    def remove(self, *args: T.Any, **kwargs: T.Any) -> None:
+        if not self.dry_run:
+            os.remove(*args, **kwargs)
+
+    def symlink(self, *args: T.Any, **kwargs: T.Any) -> None:
+        if not self.dry_run:
+            os.symlink(*args, **kwargs)
+
+    def makedirs(self, *args: T.Any, **kwargs: T.Any) -> None:
+        if not self.dry_run:
+            os.makedirs(*args, **kwargs)
+
+    def copy(self, *args: T.Any, **kwargs: T.Any) -> None:
+        if not self.dry_run:
+            shutil.copy(*args, **kwargs)
+
+    def copy2(self, *args: T.Any, **kwargs: T.Any) -> None:
+        if not self.dry_run:
+            shutil.copy2(*args, **kwargs)
+
+    def copyfile(self, *args: T.Any, **kwargs: T.Any) -> None:
+        if not self.dry_run:
+            shutil.copyfile(*args, **kwargs)
+
+    def copystat(self, *args: T.Any, **kwargs: T.Any) -> None:
+        if not self.dry_run:
+            shutil.copystat(*args, **kwargs)
+
+    def fix_rpath(self, *args: T.Any, **kwargs: T.Any) -> None:
+        if not self.dry_run:
+            depfixer.fix_rpath(*args, **kwargs)
+
+    def set_chown(self, *args: T.Any, **kwargs: T.Any) -> None:
+        if not self.dry_run:
+            set_chown(*args, **kwargs)
+
+    def set_chmod(self, *args: T.Any, **kwargs: T.Any) -> None:
+        if not self.dry_run:
+            set_chmod(*args, **kwargs)
+
+    def sanitize_permissions(self, *args: T.Any, **kwargs: T.Any) -> None:
+        if not self.dry_run:
+            sanitize_permissions(*args, **kwargs)
+
+    def set_mode(self, *args: T.Any, **kwargs: T.Any) -> None:
+        if not self.dry_run:
+            set_mode(*args, **kwargs)
+
+    def restore_selinux_contexts(self, destdir: str) -> None:
+        if not self.dry_run and not destdir:
+            restore_selinux_contexts()
+
+    def Popen_safe(self, *args: T.Any, **kwargs: T.Any) -> T.Tuple[int, str, str]:
+        if not self.dry_run:
+            p, o, e = Popen_safe(*args, **kwargs)
+            return p.returncode, o, e
+        return 0, '', ''
+
+    def run_exe(self, exe: ExecutableSerialisation, extra_env: T.Optional[T.Dict[str, str]] = None) -> int:
+        if (not self.dry_run) or exe.dry_run:
+            return run_exe(exe, extra_env)
+        return 0
+
+    def should_install(self, d: T.Union[TargetInstallData, InstallEmptyDir,
+                                        InstallDataBase, InstallSymlinkData,
+                                        ExecutableSerialisation]) -> bool:
+        if d.subproject and (d.subproject in self.skip_subprojects or '*' in self.skip_subprojects):
+            return False
+        if self.tags and d.tag not in self.tags:
+            return False
+        return True
+
+    def log(self, msg: str) -> None:
+        if not self.options.quiet:
+            print(msg)
+
+    def should_preserve_existing_file(self, from_file: str, to_file: str) -> bool:
+        if not self.options.only_changed:
+            return False
+        # Always replace dangling symlinks
+        if os.path.islink(from_file) and not os.path.isfile(from_file):
+            return False
+        from_time = os.stat(from_file).st_mtime
+        to_time = os.stat(to_file).st_mtime
+        return from_time <= to_time
+
+    def do_copyfile(self, from_file: str, to_file: str,
+                    makedirs: T.Optional[T.Tuple[T.Any, str]] = None) -> bool:
+        outdir = os.path.split(to_file)[0]
+        if not os.path.isfile(from_file) and not os.path.islink(from_file):
+            raise MesonException(f'Tried to install something that isn\'t a file: {from_file!r}')
+        # copyfile fails if the target file already exists, so remove it to
+        # allow overwriting a previous install. If the target is not a file, we
+        # want to give a readable error.
+        if os.path.exists(to_file):
+            if not os.path.isfile(to_file):
+                raise MesonException(f'Destination {to_file!r} already exists and is not a file')
+            if self.should_preserve_existing_file(from_file, to_file):
+                append_to_log(self.lf, f'# Preserving old file {to_file}\n')
+                self.preserved_file_count += 1
+                return False
+            self.remove(to_file)
+        elif makedirs:
+            # Unpack tuple
+            dirmaker, outdir = makedirs
+            # Create dirs if needed
+            dirmaker.makedirs(outdir, exist_ok=True)
+        self.log(f'Installing {from_file} to {outdir}')
+        if os.path.islink(from_file):
+            if not os.path.exists(from_file):
+                # Dangling symlink. Replicate as is.
+                self.copy(from_file, outdir, follow_symlinks=False)
+            else:
+                # Remove this entire branch when changing the behaviour to duplicate
+                # symlinks rather than copying what they point to.
+                print(symlink_warning)
+                self.copy2(from_file, to_file)
+        else:
+            self.copy2(from_file, to_file)
+        selinux_updates.append(to_file)
+        append_to_log(self.lf, to_file)
+        return True
+
+    def do_symlink(self, target: str, link: str, destdir: str, full_dst_dir: str, allow_missing: bool) -> bool:
+        abs_target = target
+        if not os.path.isabs(target):
+            abs_target = os.path.join(full_dst_dir, target)
+        elif not os.path.exists(abs_target) and not allow_missing:
+            abs_target = destdir_join(destdir, abs_target)
+        if not os.path.exists(abs_target) and not allow_missing:
+            raise MesonException(f'Tried to install symlink to missing file {abs_target}')
+        if os.path.exists(link):
+            if not os.path.islink(link):
+                raise MesonException(f'Destination {link!r} already exists and is not a symlink')
+            self.remove(link)
+        if not self.printed_symlink_error:
+            self.log(f'Installing symlink pointing to {target} to {link}')
+        try:
+            self.symlink(target, link, target_is_directory=os.path.isdir(abs_target))
+        except (NotImplementedError, OSError):
+            if not self.printed_symlink_error:
+                print("Symlink creation does not work on this platform. "
+                      "Skipping all symlinking.")
+                self.printed_symlink_error = True
+            return False
+        append_to_log(self.lf, link)
+        return True
+
+    def do_copydir(self, data: InstallData, src_dir: str, dst_dir: str,
+                   exclude: T.Optional[T.Tuple[T.Set[str], T.Set[str]]],
+                   install_mode: 'FileMode', dm: DirMaker) -> None:
+        '''
+        Copies the contents of directory @src_dir into @dst_dir.
+
+        For directory
+            /foo/
+              bar/
+                excluded
+                foobar
+              file
+        do_copydir(..., '/foo', '/dst/dir', {'bar/excluded'}) creates
+            /dst/
+              dir/
+                bar/
+                  foobar
+                file
+
+        Args:
+            src_dir: str, absolute path to the source directory
+            dst_dir: str, absolute path to the destination directory
+            exclude: (set(str), set(str)), tuple of (exclude_files, exclude_dirs),
+                     each element of the set is a path relative to src_dir.
+        '''
+        if not os.path.isabs(src_dir):
+            raise ValueError(f'src_dir must be absolute, got {src_dir}')
+        if not os.path.isabs(dst_dir):
+            raise ValueError(f'dst_dir must be absolute, got {dst_dir}')
+        if exclude is not None:
+            exclude_files, exclude_dirs = exclude
+            exclude_files = {os.path.normpath(x) for x in exclude_files}
+            exclude_dirs = {os.path.normpath(x) for x in exclude_dirs}
+        else:
+            exclude_files = exclude_dirs = set()
+        for root, dirs, files in os.walk(src_dir):
+            assert os.path.isabs(root)
+            for d in dirs[:]:
+                abs_src = os.path.join(root, d)
+                filepart = os.path.relpath(abs_src, start=src_dir)
+                abs_dst = os.path.join(dst_dir, filepart)
+                # Remove these so they aren't visited by os.walk at all.
+                if filepart in exclude_dirs:
+                    dirs.remove(d)
+                    continue
+                if os.path.isdir(abs_dst):
+                    continue
+                if os.path.exists(abs_dst):
+                    print(f'Tried to copy directory {abs_dst} but a file of that name already exists.')
+                    sys.exit(1)
+                dm.makedirs(abs_dst)
+                self.copystat(abs_src, abs_dst)
+                self.sanitize_permissions(abs_dst, data.install_umask)
+            for f in files:
+                abs_src = os.path.join(root, f)
+                filepart = os.path.relpath(abs_src, start=src_dir)
+                if filepart in exclude_files:
+                    continue
+                abs_dst = os.path.join(dst_dir, filepart)
+                if os.path.isdir(abs_dst):
+                    print(f'Tried to copy file {abs_dst} but a directory of that name already exists.')
+                    sys.exit(1)
+                parent_dir = os.path.dirname(abs_dst)
+                if not os.path.isdir(parent_dir):
+                    dm.makedirs(parent_dir)
+                    self.copystat(os.path.dirname(abs_src), parent_dir)
+                # FIXME: what about symlinks?
+                self.do_copyfile(abs_src, abs_dst)
+                self.set_mode(abs_dst, install_mode, data.install_umask)
+
+    def do_install(self, datafilename: str) -> None:
+        d = load_install_data(datafilename)
+
+        destdir = self.options.destdir
+        if destdir is None:
+            destdir = os.environ.get('DESTDIR')
+        if destdir and not os.path.isabs(destdir):
+            destdir = os.path.join(d.build_dir, destdir)
+        # Override in the env because some scripts could use it and require an
+        # absolute path.
+        if destdir is not None:
+            os.environ['DESTDIR'] = destdir
+        destdir = destdir or ''
+        fullprefix = destdir_join(destdir, d.prefix)
+
+        if d.install_umask != 'preserve':
+            assert isinstance(d.install_umask, int)
+            os.umask(d.install_umask)
+
+        self.did_install_something = False
+        try:
+            with DirMaker(self.lf, self.makedirs) as dm:
+                self.install_subdirs(d, dm, destdir, fullprefix) # Must be first, because it needs to delete the old subtree.
+                self.install_targets(d, dm, destdir, fullprefix)
+                self.install_headers(d, dm, destdir, fullprefix)
+                self.install_man(d, dm, destdir, fullprefix)
+                self.install_emptydir(d, dm, destdir, fullprefix)
+                self.install_data(d, dm, destdir, fullprefix)
+                self.install_symlinks(d, dm, destdir, fullprefix)
+                self.restore_selinux_contexts(destdir)
+                self.run_install_script(d, destdir, fullprefix)
+                if not self.did_install_something:
+                    self.log('Nothing to install.')
+                if not self.options.quiet and self.preserved_file_count > 0:
+                    self.log('Preserved {} unchanged files, see {} for the full list'
+                             .format(self.preserved_file_count, os.path.normpath(self.lf.name)))
+        except PermissionError:
+            if is_windows() or destdir != '' or not os.isatty(sys.stdout.fileno()) or not os.isatty(sys.stderr.fileno()):
+                # can't elevate to root except in an interactive unix environment *and* when not doing a destdir install
+                raise
+            rootcmd = os.environ.get('MESON_ROOT_CMD') or shutil.which('sudo') or shutil.which('doas')
+            pkexec = shutil.which('pkexec')
+            if rootcmd is None and pkexec is not None and 'PKEXEC_UID' not in os.environ:
+                rootcmd = pkexec
+
+            if rootcmd is not None:
+                print('Installation failed due to insufficient permissions.')
+                s = selectors.DefaultSelector()
+                s.register(sys.stdin, selectors.EVENT_READ)
+                ans = None
+                for attempt in range(5):
+                    print(f'Attempt to use {rootcmd} to gain elevated privileges? [y/n] ', end='', flush=True)
+                    if s.select(30):
+                        # we waited on sys.stdin *only*
+                        ans = sys.stdin.readline().rstrip('\n')
+                    else:
+                        print()
+                        break
+                    if ans in {'y', 'n'}:
+                        break
+                else:
+                    if ans is not None:
+                        raise MesonException('Answer not one of [y/n]')
+                if ans == 'y':
+                    os.execlp(rootcmd, rootcmd, sys.executable, main_file, *sys.argv[1:],
+                              '-C', os.getcwd(), '--no-rebuild')
+            raise
+
+    def do_strip(self, strip_bin: T.List[str], fname: str, outname: str) -> None:
+        self.log(f'Stripping target {fname!r}.')
+        if is_osx():
+            # macOS expects dynamic objects to be stripped with -x maximum.
+            # To also strip the debug info, -S must be added.
+            # See: https://www.unix.com/man-page/osx/1/strip/
+            returncode, stdo, stde = self.Popen_safe(strip_bin + ['-S', '-x', outname])
+        else:
+            returncode, stdo, stde = self.Popen_safe(strip_bin + [outname])
+        if returncode != 0:
+            print('Could not strip file.\n')
+            print(f'Stdout:\n{stdo}\n')
+            print(f'Stderr:\n{stde}\n')
+            sys.exit(1)
+
+    def install_subdirs(self, d: InstallData, dm: DirMaker, destdir: str, fullprefix: str) -> None:
+        for i in d.install_subdirs:
+            if not self.should_install(i):
+                continue
+            self.did_install_something = True
+            full_dst_dir = get_destdir_path(destdir, fullprefix, i.install_path)
+            self.log(f'Installing subdir {i.path} to {full_dst_dir}')
+            dm.makedirs(full_dst_dir, exist_ok=True)
+            self.do_copydir(d, i.path, full_dst_dir, i.exclude, i.install_mode, dm)
+
+    def install_data(self, d: InstallData, dm: DirMaker, destdir: str, fullprefix: str) -> None:
+        for i in d.data:
+            if not self.should_install(i):
+                continue
+            fullfilename = i.path
+            outfilename = get_destdir_path(destdir, fullprefix, i.install_path)
+            outdir = os.path.dirname(outfilename)
+            if self.do_copyfile(fullfilename, outfilename, makedirs=(dm, outdir)):
+                self.did_install_something = True
+            self.set_mode(outfilename, i.install_mode, d.install_umask)
+
+    def install_symlinks(self, d: InstallData, dm: DirMaker, destdir: str, fullprefix: str) -> None:
+        for s in d.symlinks:
+            if not self.should_install(s):
+                continue
+            full_dst_dir = get_destdir_path(destdir, fullprefix, s.install_path)
+            full_link_name = get_destdir_path(destdir, fullprefix, s.name)
+            dm.makedirs(full_dst_dir, exist_ok=True)
+            if self.do_symlink(s.target, full_link_name, destdir, full_dst_dir, s.allow_missing):
+                self.did_install_something = True
+
+    def install_man(self, d: InstallData, dm: DirMaker, destdir: str, fullprefix: str) -> None:
+        for m in d.man:
+            if not self.should_install(m):
+                continue
+            full_source_filename = m.path
+            outfilename = get_destdir_path(destdir, fullprefix, m.install_path)
+            outdir = os.path.dirname(outfilename)
+            if self.do_copyfile(full_source_filename, outfilename, makedirs=(dm, outdir)):
+                self.did_install_something = True
+            self.set_mode(outfilename, m.install_mode, d.install_umask)
+
+    def install_emptydir(self, d: InstallData, dm: DirMaker, destdir: str, fullprefix: str) -> None:
+        for e in d.emptydir:
+            if not self.should_install(e):
+                continue
+            self.did_install_something = True
+            full_dst_dir = get_destdir_path(destdir, fullprefix, e.path)
+            self.log(f'Installing new directory {full_dst_dir}')
+            if os.path.isfile(full_dst_dir):
+                print(f'Tried to create directory {full_dst_dir} but a file of that name already exists.')
+                sys.exit(1)
+            dm.makedirs(full_dst_dir, exist_ok=True)
+            self.set_mode(full_dst_dir, e.install_mode, d.install_umask)
+
+    def install_headers(self, d: InstallData, dm: DirMaker, destdir: str, fullprefix: str) -> None:
+        for t in d.headers:
+            if not self.should_install(t):
+                continue
+            fullfilename = t.path
+            fname = os.path.basename(fullfilename)
+            outdir = get_destdir_path(destdir, fullprefix, t.install_path)
+            outfilename = os.path.join(outdir, fname)
+            if self.do_copyfile(fullfilename, outfilename, makedirs=(dm, outdir)):
+                self.did_install_something = True
+            self.set_mode(outfilename, t.install_mode, d.install_umask)
+
+    def run_install_script(self, d: InstallData, destdir: str, fullprefix: str) -> None:
+        env = {'MESON_SOURCE_ROOT': d.source_dir,
+               'MESON_BUILD_ROOT': d.build_dir,
+               'MESONINTROSPECT': ' '.join([shlex.quote(x) for x in d.mesonintrospect]),
+               }
+        if self.options.quiet:
+            env['MESON_INSTALL_QUIET'] = '1'
+        if self.dry_run:
+            env['MESON_INSTALL_DRY_RUN'] = '1'
+
+        for i in d.install_scripts:
+            if not self.should_install(i):
+                continue
+
+            if i.installdir_map is not None:
+                mapp = i.installdir_map
+            else:
+                mapp = {'prefix': d.prefix}
+            localenv = env.copy()
+            localenv.update({'MESON_INSTALL_'+k.upper(): os.path.join(d.prefix, v) for k, v in mapp.items()})
+            localenv.update({'MESON_INSTALL_DESTDIR_'+k.upper(): get_destdir_path(destdir, fullprefix, v) for k, v in mapp.items()})
+
+            name = ' '.join(i.cmd_args)
+            if i.skip_if_destdir and destdir:
+                self.log(f'Skipping custom install script {name!r} because DESTDIR is set')
+                continue
+            self.did_install_something = True  # Custom script must report itself if it does nothing.
+            self.log(f'Running custom install script {name!r}')
+            try:
+                rc = self.run_exe(i, localenv)
+            except OSError:
+                print(f'FAILED: install script \'{name}\' could not be run, stopped')
+                # POSIX shells return 127 when a command could not be found
+                sys.exit(127)
+            if rc != 0:
+                print(f'FAILED: install script \'{name}\' exit code {rc}, stopped')
+                sys.exit(rc)
+
+    def install_targets(self, d: InstallData, dm: DirMaker, destdir: str, fullprefix: str) -> None:
+        for t in d.targets:
+            # On AIX, shared libraries are archived. When we install any package on
+            # AIX we need to install the archive in which the shared library lives.
+            # The code below rewrites .so filenames that carry an lt_version or
+            # so_version so that the corresponding archive file is installed instead.
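+            # For example (hypothetical name): 'libfoo.so.1.2.3' first becomes
+            # 'libfoo.a.1.2.3' via replace(), and the re.sub() below collapses
+            # the trailing version suffix, yielding 'libfoo.a'.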
+            if is_aix():
+                if '.so' in t.fname:
+                    t.fname = re.sub('[.][a]([.]?([0-9]+))*([.]?([a-z]+))*', '.a', t.fname.replace('.so', '.a'))
+            if not self.should_install(t):
+                continue
+            if not os.path.exists(t.fname):
+                # For example, import libraries of shared modules are optional
+                if t.optional:
+                    self.log(f'File {t.fname!r} not found, skipping')
+                    continue
+                else:
+                    raise MesonException(f'File {t.fname!r} could not be found')
+            file_copied = False # not set when a directory is copied
+            fname = check_for_stampfile(t.fname)
+            outdir = get_destdir_path(destdir, fullprefix, t.outdir)
+            outname = os.path.join(outdir, os.path.basename(fname))
+            final_path = os.path.join(d.prefix, t.outdir, os.path.basename(fname))
+            should_strip = t.strip or (t.can_strip and self.options.strip)
+            install_rpath = t.install_rpath
+            install_name_mappings = t.install_name_mappings
+            install_mode = t.install_mode
+            if not os.path.exists(fname):
+                raise MesonException(f'File {fname!r} could not be found')
+            elif os.path.isfile(fname):
+                file_copied = self.do_copyfile(fname, outname, makedirs=(dm, outdir))
+                if should_strip and d.strip_bin is not None:
+                    if fname.endswith('.jar'):
+                        self.log('Not stripping jar target: {}'.format(os.path.basename(fname)))
+                        continue
+                    self.do_strip(d.strip_bin, fname, outname)
+                if fname.endswith('.js'):
+                    # Emscripten outputs js files and optionally a wasm file.
+                    # If one was generated, install it as well.
+                    wasm_source = os.path.splitext(fname)[0] + '.wasm'
+                    if os.path.exists(wasm_source):
+                        wasm_output = os.path.splitext(outname)[0] + '.wasm'
+                        file_copied = self.do_copyfile(wasm_source, wasm_output)
+            elif os.path.isdir(fname):
+                fname = os.path.join(d.build_dir, fname.rstrip('/'))
+                outname = os.path.join(outdir, os.path.basename(fname))
+                dm.makedirs(outdir, exist_ok=True)
+                self.do_copydir(d, fname, outname, None, install_mode, dm)
+            else:
+                raise RuntimeError(f'Unknown file type for {fname!r}')
+            if file_copied:
+                self.did_install_something = True
+                try:
+                    self.fix_rpath(outname, t.rpath_dirs_to_remove, install_rpath, final_path,
+                                   install_name_mappings, verbose=False)
+                except SystemExit as e:
+                    if isinstance(e.code, int) and e.code == 0:
+                        pass
+                    else:
+                        raise
+                # file mode needs to be set last, after strip/depfixer editing
+                self.set_mode(outname, install_mode, d.install_umask)
+
+def rebuild_all(wd: str, backend: str) -> bool:
+    if backend == 'none':
+        # nothing to build...
+        return True
+    if backend != 'ninja':
+        print('Only the ninja backend supports rebuilding the project before installation.')
+        return True
+
+    ninja = environment.detect_ninja()
+    if not ninja:
+        print("Can't find ninja, can't rebuild test.")
+        return False
+
+    def drop_privileges() -> T.Tuple[T.Optional[EnvironOrDict], T.Optional[T.Callable[[], None]]]:
+        if not is_windows() and os.geteuid() == 0:
+            import pwd
+            env = os.environ.copy()
+
+            if os.environ.get('SUDO_USER') is not None:
+                orig_user = env.pop('SUDO_USER')
+                orig_uid = env.pop('SUDO_UID', 0)
+                orig_gid = env.pop('SUDO_GID', 0)
+                try:
+                    homedir = pwd.getpwuid(int(orig_uid)).pw_dir
+                except KeyError:
+                    # `sudo chroot` leaves behind a stale SUDO_UID; build as root without a user
+                    return None, None
+            elif os.environ.get('DOAS_USER') is not None:
+                orig_user = env.pop('DOAS_USER')
+                try:
+                    pwdata = pwd.getpwnam(orig_user)
+                except KeyError:
+                    # `doas chroot` leaves behind a stale DOAS_USER; build as root without a user
+                    return None, None
+                orig_uid = pwdata.pw_uid
+                orig_gid = pwdata.pw_gid
+                homedir = pwdata.pw_dir
+            else:
+                return None, None
+
+            if os.stat(os.path.join(wd, 'build.ninja')).st_uid != int(orig_uid):
+                # the entire build process is running with sudo, we can't drop privileges
+                return None, None
+
+            env['USER'] = orig_user
+            env['HOME'] = homedir
+
+            def wrapped() -> None:
+                print(f'Dropping privileges to {orig_user!r} before running ninja...')
+                if orig_gid is not None:
+                    os.setgid(int(orig_gid))
+                if orig_uid is not None:
+                    os.setuid(int(orig_uid))
+
+            return env, wrapped
+        else:
+            return None, None
+
+    env, preexec_fn = drop_privileges()
+    ret = subprocess.run(ninja + ['-C', wd], env=env, preexec_fn=preexec_fn).returncode
+    if ret != 0:
+        print(f'Could not rebuild {wd}')
+        return False
+
+    return True
+
+
+def run(opts: 'ArgumentType') -> int:
+    datafilename = 'meson-private/install.dat'
+    private_dir = os.path.dirname(datafilename)
+    log_dir = os.path.join(private_dir, '../meson-logs')
+    if not os.path.exists(os.path.join(opts.wd, datafilename)):
+        sys.exit('Install data not found. Run this command in build directory root.')
+    if not opts.no_rebuild:
+        b = build.load(opts.wd)
+        need_vsenv = T.cast('bool', b.environment.coredata.get_option(OptionKey('vsenv')))
+        setup_vsenv(need_vsenv)
+        backend = T.cast('str', b.environment.coredata.get_option(coredata.OptionKey('backend')))
+        if not rebuild_all(opts.wd, backend):
+            sys.exit(-1)
+    os.chdir(opts.wd)
+    with open(os.path.join(log_dir, 'install-log.txt'), 'w', encoding='utf-8') as lf:
+        installer = Installer(opts, lf)
+        append_to_log(lf, '# List of files installed by Meson')
+        append_to_log(lf, '# Does not contain files installed by custom scripts.')
+        if opts.profile:
+            import cProfile as profile
+            fname = os.path.join(private_dir, 'profile-installer.log')
+            profile.runctx('installer.do_install(datafilename)', globals(), locals(), filename=fname)
+        else:
+            installer.do_install(datafilename)
+    return 0
diff --git a/vendored-meson/meson/mesonbuild/mintro.py b/vendored-meson/meson/mesonbuild/mintro.py
new file mode 100644
index 000000000000..ab303b3f520b
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/mintro.py
@@ -0,0 +1,672 @@
+# Copyright 2014-2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+"""This is a helper script for IDE developers. It allows you to
+extract information such as list of targets, files, compiler flags,
+tests and so on. All output is in JSON for simple parsing.
+
+Currently only works for the Ninja backend. Others use generated
+project files and don't need this info."""
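+
+# Typical CLI usage (illustrative; the flags are generated from the
+# introspection keys registered in get_meson_introspection_types below):
+#
+#     meson introspect --targets <builddir>
+#     meson introspect --buildoptions --indent <builddir>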
+
+from contextlib import redirect_stdout
+import collections
+import dataclasses
+import json
+import os
+from pathlib import Path, PurePath
+import sys
+import typing as T
+
+from . import build, mesonlib, coredata as cdata
+from .ast import IntrospectionInterpreter, BUILD_TARGET_FUNCTIONS, AstConditionLevel, AstIDGenerator, AstIndentationGenerator, AstJSONPrinter
+from .backend import backends
+from .dependencies import Dependency
+from . import environment
+from .interpreterbase import ObjectHolder
+from .mesonlib import OptionKey
+from .mparser import FunctionNode, ArrayNode, ArgumentNode, StringNode
+
+if T.TYPE_CHECKING:
+    import argparse
+
+    from .interpreter import Interpreter
+    from .mparser import BaseNode
+
+def get_meson_info_file(info_dir: str) -> str:
+    return os.path.join(info_dir, 'meson-info.json')
+
+def get_meson_introspection_version() -> str:
+    return '1.0.0'
+
+def get_meson_introspection_required_version() -> T.List[str]:
+    return ['>=1.0', '<2.0']
+
+class IntroCommand:
+    def __init__(self,
+                 desc: str,
+                 func: T.Optional[T.Callable[[], T.Union[dict, list]]] = None,
+                 no_bd: T.Optional[T.Callable[[IntrospectionInterpreter], T.Union[dict, list]]] = None) -> None:
+        self.desc = desc + '.'
+        self.func = func
+        self.no_bd = no_bd
+
+def get_meson_introspection_types(coredata: T.Optional[cdata.CoreData] = None,
+                                  builddata: T.Optional[build.Build] = None,
+                                  backend: T.Optional[backends.Backend] = None,
+                                  sourcedir: T.Optional[str] = None) -> 'T.Mapping[str, IntroCommand]':
+    if backend and builddata:
+        benchmarkdata = backend.create_test_serialisation(builddata.get_benchmarks())
+        testdata = backend.create_test_serialisation(builddata.get_tests())
+        installdata = backend.create_install_data()
+        interpreter = backend.interpreter
+    else:
+        benchmarkdata = testdata = installdata = None
+
+    # Enforce key order for argparse
+    return collections.OrderedDict([
+        ('ast', IntroCommand('Dump the AST of the meson file', no_bd=dump_ast)),
+        ('benchmarks', IntroCommand('List all benchmarks', func=lambda: list_benchmarks(benchmarkdata))),
+        ('buildoptions', IntroCommand('List all build options', func=lambda: list_buildoptions(coredata), no_bd=list_buildoptions_from_source)),
+        ('buildsystem_files', IntroCommand('List files that make up the build system', func=lambda: list_buildsystem_files(builddata, interpreter))),
+        ('compilers', IntroCommand('List used compilers', func=lambda: list_compilers(coredata))),
+        ('dependencies', IntroCommand('List external dependencies', func=lambda: list_deps(coredata, backend), no_bd=list_deps_from_source)),
+        ('scan_dependencies', IntroCommand('Scan for dependencies used in the meson.build file', no_bd=list_deps_from_source)),
+        ('installed', IntroCommand('List all installed files and directories', func=lambda: list_installed(installdata))),
+        ('install_plan', IntroCommand('List all installed files and directories with their details', func=lambda: list_install_plan(installdata))),
+        ('machines', IntroCommand('Information about host, build, and target machines', func=lambda: list_machines(builddata))),
+        ('projectinfo', IntroCommand('Information about projects', func=lambda: list_projinfo(builddata), no_bd=list_projinfo_from_source)),
+        ('targets', IntroCommand('List top level targets', func=lambda: list_targets(builddata, installdata, backend), no_bd=list_targets_from_source)),
+        ('tests', IntroCommand('List all unit tests', func=lambda: list_tests(testdata))),
+    ])
+
+def add_arguments(parser: argparse.ArgumentParser) -> None:
+    intro_types = get_meson_introspection_types()
+    for key, val in intro_types.items():
+        flag = '--' + key.replace('_', '-')
+        parser.add_argument(flag, action='store_true', dest=key, default=False, help=val.desc)
+
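+    # The loop above turns each introspection key into a CLI flag, e.g. the
+    # 'install_plan' key becomes '--install-plan'.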
+    parser.add_argument('--backend', choices=sorted(cdata.backendlist), dest='backend', default='ninja',
+                        help='The backend to use for the --buildoptions introspection.')
+    parser.add_argument('-a', '--all', action='store_true', dest='all', default=False,
+                        help='Print all available information.')
+    parser.add_argument('-i', '--indent', action='store_true', dest='indent', default=False,
+                        help='Enable pretty printed JSON.')
+    parser.add_argument('-f', '--force-object-output', action='store_true', dest='force_dict', default=False,
+                        help='Always use the new JSON format for multiple entries (even for 0 and 1 introspection commands)')
+    parser.add_argument('builddir', nargs='?', default='.', help='The build directory')
+
+def dump_ast(intr: IntrospectionInterpreter) -> T.Dict[str, T.Any]:
+    printer = AstJSONPrinter()
+    intr.ast.accept(printer)
+    return printer.result
+
+def list_installed(installdata: backends.InstallData) -> T.Dict[str, str]:
+    res = {}
+    if installdata is not None:
+        for t in installdata.targets:
+            res[os.path.join(installdata.build_dir, t.fname)] = \
+                os.path.join(installdata.prefix, t.outdir, os.path.basename(t.fname))
+        for i in installdata.data:
+            res[i.path] = os.path.join(installdata.prefix, i.install_path)
+        for i in installdata.headers:
+            res[i.path] = os.path.join(installdata.prefix, i.install_path, os.path.basename(i.path))
+        for i in installdata.man:
+            res[i.path] = os.path.join(installdata.prefix, i.install_path)
+        for i in installdata.install_subdirs:
+            res[i.path] = os.path.join(installdata.prefix, i.install_path)
+        for s in installdata.symlinks:
+            basename = os.path.basename(s.name)
+            res[basename] = os.path.join(installdata.prefix, s.install_path, basename)
+    return res
+
+def list_install_plan(installdata: backends.InstallData) -> T.Dict[str, T.Dict[str, T.Dict[str, T.Optional[str]]]]:
+    plan = {
+        'targets': {
+            os.path.join(installdata.build_dir, target.fname): {
+                'destination': target.out_name,
+                'tag': target.tag or None,
+                'subproject': target.subproject or None,
+            }
+            for target in installdata.targets
+        },
+    }  # type: T.Dict[str, T.Dict[str, T.Dict[str, T.Optional[str]]]]
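+    # Illustrative shape of the resulting plan (destination strings typically
+    # contain placeholders such as '{libdir_shared}'):
+    #   {'targets': {'/abs/build/libfoo.so': {'destination': '{libdir_shared}/libfoo.so',
+    #                                         'tag': 'runtime', 'subproject': None}},
+    #    'data': {...}, 'man': {...}, ...}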
+    for key, data_list in {
+        'data': installdata.data,
+        'man': installdata.man,
+        'headers': installdata.headers,
+        'install_subdirs': installdata.install_subdirs
+    }.items():
+        # Mypy doesn't recognize SubdirInstallData as a subclass of InstallDataBase
+        for data in data_list: # type: ignore[attr-defined]
+            data_type = data.data_type or key
+            install_path_name = data.install_path_name
+            if key == 'headers':  # in the headers, install_path_name is the directory
+                install_path_name = os.path.join(install_path_name, os.path.basename(data.path))
+
+            entry = {
+                'destination': install_path_name,
+                'tag': data.tag or None,
+                'subproject': data.subproject or None,
+            }
+
+            if key == 'install_subdirs':
+                exclude_files, exclude_dirs = data.exclude or ([], [])
+                entry['exclude_dirs'] = list(exclude_dirs)
+                entry['exclude_files'] = list(exclude_files)
+
+            plan[data_type] = plan.get(data_type, {})
+            plan[data_type][data.path] = entry
+
+    return plan
+
+def get_target_dir(coredata: cdata.CoreData, subdir: str) -> str:
+    if coredata.get_option(OptionKey('layout')) == 'flat':
+        return 'meson-out'
+    else:
+        return subdir
+
+def list_targets_from_source(intr: IntrospectionInterpreter) -> T.List[T.Dict[str, T.Union[bool, str, T.List[T.Union[str, T.Dict[str, T.Union[str, T.List[str], bool]]]]]]]:
+    tlist = []  # type: T.List[T.Dict[str, T.Union[bool, str, T.List[T.Union[str, T.Dict[str, T.Union[str, T.List[str], bool]]]]]]]
+    root_dir = Path(intr.source_root)
+
+    def nodes_to_paths(node_list: T.List[BaseNode]) -> T.List[Path]:
+        res = []  # type: T.List[Path]
+        for n in node_list:
+            args = []  # type: T.List[BaseNode]
+            if isinstance(n, FunctionNode):
+                args = list(n.args.arguments)
+                if n.func_name in BUILD_TARGET_FUNCTIONS:
+                    args.pop(0)
+            elif isinstance(n, ArrayNode):
+                args = n.args.arguments
+            elif isinstance(n, ArgumentNode):
+                args = n.arguments
+            for j in args:
+                if isinstance(j, StringNode):
+                    assert isinstance(j.value, str)
+                    res += [Path(j.value)]
+                elif isinstance(j, str):
+                    res += [Path(j)]
+        res = [root_dir / i['subdir'] / x for x in res]
+        res = [x.resolve() for x in res]
+        return res
+
+    for i in intr.targets:
+        sources = nodes_to_paths(i['sources'])
+        extra_f = nodes_to_paths(i['extra_files'])
+        outdir = get_target_dir(intr.coredata, i['subdir'])
+
+        tlist += [{
+            'name': i['name'],
+            'id': i['id'],
+            'type': i['type'],
+            'defined_in': i['defined_in'],
+            'filename': [os.path.join(outdir, x) for x in i['outputs']],
+            'build_by_default': i['build_by_default'],
+            'target_sources': [{
+                'language': 'unknown',
+                'compiler': [],
+                'parameters': [],
+                'sources': [str(x) for x in sources],
+                'generated_sources': []
+            }],
+            'depends': [],
+            'extra_files': [str(x) for x in extra_f],
+            'subproject': None, # Subprojects are not supported
+            'installed': i['installed']
+        }]
+
+    return tlist
+
+def list_targets(builddata: build.Build, installdata: backends.InstallData, backend: backends.Backend) -> T.List[T.Any]:
+    tlist = []  # type: T.List[T.Any]
+    build_dir = builddata.environment.get_build_dir()
+    src_dir = builddata.environment.get_source_dir()
+
+    # Fast lookup table for installation files
+    install_lookuptable = {}
+    for i in installdata.targets:
+        basename = os.path.basename(i.fname)
+        install_lookuptable[basename] = [str(PurePath(installdata.prefix, i.outdir, basename))]
+    for s in installdata.symlinks:
+        # A symlink's target must already be in the table; they share the same list
+        # to support chains of symlinks such as .so -> .so.0 -> .so.1.2.3
+        basename = os.path.basename(s.name)
+        try:
+            install_lookuptable[basename] = install_lookuptable[os.path.basename(s.target)]
+            install_lookuptable[basename].append(str(PurePath(installdata.prefix, s.install_path, basename)))
+        except KeyError:
+            pass
+
+    for (idname, target) in builddata.get_targets().items():
+        if not isinstance(target, build.Target):
+            raise RuntimeError('The target object in `builddata.get_targets()` is not of type `build.Target`. Please file a bug with this error message.')
+
+        outdir = get_target_dir(builddata.environment.coredata, target.subdir)
+        t = {
+            'name': target.get_basename(),
+            'id': idname,
+            'type': target.get_typename(),
+            'defined_in': os.path.normpath(os.path.join(src_dir, target.subdir, environment.build_filename)),
+            'filename': [os.path.join(build_dir, outdir, x) for x in target.get_outputs()],
+            'build_by_default': target.build_by_default,
+            'target_sources': backend.get_introspection_data(idname, target),
+            'extra_files': [os.path.normpath(os.path.join(src_dir, x.subdir, x.fname)) for x in target.extra_files],
+            'subproject': target.subproject or None,
+            'dependencies': [d.name for d in getattr(target, 'external_deps', [])],
+            'depends': [lib.get_id() for lib in getattr(target, 'dependencies', [])]
+        }
+
+        vs_module_defs = getattr(target, 'vs_module_defs', None)
+        if vs_module_defs is not None:
+            t['vs_module_defs'] = vs_module_defs.relative_name()
+        win_subsystem = getattr(target, 'win_subsystem', None)
+        if win_subsystem is not None:
+            t['win_subsystem'] = win_subsystem
+
+        if installdata and target.should_install():
+            t['installed'] = True
+            ifn = [install_lookuptable.get(x, [None]) for x in target.get_outputs()]
+            t['install_filename'] = [x for sublist in ifn for x in sublist]  # flatten the list
+        else:
+            t['installed'] = False
+        tlist.append(t)
+    return tlist
+
+def list_buildoptions_from_source(intr: IntrospectionInterpreter) -> T.List[T.Dict[str, T.Union[str, bool, int, T.List[str]]]]:
+    subprojects = [i['name'] for i in intr.project_data['subprojects']]
+    return list_buildoptions(intr.coredata, subprojects)
+
+def list_buildoptions(coredata: cdata.CoreData, subprojects: T.Optional[T.List[str]] = None) -> T.List[T.Dict[str, T.Union[str, bool, int, T.List[str]]]]:
+    optlist = []  # type: T.List[T.Dict[str, T.Union[str, bool, int, T.List[str]]]]
+    subprojects = subprojects or []
+
+    dir_option_names = set(cdata.BUILTIN_DIR_OPTIONS)
+    test_option_names = {OptionKey('errorlogs'),
+                         OptionKey('stdsplit')}
+
+    dir_options: 'cdata.MutableKeyedOptionDictType' = {}
+    test_options: 'cdata.MutableKeyedOptionDictType' = {}
+    core_options: 'cdata.MutableKeyedOptionDictType' = {}
+    for k, v in coredata.options.items():
+        if k in dir_option_names:
+            dir_options[k] = v
+        elif k in test_option_names:
+            test_options[k] = v
+        elif k.is_builtin():
+            core_options[k] = v
+            if not v.yielding:
+                for s in subprojects:
+                    core_options[k.evolve(subproject=s)] = v
+
+    def add_keys(options: 'cdata.KeyedOptionDictType', section: str) -> None:
+        for key, opt in sorted(options.items()):
+            optdict = {'name': str(key), 'value': opt.value, 'section': section,
+                       'machine': key.machine.get_lower_case_name() if coredata.is_per_machine_option(key) else 'any'}
+            if isinstance(opt, cdata.UserStringOption):
+                typestr = 'string'
+            elif isinstance(opt, cdata.UserBooleanOption):
+                typestr = 'boolean'
+            elif isinstance(opt, cdata.UserComboOption):
+                optdict['choices'] = opt.choices
+                typestr = 'combo'
+            elif isinstance(opt, cdata.UserIntegerOption):
+                typestr = 'integer'
+            elif isinstance(opt, cdata.UserArrayOption):
+                typestr = 'array'
+                if opt.choices:
+                    optdict['choices'] = opt.choices
+            else:
+                raise RuntimeError("Unknown option type")
+            optdict['type'] = typestr
+            optdict['description'] = opt.description
+            optlist.append(optdict)
+
+    add_keys(core_options, 'core')
+    add_keys({k: v for k, v in coredata.options.items() if k.is_backend()}, 'backend')
+    add_keys({k: v for k, v in coredata.options.items() if k.is_base()}, 'base')
+    add_keys(
+        {k: v for k, v in sorted(coredata.options.items(), key=lambda i: i[0].machine) if k.is_compiler()},
+        'compiler',
+    )
+    add_keys(dir_options, 'directory')
+    add_keys({k: v for k, v in coredata.options.items() if k.is_project()}, 'user')
+    add_keys(test_options, 'test')
+    return optlist
+
+def find_buildsystem_files_list(src_dir: str) -> T.List[str]:
+    build_files = frozenset({'meson.build', 'meson.options', 'meson_options.txt'})
+    # I feel dirty about this. But only slightly.
+    filelist: T.List[str] = []
+    for root, _, files in os.walk(src_dir):
+        filelist.extend(os.path.relpath(os.path.join(root, f), src_dir)
+                        for f in build_files.intersection(files))
+    return filelist
+
+def list_buildsystem_files(builddata: build.Build, interpreter: Interpreter) -> T.List[str]:
+    src_dir = builddata.environment.get_source_dir()
+    filelist = list(interpreter.get_build_def_files())
+    filelist = [PurePath(src_dir, x).as_posix() for x in filelist]
+    return filelist
+
+def list_compilers(coredata: cdata.CoreData) -> T.Dict[str, T.Dict[str, T.Dict[str, str]]]:
+    compilers: T.Dict[str, T.Dict[str, T.Dict[str, str]]] = {}
+    for machine in ('host', 'build'):
+        compilers[machine] = {}
+        for language, compiler in getattr(coredata.compilers, machine).items():
+            compilers[machine][language] = {
+                'id': compiler.get_id(),
+                'exelist': compiler.get_exelist(),
+                'linker_exelist': compiler.get_linker_exelist(),
+                'file_suffixes': compiler.file_suffixes,
+                'default_suffix': compiler.get_default_suffix(),
+                'version': compiler.version,
+                'full_version': compiler.full_version,
+                'linker_id': compiler.get_linker_id(),
+            }
+    return compilers
+
+def list_deps_from_source(intr: IntrospectionInterpreter) -> T.List[T.Dict[str, T.Union[str, bool]]]:
+    result = []  # type: T.List[T.Dict[str, T.Union[str, bool]]]
+    for i in intr.dependencies:
+        keys = [
+            'name',
+            'required',
+            'version',
+            'has_fallback',
+            'conditional',
+        ]
+        result += [{k: v for k, v in i.items() if k in keys}]
+    return result
+
+def list_deps(coredata: cdata.CoreData, backend: backends.Backend) -> T.List[T.Dict[str, T.Union[str, T.List[str]]]]:
+    result: T.Dict[str, T.Dict[str, T.Union[str, T.List[str]]]] = {}
+
+    def _src_to_str(src_file: T.Union[mesonlib.FileOrString, build.CustomTarget, build.StructuredSources, build.CustomTargetIndex, build.GeneratedList]) -> T.List[str]:
+        if isinstance(src_file, str):
+            return [src_file]
+        if isinstance(src_file, mesonlib.File):
+            return [src_file.absolute_path(backend.source_dir, backend.build_dir)]
+        if isinstance(src_file, (build.CustomTarget, build.CustomTargetIndex, build.GeneratedList)):
+            return src_file.get_outputs()
+        if isinstance(src_file, build.StructuredSources):
+            return [f for s in src_file.as_list() for f in _src_to_str(s)]
+        raise mesonlib.MesonBugException(f'Invalid file type {type(src_file)}.')
+
+    def _create_result(d: Dependency, varname: T.Optional[str] = None) -> T.Dict[str, T.Any]:
+        return {
+            'name': d.name,
+            'type': d.type_name,
+            'version': d.get_version(),
+            'compile_args': d.get_compile_args(),
+            'link_args': d.get_link_args(),
+            'include_directories': [i for idirs in d.get_include_dirs() for i in idirs.to_string_list(backend.source_dir)],
+            'sources': [f for s in d.get_sources() for f in _src_to_str(s)],
+            'extra_files': [f for s in d.get_extra_files() for f in _src_to_str(s)],
+            'dependencies': [e.name for e in d.ext_deps],
+            'depends': [lib.get_id() for lib in getattr(d, 'libraries', [])],
+            'meson_variables': [varname] if varname else [],
+        }
+
+    for d in coredata.deps.host.values():
+        if d.found():
+            result[d.name] = _create_result(d)
+
+    for varname, holder in backend.interpreter.variables.items():
+        if isinstance(holder, ObjectHolder):
+            d = holder.held_object
+            if isinstance(d, Dependency) and d.found():
+                if d.name in result:
+                    T.cast(T.List[str], result[d.name]['meson_variables']).append(varname)
+                else:
+                    result[d.name] = _create_result(d, varname)
+
+    return list(result.values())
+
+def get_test_list(testdata: T.List[backends.TestSerialisation]) -> T.List[T.Dict[str, T.Union[str, int, T.List[str], T.Dict[str, str]]]]:
+    result = []  # type: T.List[T.Dict[str, T.Union[str, int, T.List[str], T.Dict[str, str]]]]
+    for t in testdata:
+        to = {}  # type: T.Dict[str, T.Union[str, int, T.List[str], T.Dict[str, str]]]
+        if isinstance(t.fname, str):
+            fname = [t.fname]
+        else:
+            fname = t.fname
+        to['cmd'] = fname + t.cmd_args
+        if isinstance(t.env, build.EnvironmentVariables):
+            to['env'] = t.env.get_env({})
+        else:
+            to['env'] = t.env
+        to['name'] = t.name
+        to['workdir'] = t.workdir
+        to['timeout'] = t.timeout
+        to['suite'] = t.suite
+        to['is_parallel'] = t.is_parallel
+        to['priority'] = t.priority
+        to['protocol'] = str(t.protocol)
+        to['depends'] = t.depends
+        to['extra_paths'] = t.extra_paths
+        result.append(to)
+    return result
+
+def list_tests(testdata: T.List[backends.TestSerialisation]) -> T.List[T.Dict[str, T.Union[str, int, T.List[str], T.Dict[str, str]]]]:
+    return get_test_list(testdata)
+
+def list_benchmarks(benchdata: T.List[backends.TestSerialisation]) -> T.List[T.Dict[str, T.Union[str, int, T.List[str], T.Dict[str, str]]]]:
+    return get_test_list(benchdata)
+
+def list_machines(builddata: build.Build) -> T.Dict[str, T.Dict[str, T.Union[str, bool]]]:
+    machines: T.Dict[str, T.Dict[str, T.Union[str, bool]]] = {}
+    for m in ('host', 'build', 'target'):
+        machine = getattr(builddata.environment.machines, m)
+        machines[m] = dataclasses.asdict(machine)
+        machines[m]['is_64_bit'] = machine.is_64_bit
+        machines[m]['exe_suffix'] = machine.get_exe_suffix()
+        machines[m]['object_suffix'] = machine.get_object_suffix()
+    return machines
+
+def list_projinfo(builddata: build.Build) -> T.Dict[str, T.Union[str, T.List[T.Dict[str, str]]]]:
+    result = {'version': builddata.project_version,
+              'descriptive_name': builddata.project_name,
+              'subproject_dir': builddata.subproject_dir}    # type: T.Dict[str, T.Union[str, T.List[T.Dict[str, str]]]]
+    subprojects = []
+    for k, v in builddata.subprojects.items():
+        c = {'name': k,
+             'version': v,
+             'descriptive_name': builddata.projects.get(k)}  # type: T.Dict[str, str]
+        subprojects.append(c)
+    result['subprojects'] = subprojects
+    return result
+
+def list_projinfo_from_source(intr: IntrospectionInterpreter) -> T.Dict[str, T.Union[str, T.List[T.Dict[str, str]]]]:
+    sourcedir = intr.source_root
+    files = find_buildsystem_files_list(sourcedir)
+    files = [os.path.normpath(x) for x in files]
+
+    for i in intr.project_data['subprojects']:
+        basedir = os.path.join(intr.subproject_dir, i['name'])
+        i['buildsystem_files'] = [x for x in files if x.startswith(basedir)]
+        files = [x for x in files if not x.startswith(basedir)]
+
+    intr.project_data['buildsystem_files'] = files
+    intr.project_data['subproject_dir'] = intr.subproject_dir
+    return intr.project_data
+
+def print_results(options: argparse.Namespace, results: T.Sequence[T.Tuple[str, T.Union[dict, T.List[T.Any]]]], indent: int) -> int:
+    if not results and not options.force_dict:
+        print('No command specified')
+        return 1
+    elif len(results) == 1 and not options.force_dict:
+        # Keep the existing output format for a single option
+        print(json.dumps(results[0][1], indent=indent))
+    else:
+        out = {}
+        for i in results:
+            out[i[0]] = i[1]
+        print(json.dumps(out, indent=indent))
+    return 0
+
+def get_infodir(builddir: T.Optional[str] = None) -> str:
+    infodir = 'meson-info'
+    if builddir is not None:
+        infodir = os.path.join(builddir, infodir)
+    return infodir
+
+def get_info_file(infodir: str, kind: T.Optional[str] = None) -> str:
+    return os.path.join(infodir,
+                        'meson-info.json' if not kind else f'intro-{kind}.json')
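+# e.g. get_info_file('meson-info', 'targets') -> 'meson-info/intro-targets.json'
+# and get_info_file('meson-info') -> 'meson-info/meson-info.json' (POSIX paths).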
+
+def load_info_file(infodir: str, kind: T.Optional[str] = None) -> T.Any:
+    with open(get_info_file(infodir, kind), encoding='utf-8') as fp:
+        return json.load(fp)
+
+def run(options: argparse.Namespace) -> int:
+    datadir = 'meson-private'
+    infodir = get_infodir(options.builddir)
+    if options.builddir is not None:
+        datadir = os.path.join(options.builddir, datadir)
+    indent = 4 if options.indent else None
+    results = []  # type: T.List[T.Tuple[str, T.Union[dict, T.List[T.Any]]]]
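+    # len('meson.build') == 11: strip a trailing 'meson.build' to recover the source dir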
+    sourcedir = '.' if options.builddir == 'meson.build' else options.builddir[:-11]
+    intro_types = get_meson_introspection_types(sourcedir=sourcedir)
+
+    if 'meson.build' in [os.path.basename(options.builddir), options.builddir]:
+        # Make sure that log entries in other parts of meson don't interfere with the JSON output
+        with redirect_stdout(sys.stderr):
+            backend = backends.get_backend_from_name(options.backend)
+            assert backend is not None
+            intr = IntrospectionInterpreter(sourcedir, '', backend.name, visitors = [AstIDGenerator(), AstIndentationGenerator(), AstConditionLevel()])
+            intr.analyze()
+
+        for key, val in intro_types.items():
+            if (not options.all and not getattr(options, key, False)) or not val.no_bd:
+                continue
+            results += [(key, val.no_bd(intr))]
+        return print_results(options, results, indent)
+
+    try:
+        raw = load_info_file(infodir)
+        intro_vers = raw.get('introspection', {}).get('version', {}).get('full', '0.0.0')
+    except FileNotFoundError:
+        if not os.path.isdir(datadir) or not os.path.isdir(infodir):
+            print('Current directory is not a meson build directory.\n'
+                  'Please specify a valid build dir or change the working directory to it.')
+        else:
+            print('Introspection file {} does not exist.\n'
+                  'It is also possible that the build directory was generated with an old\n'
+                  'meson version. Please regenerate it in this case.'.format(get_info_file(infodir)))
+        return 1
+
+    vers_to_check = get_meson_introspection_required_version()
+    for i in vers_to_check:
+        if not mesonlib.version_compare(intro_vers, i):
+            print('Introspection version {} is not supported. '
+                  'The required version is: {}'
+                  .format(intro_vers, ' and '.join(vers_to_check)))
+            return 1
+
+    # Extract introspection information from JSON
+    for i, v in intro_types.items():
+        if not v.func:
+            continue
+        if not options.all and not getattr(options, i, False):
+            continue
+        try:
+            results += [(i, load_info_file(infodir, i))]
+        except FileNotFoundError:
+            print('Introspection file {} does not exist.'.format(get_info_file(infodir, i)))
+            return 1
+
+    return print_results(options, results, indent)
+
+updated_introspection_files = []  # type: T.List[str]
+
+def write_intro_info(intro_info: T.Sequence[T.Tuple[str, T.Union[dict, T.List[T.Any]]]], info_dir: str) -> None:
+    for kind, data in intro_info:
+        out_file = os.path.join(info_dir, f'intro-{kind}.json')
+        tmp_file = os.path.join(info_dir, 'tmp_dump.json')
+        with open(tmp_file, 'w', encoding='utf-8') as fp:
+            json.dump(data, fp)
+            fp.flush()  # redundant (the with-block close flushes), kept for safety
+        os.replace(tmp_file, out_file)
+        updated_introspection_files.append(kind)
+
+def generate_introspection_file(builddata: build.Build, backend: backends.Backend) -> None:
+    coredata = builddata.environment.get_coredata()
+    intro_types = get_meson_introspection_types(coredata=coredata, builddata=builddata, backend=backend)
+    intro_info = []  # type: T.List[T.Tuple[str, T.Union[dict, T.List[T.Any]]]]
+
+    for key, val in intro_types.items():
+        if not val.func:
+            continue
+        intro_info += [(key, val.func())]
+
+    write_intro_info(intro_info, builddata.environment.info_dir)
+
+def update_build_options(coredata: cdata.CoreData, info_dir: str) -> None:
+    intro_info = [
+        ('buildoptions', list_buildoptions(coredata))
+    ]
+
+    write_intro_info(intro_info, info_dir)
+
+def split_version_string(version: str) -> T.Dict[str, T.Union[str, int]]:
+    vers_list = version.split('.')
+    return {
+        'full': version,
+        'major': int(vers_list[0] if len(vers_list) > 0 else 0),
+        'minor': int(vers_list[1] if len(vers_list) > 1 else 0),
+        'patch': int(vers_list[2] if len(vers_list) > 2 else 0)
+    }
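+# e.g. split_version_string('1.2.3') ->
+#   {'full': '1.2.3', 'major': 1, 'minor': 2, 'patch': 3}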
+
+def write_meson_info_file(builddata: build.Build, errors: list, build_files_updated: bool = False) -> None:
+    info_dir = builddata.environment.info_dir
+    info_file = get_meson_info_file(info_dir)
+    intro_types = get_meson_introspection_types()
+    intro_info = {}
+
+    for i, v in intro_types.items():
+        if not v.func:
+            continue
+        intro_info[i] = {
+            'file': f'intro-{i}.json',
+            'updated': i in updated_introspection_files
+        }
+
+    info_data = {
+        'meson_version': split_version_string(cdata.version),
+        'directories': {
+            'source': builddata.environment.get_source_dir(),
+            'build': builddata.environment.get_build_dir(),
+            'info': info_dir,
+        },
+        'introspection': {
+            'version': split_version_string(get_meson_introspection_version()),
+            'information': intro_info,
+        },
+        'build_files_updated': build_files_updated,
+    }
+
+    if errors:
+        info_data['error'] = True
+        info_data['error_list'] = [x if isinstance(x, str) else str(x) for x in errors]
+    else:
+        info_data['error'] = False
+
+    # Write the data to disc
+    tmp_file = os.path.join(info_dir, 'tmp_dump.json')
+    with open(tmp_file, 'w', encoding='utf-8') as fp:
+        json.dump(info_data, fp)
+        fp.flush()
+    os.replace(tmp_file, info_file)
diff --git a/vendored-meson/meson/mesonbuild/mlog.py b/vendored-meson/meson/mesonbuild/mlog.py
new file mode 100644
index 000000000000..3c95ee84abca
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/mlog.py
@@ -0,0 +1,547 @@
+# Copyright 2013-2014 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This is (mostly) a standalone module used to write logging
+information about Meson runs. Some output goes to screen,
+some to logging dir and some goes to both."""
+
+from __future__ import annotations
+
+import enum
+import os
+import io
+import sys
+import time
+import platform
+import shlex
+import subprocess
+import shutil
+import typing as T
+from contextlib import contextmanager
+from dataclasses import dataclass, field
+from pathlib import Path
+
+if T.TYPE_CHECKING:
+    from ._typing import StringProtocol, SizedStringProtocol
+
+    from .mparser import BaseNode
+
+    TV_Loggable = T.Union[str, 'AnsiDecorator', StringProtocol]
+    TV_LoggableList = T.List[TV_Loggable]
+
+def is_windows() -> bool:
+    platname = platform.system().lower()
+    return platname == 'windows'
+
+def _windows_ansi() -> bool:
+    # windll only exists on windows, so mypy will get mad
+    from ctypes import windll, byref  # type: ignore
+    from ctypes.wintypes import DWORD
+
+    kernel = windll.kernel32
+    stdout = kernel.GetStdHandle(-11)
+    mode = DWORD()
+    if not kernel.GetConsoleMode(stdout, byref(mode)):
+        return False
+    # ENABLE_VIRTUAL_TERMINAL_PROCESSING == 0x4
+    # If the call to enable VT processing fails (returns 0), we fall back to
+    # the original behavior
+    return bool(kernel.SetConsoleMode(stdout, mode.value | 0x4) or os.environ.get('ANSICON'))
+
+def colorize_console() -> bool:
+    _colorize_console = getattr(sys.stdout, 'colorize_console', None)  # type: bool
+    if _colorize_console is not None:
+        return _colorize_console
+
+    try:
+        if is_windows():
+            _colorize_console = os.isatty(sys.stdout.fileno()) and _windows_ansi()
+        else:
+            _colorize_console = os.isatty(sys.stdout.fileno()) and os.environ.get('TERM', 'dumb') != 'dumb'
+    except Exception:
+        _colorize_console = False
+
+    sys.stdout.colorize_console = _colorize_console  # type: ignore[attr-defined]
+    return _colorize_console
+
+def setup_console() -> None:
+    # on Windows, a subprocess might call SetConsoleMode() on the console
+    # connected to stdout and turn off ANSI escape processing. Call this after
+    # running a subprocess to ensure we turn it on again.
+    if is_windows():
+        try:
+            delattr(sys.stdout, 'colorize_console')
+        except AttributeError:
+            pass
+
+_in_ci = 'CI' in os.environ
+
+
+class _Severity(enum.Enum):
+
+    NOTICE = enum.auto()
+    WARNING = enum.auto()
+    ERROR = enum.auto()
+    DEPRECATION = enum.auto()
+
+@dataclass
+class _Logger:
+
+    log_dir: T.Optional[str] = None
+    log_depth: T.List[str] = field(default_factory=list)
+    log_file: T.Optional[T.TextIO] = None
+    log_timestamp_start: T.Optional[float] = None
+    log_fatal_warnings = False
+    log_disable_stdout = False
+    log_errors_only = False
+    logged_once: T.Set[T.Tuple[str, ...]] = field(default_factory=set)
+    log_warnings_counter = 0
+    log_pager: T.Optional['subprocess.Popen'] = None
+
+    _LOG_FNAME: T.ClassVar[str] = 'meson-log.txt'
+
+    @contextmanager
+    def no_logging(self) -> T.Iterator[None]:
+        self.log_disable_stdout = True
+        try:
+            yield
+        finally:
+            self.log_disable_stdout = False
+
+    @contextmanager
+    def force_logging(self) -> T.Iterator[None]:
+        restore = self.log_disable_stdout
+        self.log_disable_stdout = False
+        try:
+            yield
+        finally:
+            self.log_disable_stdout = restore
+
+    def set_quiet(self) -> None:
+        self.log_errors_only = True
+
+    def set_verbose(self) -> None:
+        self.log_errors_only = False
+
+    def set_timestamp_start(self, start: float) -> None:
+        self.log_timestamp_start = start
+
+    def shutdown(self) -> T.Optional[str]:
+        if self.log_file is not None:
+            path = self.log_file.name
+            exception_around_goer = self.log_file
+            self.log_file = None
+            exception_around_goer.close()
+            return path
+        self.stop_pager()
+        return None
+
+    def start_pager(self) -> None:
+        if not colorize_console():
+            return
+        pager_cmd = []
+        if 'PAGER' in os.environ:
+            pager_cmd = shlex.split(os.environ['PAGER'])
+        else:
+            less = shutil.which('less')
+            if not less and is_windows():
+                git = shutil.which('git')
+                if git:
+                    path = Path(git).parents[1] / 'usr' / 'bin'
+                    less = shutil.which('less', path=str(path))
+            if less:
+                pager_cmd = [less]
+        if not pager_cmd:
+            return
+        try:
+            # Set 'LESS' environment variable, rather than arguments in
+            # pager_cmd, to also support the case where the user has 'PAGER'
+            # set to 'less'. Arguments set are:
+            # "R" : support color
+            # "X" : do not clear the screen when leaving the pager
+            # "F" : skip the pager if content fits into the screen
+            env = os.environ.copy()
+            if 'LESS' not in env:
+                env['LESS'] = 'RXF'
+            # Set "-c" for lv to support color
+            if 'LV' not in env:
+                env['LV'] = '-c'
+            self.log_pager = subprocess.Popen(pager_cmd, stdin=subprocess.PIPE,
+                                              text=True, encoding='utf-8', env=env)
+        except Exception as e:
+            # Ignore errors, unless it is a user defined pager.
+            if 'PAGER' in os.environ:
+                from .mesonlib import MesonException
+                raise MesonException(f'Failed to start pager: {str(e)}')
+
+    def stop_pager(self) -> None:
+        if self.log_pager:
+            try:
+                self.log_pager.stdin.flush()
+                self.log_pager.stdin.close()
+            except BrokenPipeError:
+                pass
+            self.log_pager.wait()
+            self.log_pager = None
+
+    def initialize(self, logdir: str, fatal_warnings: bool = False) -> None:
+        self.log_dir = logdir
+        self.log_file = open(os.path.join(logdir, self._LOG_FNAME), 'w', encoding='utf-8')
+        self.log_fatal_warnings = fatal_warnings
+
+    def process_markup(self, args: T.Sequence[TV_Loggable], keep: bool) -> T.List[str]:
+        arr = []  # type: T.List[str]
+        if self.log_timestamp_start is not None:
+            arr = ['[{:.3f}]'.format(time.monotonic() - self.log_timestamp_start)]
+        for arg in args:
+            if arg is None:
+                continue
+            if isinstance(arg, str):
+                arr.append(arg)
+            elif isinstance(arg, AnsiDecorator):
+                arr.append(arg.get_text(keep))
+            else:
+                arr.append(str(arg))
+        return arr
+
+    def force_print(self, *args: str, nested: bool, sep: T.Optional[str] = None,
+                    end: T.Optional[str] = None) -> None:
+        if self.log_disable_stdout:
+            return
+        iostr = io.StringIO()
+        print(*args, sep=sep, end=end, file=iostr)
+
+        raw = iostr.getvalue()
+        if self.log_depth:
+            prepend = self.log_depth[-1] + '| ' if nested else ''
+            lines = []
+            for l in raw.split('\n'):
+                l = l.strip()
+                lines.append(prepend + l if l else '')
+            raw = '\n'.join(lines)
+
+        # _Something_ is going to get printed.
+        try:
+            output = self.log_pager.stdin if self.log_pager else None
+            print(raw, end='', file=output)
+        except UnicodeEncodeError:
+            cleaned = raw.encode('ascii', 'replace').decode('ascii')
+            print(cleaned, end='')
+
+    def debug(self, *args: TV_Loggable, sep: T.Optional[str] = None,
+              end: T.Optional[str] = None) -> None:
+        arr = self.process_markup(args, False)
+        if self.log_file is not None:
+            print(*arr, file=self.log_file, sep=sep, end=end)
+            self.log_file.flush()
+
+    def _log(self, *args: TV_Loggable, is_error: bool = False,
+             nested: bool = True, sep: T.Optional[str] = None,
+             end: T.Optional[str] = None) -> None:
+        arr = self.process_markup(args, False)
+        if self.log_file is not None:
+            print(*arr, file=self.log_file, sep=sep, end=end)
+            self.log_file.flush()
+        if colorize_console():
+            arr = self.process_markup(args, True)
+        if not self.log_errors_only or is_error:
+            self.force_print(*arr, nested=nested, sep=sep, end=end)
+
+    def _debug_log_cmd(self, cmd: str, args: T.List[str]) -> None:
+        if not _in_ci:
+            return
+        args = [f'"{x}"' for x in args]  # Quote all args, just in case
+        self.debug('!meson_ci!/{} {}'.format(cmd, ' '.join(args)))
+
+    def cmd_ci_include(self, file: str) -> None:
+        self._debug_log_cmd('ci_include', [file])
+
+    def log(self, *args: TV_Loggable, is_error: bool = False,
+            once: bool = False, nested: bool = True,
+            sep: T.Optional[str] = None,
+            end: T.Optional[str] = None) -> None:
+        if once:
+            self._log_once(*args, is_error=is_error, nested=nested, sep=sep, end=end)
+        else:
+            self._log(*args, is_error=is_error, nested=nested, sep=sep, end=end)
+
+    def _log_once(self, *args: TV_Loggable, is_error: bool = False,
+                  nested: bool = True, sep: T.Optional[str] = None,
+                  end: T.Optional[str] = None) -> None:
+        """Log variant that only prints a given message one time per meson invocation.
+
+        This considers ansi decorated values by the values they wrap without
+        regard for the AnsiDecorator itself.
+        """
+        def to_str(x: TV_Loggable) -> str:
+            if isinstance(x, str):
+                return x
+            if isinstance(x, AnsiDecorator):
+                return x.text
+            return str(x)
+        t = tuple(to_str(a) for a in args)
+        if t in self.logged_once:
+            return
+        self.logged_once.add(t)
+        self._log(*args, is_error=is_error, nested=nested, sep=sep, end=end)
+
+    def _log_error(self, severity: _Severity, *rargs: TV_Loggable,
+                   once: bool = False, fatal: bool = True,
+                   location: T.Optional[BaseNode] = None,
+                   nested: bool = True, sep: T.Optional[str] = None,
+                   end: T.Optional[str] = None,
+                   is_error: bool = True) -> None:
+        from .mesonlib import MesonException, relpath
+
+        # The typing requirements here are non-obvious. Lists are invariant,
+        # therefore T.List[A] and T.List[T.Union[A, B]] are not able to be joined
+        if severity is _Severity.NOTICE:
+            label = [bold('NOTICE:')]  # type: TV_LoggableList
+        elif severity is _Severity.WARNING:
+            label = [yellow('WARNING:')]
+        elif severity is _Severity.ERROR:
+            label = [red('ERROR:')]
+        elif severity is _Severity.DEPRECATION:
+            label = [red('DEPRECATION:')]
+        # rargs is a tuple, not a list
+        args = label + list(rargs)
+
+        if location is not None:
+            location_file = relpath(location.filename, os.getcwd())
+            location_str = get_error_location_string(location_file, location.lineno)
+            # Unions are frankly awful, and we have to T.cast here to get mypy
+            # to understand that the list concatenation is safe
+            location_list = T.cast('TV_LoggableList', [location_str])
+            args = location_list + args
+
+        self.log(*args, once=once, nested=nested, sep=sep, end=end, is_error=is_error)
+
+        self.log_warnings_counter += 1
+
+        if self.log_fatal_warnings and fatal:
+            raise MesonException("Fatal warnings enabled, aborting")
+
+    def error(self, *args: TV_Loggable,
+              once: bool = False, fatal: bool = True,
+              location: T.Optional[BaseNode] = None,
+              nested: bool = True, sep: T.Optional[str] = None,
+              end: T.Optional[str] = None) -> None:
+        return self._log_error(_Severity.ERROR, *args, once=once, fatal=fatal, location=location,
+                               nested=nested, sep=sep, end=end, is_error=True)
+
+    def warning(self, *args: TV_Loggable,
+                once: bool = False, fatal: bool = True,
+                location: T.Optional[BaseNode] = None,
+                nested: bool = True, sep: T.Optional[str] = None,
+                end: T.Optional[str] = None) -> None:
+        return self._log_error(_Severity.WARNING, *args, once=once, fatal=fatal, location=location,
+                               nested=nested, sep=sep, end=end, is_error=True)
+
+    def deprecation(self, *args: TV_Loggable,
+                    once: bool = False, fatal: bool = True,
+                    location: T.Optional[BaseNode] = None,
+                    nested: bool = True, sep: T.Optional[str] = None,
+                    end: T.Optional[str] = None) -> None:
+        return self._log_error(_Severity.DEPRECATION, *args, once=once, fatal=fatal, location=location,
+                               nested=nested, sep=sep, end=end, is_error=True)
+
+    def notice(self, *args: TV_Loggable,
+               once: bool = False, fatal: bool = True,
+               location: T.Optional[BaseNode] = None,
+               nested: bool = True, sep: T.Optional[str] = None,
+               end: T.Optional[str] = None) -> None:
+        return self._log_error(_Severity.NOTICE, *args, once=once, fatal=fatal, location=location,
+                               nested=nested, sep=sep, end=end, is_error=False)
+
+    def exception(self, e: Exception, prefix: T.Optional[AnsiDecorator] = None) -> None:
+        if prefix is None:
+            prefix = red('ERROR:')
+        self.log()
+        args = []  # type: T.List[T.Union[AnsiDecorator, str]]
+        if all(getattr(e, a, None) is not None for a in ['file', 'lineno', 'colno']):
+            # Mypy doesn't follow hasattr, and it's pretty easy to visually inspect
+            # that this is correct, so we'll just ignore it.
+            path = get_relative_path(Path(e.file), Path(os.getcwd()))  # type: ignore
+            args.append(f'{path}:{e.lineno}:{e.colno}:')  # type: ignore
+        if prefix:
+            args.append(prefix)
+        args.append(str(e))
+
+        with self.force_logging():
+            self.log(*args, is_error=True)
+
+    @contextmanager
+    def nested(self, name: str = '') -> T.Generator[None, None, None]:
+        self.log_depth.append(name)
+        try:
+            yield
+        finally:
+            self.log_depth.pop()
+
+    def get_log_dir(self) -> str:
+        return self.log_dir
+
+    def get_log_depth(self) -> int:
+        return len(self.log_depth)
+
+    @contextmanager
+    def nested_warnings(self) -> T.Iterator[None]:
+        old = self.log_warnings_counter
+        self.log_warnings_counter = 0
+        try:
+            yield
+        finally:
+            self.log_warnings_counter = old
+
+    def get_warning_count(self) -> int:
+        return self.log_warnings_counter
+
+_logger = _Logger()
+cmd_ci_include = _logger.cmd_ci_include
+debug = _logger.debug
+deprecation = _logger.deprecation
+error = _logger.error
+exception = _logger.exception
+force_print = _logger.force_print
+get_log_depth = _logger.get_log_depth
+get_log_dir = _logger.get_log_dir
+get_warning_count = _logger.get_warning_count
+initialize = _logger.initialize
+log = _logger.log
+nested = _logger.nested
+nested_warnings = _logger.nested_warnings
+no_logging = _logger.no_logging
+notice = _logger.notice
+process_markup = _logger.process_markup
+set_quiet = _logger.set_quiet
+set_timestamp_start = _logger.set_timestamp_start
+set_verbose = _logger.set_verbose
+shutdown = _logger.shutdown
+start_pager = _logger.start_pager
+stop_pager = _logger.stop_pager
+warning = _logger.warning
+
+class AnsiDecorator:
+    plain_code = "\033[0m"
+
+    def __init__(self, text: str, code: str, quoted: bool = False):
+        self.text = text
+        self.code = code
+        self.quoted = quoted
+
+    def get_text(self, with_codes: bool) -> str:
+        text = self.text
+        if with_codes and self.code:
+            text = self.code + self.text + AnsiDecorator.plain_code
+        if self.quoted:
+            text = f'"{text}"'
+        return text
+
+    def __len__(self) -> int:
+        return len(self.text)
+
+    def __str__(self) -> str:
+        return self.get_text(colorize_console())
+
+class AnsiText:
+    def __init__(self, *args: 'SizedStringProtocol'):
+        self.args = args
+
+    def __len__(self) -> int:
+        return sum(len(x) for x in self.args)
+
+    def __str__(self) -> str:
+        return ''.join(str(x) for x in self.args)
+
+
+def bold(text: str, quoted: bool = False) -> AnsiDecorator:
+    return AnsiDecorator(text, "\033[1m", quoted=quoted)
+
+def italic(text: str, quoted: bool = False) -> AnsiDecorator:
+    return AnsiDecorator(text, "\033[3m", quoted=quoted)
+
+def plain(text: str) -> AnsiDecorator:
+    return AnsiDecorator(text, "")
+
+def red(text: str) -> AnsiDecorator:
+    return AnsiDecorator(text, "\033[1;31m")
+
+def green(text: str) -> AnsiDecorator:
+    return AnsiDecorator(text, "\033[1;32m")
+
+def yellow(text: str) -> AnsiDecorator:
+    return AnsiDecorator(text, "\033[1;33m")
+
+def blue(text: str) -> AnsiDecorator:
+    return AnsiDecorator(text, "\033[1;34m")
+
+def cyan(text: str) -> AnsiDecorator:
+    return AnsiDecorator(text, "\033[1;36m")
+
+def normal_red(text: str) -> AnsiDecorator:
+    return AnsiDecorator(text, "\033[31m")
+
+def normal_green(text: str) -> AnsiDecorator:
+    return AnsiDecorator(text, "\033[32m")
+
+def normal_yellow(text: str) -> AnsiDecorator:
+    return AnsiDecorator(text, "\033[33m")
+
+def normal_blue(text: str) -> AnsiDecorator:
+    return AnsiDecorator(text, "\033[34m")
+
+def normal_cyan(text: str) -> AnsiDecorator:
+    return AnsiDecorator(text, "\033[36m")
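+
+# Usage sketch: log(bold('Program'), green('found')) renders 'Program' in
+# bold and 'found' in bright green when color output is enabled; when it is
+# disabled, str() on each decorator falls back to the plain text.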
+
+def get_error_location_string(fname: StringProtocol, lineno: int) -> str:
+    return f'{fname}:{lineno}:'
+
+def get_relative_path(target: Path, current: Path) -> Path:
+    """Get the path to target from current"""
+    # Go up "current" until we find a common ancestor to target
+    acc = ['.']
+    for part in [current, *current.parents]:
+        try:
+            path = target.relative_to(part)
+            return Path(*acc, path)
+        except ValueError:
+            pass
+        acc += ['..']
+
+    # we failed, should not get here
+    return target
+
+# Format a list for logging purposes as a string. It separates
+# all but the last item with commas, and the last with 'and'.
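+# For example:
+#     format_list(['a', 'b', 'c'])  # -> 'a, b and c'
+#     format_list(['a', 'b'])       # -> 'a and b'
+#     format_list(['a'])            # -> 'a'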
+def format_list(input_list: T.List[str]) -> str:
+    l = len(input_list)
+    if l > 2:
+        return ' and '.join([', '.join(input_list[:-1]), input_list[-1]])
+    elif l == 2:
+        return ' and '.join(input_list)
+    elif l == 1:
+        return input_list[0]
+    else:
+        return ''
+
+
+def code_line(text: str, line: str, colno: int) -> str:
+    """Print a line with a caret pointing to the colno
+
+    :param text: A message to display before the line
+    :param line: The line of code to be pointed to
+    :param colno: The column number to point at
+    :return: A formatted string of the text, line, and a caret
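+
+    For example, code_line('Parse error', 'a = ;', 4) produces:
+
+        Parse error
+        a = ;
+            ^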
+    """
+    return f'{text}\n{line}\n{" " * colno}^'
diff --git a/vendored-meson/meson/mesonbuild/modules/__init__.py b/vendored-meson/meson/mesonbuild/modules/__init__.py
new file mode 100644
index 000000000000..daac9edb6dab
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/modules/__init__.py
@@ -0,0 +1,277 @@
+# Copyright 2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This file contains the base representation for import('modname')
+
+from __future__ import annotations
+import dataclasses
+import typing as T
+
+from .. import build, mesonlib
+from ..build import IncludeDirs
+from ..interpreterbase.decorators import noKwargs, noPosargs
+from ..mesonlib import relpath, HoldableObject, MachineChoice
+from ..programs import ExternalProgram
+
+if T.TYPE_CHECKING:
+    from ..interpreter import Interpreter
+    from ..interpreter.interpreterobjects import MachineHolder
+    from ..interpreterbase import TYPE_var, TYPE_kwargs
+    from ..programs import OverrideProgram
+    from ..wrap import WrapMode
+    from ..build import Executable
+    from ..dependencies import Dependency
+
+class ModuleState:
+    """Object passed to all module methods.
+
+    This is a WIP API provided to modules; it should be extended to have
+    everything needed so that modules do not touch any other part of Meson's
+    internal APIs.
+    """
+
+    def __init__(self, interpreter: 'Interpreter') -> None:
+        # Keep it private, it should be accessed only through methods.
+        self._interpreter = interpreter
+
+        self.source_root = interpreter.environment.get_source_dir()
+        self.build_to_src = relpath(interpreter.environment.get_source_dir(),
+                                    interpreter.environment.get_build_dir())
+        self.subproject = interpreter.subproject
+        self.subdir = interpreter.subdir
+        self.root_subdir = interpreter.root_subdir
+        self.current_lineno = interpreter.current_lineno
+        self.environment = interpreter.environment
+        self.project_name = interpreter.build.project_name
+        self.project_version = interpreter.build.dep_manifest[interpreter.active_projectname].version
+        # The backend object is under-used right now, but we will need it:
+        # https://github.com/mesonbuild/meson/issues/1419
+        self.backend = interpreter.backend
+        self.targets = interpreter.build.targets
+        self.data = interpreter.build.data
+        self.headers = interpreter.build.get_headers()
+        self.man = interpreter.build.get_man()
+        self.global_args = interpreter.build.global_args.host
+        self.project_args = interpreter.build.projects_args.host.get(interpreter.subproject, {})
+        self.build_machine = T.cast('MachineHolder', interpreter.builtin['build_machine']).held_object
+        self.host_machine = T.cast('MachineHolder', interpreter.builtin['host_machine']).held_object
+        self.target_machine = T.cast('MachineHolder', interpreter.builtin['target_machine']).held_object
+        self.current_node = interpreter.current_node
+
+    def get_include_args(self, include_dirs: T.Iterable[T.Union[str, build.IncludeDirs]], prefix: str = '-I') -> T.List[str]:
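+        # E.g. get_include_args(['foo']) returns ['-Ifoo']; IncludeDirs
+        # objects are expanded relative to the source and build directories.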
+        if not include_dirs:
+            return []
+
+        srcdir = self.environment.get_source_dir()
+        builddir = self.environment.get_build_dir()
+
+        dirs_str: T.List[str] = []
+        for dirs in include_dirs:
+            if isinstance(dirs, str):
+                dirs_str += [f'{prefix}{dirs}']
+            else:
+                dirs_str.extend([f'{prefix}{i}' for i in dirs.to_string_list(srcdir, builddir)])
+                dirs_str.extend([f'{prefix}{i}' for i in dirs.get_extra_build_dirs()])
+
+        return dirs_str
+
+    def find_program(self, prog: T.Union[mesonlib.FileOrString, T.List[mesonlib.FileOrString]],
+                     required: bool = True,
+                     version_func: T.Optional[T.Callable[[T.Union[ExternalProgram, Executable, OverrideProgram]], str]] = None,
+                     wanted: T.Optional[str] = None, silent: bool = False,
+                     for_machine: MachineChoice = MachineChoice.HOST) -> T.Union[ExternalProgram, Executable, OverrideProgram]:
+        if not isinstance(prog, list):
+            prog = [prog]
+        return self._interpreter.find_program_impl(prog, required=required, version_func=version_func,
+                                                   wanted=wanted, silent=silent, for_machine=for_machine)
+
+    def find_tool(self, name: str, depname: str, varname: str, required: bool = True,
+                  wanted: T.Optional[str] = None) -> T.Union['Executable', ExternalProgram, 'OverrideProgram']:
+        # Look in overrides in case it's built as subproject
+        progobj = self._interpreter.program_from_overrides([name], [])
+        if progobj is not None:
+            return progobj
+
+        # Look in machine file
+        prog_list = self.environment.lookup_binary_entry(MachineChoice.HOST, name)
+        if prog_list is not None:
+            return ExternalProgram.from_entry(name, prog_list)
+
+        # Check if pkgconfig has a variable
+        dep = self.dependency(depname, native=True, required=False, wanted=wanted)
+        if dep.found() and dep.type_name == 'pkgconfig':
+            value = dep.get_variable(pkgconfig=varname)
+            if value:
+                return ExternalProgram(name, [value])
+
+        # Normal program lookup
+        return self.find_program(name, required=required, wanted=wanted)
+
+    def dependency(self, depname: str, native: bool = False, required: bool = True,
+                   wanted: T.Optional[str] = None) -> 'Dependency':
+        kwargs: T.Dict[str, object] = {'native': native, 'required': required}
+        if wanted:
+            kwargs['version'] = wanted
+        # FIXME: Even if we fix the function, mypy still can't figure out what's
+        # going on here. And we really don't want to call interpreter
+        # implementations of meson functions anyway.
+        return self._interpreter.func_dependency(self.current_node, [depname], kwargs) # type: ignore
+
+    def test(self, args: T.Tuple[str, T.Union[build.Executable, build.Jar, 'ExternalProgram', mesonlib.File]],
+             workdir: T.Optional[str] = None,
+             env: T.Optional[T.Union[T.List[str], T.Dict[str, str], str]] = None,
+             depends: T.Optional[T.List[T.Union[build.CustomTarget, build.BuildTarget]]] = None) -> None:
+        kwargs = {'workdir': workdir,
+                  'env': env,
+                  'depends': depends,
+                  }
+        # typed_* takes a list, and gives a tuple to func_test. Violating that constraint
+        # makes the universe (or at least use of this function) implode
+        real_args = list(args)
+        # TODO: Use interpreter internal API, but we need to go through @typed_kwargs
+        self._interpreter.func_test(self.current_node, real_args, kwargs)
+
+    def get_option(self, name: str, subproject: str = '',
+                   machine: MachineChoice = MachineChoice.HOST,
+                   lang: T.Optional[str] = None,
+                   module: T.Optional[str] = None) -> T.Union[T.List[str], str, int, bool, 'WrapMode']:
+        return self.environment.coredata.get_option(mesonlib.OptionKey(name, subproject, machine, lang, module))
+
+    def is_user_defined_option(self, name: str, subproject: str = '',
+                               machine: MachineChoice = MachineChoice.HOST,
+                               lang: T.Optional[str] = None,
+                               module: T.Optional[str] = None) -> bool:
+        key = mesonlib.OptionKey(name, subproject, machine, lang, module)
+        return key in self._interpreter.user_defined_options.cmd_line_options
+
+    def process_include_dirs(self, dirs: T.Iterable[T.Union[str, IncludeDirs]]) -> T.Iterable[IncludeDirs]:
+        """Convert raw include directory arguments to only IncludeDirs
+
+        :param dirs: An iterable of strings and IncludeDirs
+        :return: None
+        :yield: IncludeDirs objects
+        """
+        for d in dirs:
+            if isinstance(d, IncludeDirs):
+                yield d
+            else:
+                yield self._interpreter.build_incdir_object([d])
+
+
+class ModuleObject(HoldableObject):
+    """Base class for all objects returned by modules
+    """
+    def __init__(self) -> None:
+        self.methods: T.Dict[
+            str,
+            T.Callable[[ModuleState, T.List['TYPE_var'], 'TYPE_kwargs'], T.Union[ModuleReturnValue, 'TYPE_var']]
+        ] = {}
+
+
+class MutableModuleObject(ModuleObject):
+    pass
+
+
+@dataclasses.dataclass
+class ModuleInfo:
+
+    """Metadata about a Module."""
+
+    name: str
+    added: T.Optional[str] = None
+    deprecated: T.Optional[str] = None
+    unstable: bool = False
+    stabilized: T.Optional[str] = None
+
+
+class NewExtensionModule(ModuleObject):
+
+    """Class for modern modules
+
+    provides the found method.
+    """
+
+    INFO: ModuleInfo
+
+    def __init__(self) -> None:
+        super().__init__()
+        self.methods.update({
+            'found': self.found_method,
+        })
+
+    @noPosargs
+    @noKwargs
+    def found_method(self, state: 'ModuleState', args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> bool:
+        return self.found()
+
+    @staticmethod
+    def found() -> bool:
+        return True
+
+    def postconf_hook(self, b: build.Build) -> None:
+        pass
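+
+# A minimal sketch of a module built on this class (the 'example' name and
+# 'greet' method are purely illustrative):
+#
+#     class ExampleModule(NewExtensionModule):
+#         INFO = ModuleInfo('example', added='1.0.0')
+#
+#         def __init__(self) -> None:
+#             super().__init__()
+#             self.methods.update({'greet': self.greet})
+#
+#         def greet(self, state: ModuleState,
+#                   args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> str:
+#             return f'hello from {self.INFO.name}'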
+
+# FIXME: Port all modules to stop using self.interpreter and use API on
+# ModuleState instead. Modules should stop using this class and instead use
+# ModuleObject base class.
+class ExtensionModule(NewExtensionModule):
+    def __init__(self, interpreter: 'Interpreter') -> None:
+        super().__init__()
+        self.interpreter = interpreter
+
+class NotFoundExtensionModule(NewExtensionModule):
+
+    """Class for modern modules
+
+    provides the found method.
+    """
+
+    def __init__(self, name: str) -> None:
+        super().__init__()
+        self.INFO = ModuleInfo(name)
+
+    @staticmethod
+    def found() -> bool:
+        return False
+
+
+def is_module_library(fname: mesonlib.FileOrString) -> bool:
+    '''
+    Check if the file is a library-like file generated by a module-specific
+    target, such as GirTarget or TypelibTarget
+    '''
+    suffix = fname.split('.')[-1]
+    return suffix in {'gir', 'typelib'}
+
+
+class ModuleReturnValue:
+    def __init__(self, return_value: T.Optional['TYPE_var'],
+                 new_objects: T.Sequence[T.Union['TYPE_var', 'build.ExecutableSerialisation']]) -> None:
+        self.return_value = return_value
+        assert isinstance(new_objects, list)
+        self.new_objects: T.List[T.Union['TYPE_var', 'build.ExecutableSerialisation']] = new_objects
+
+class GResourceTarget(build.CustomTarget):
+    pass
+
+class GResourceHeaderTarget(build.CustomTarget):
+    pass
+
+class GirTarget(build.CustomTarget):
+    pass
+
+class TypelibTarget(build.CustomTarget):
+    pass
+
+class VapiTarget(build.CustomTarget):
+    pass
diff --git a/vendored-meson/meson/mesonbuild/modules/cmake.py b/vendored-meson/meson/mesonbuild/modules/cmake.py
new file mode 100644
index 000000000000..c6048f92502b
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/modules/cmake.py
@@ -0,0 +1,450 @@
+# Copyright 2018 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import annotations
+import re
+import os, os.path, pathlib
+import shutil
+import typing as T
+
+from . import ExtensionModule, ModuleReturnValue, ModuleObject, ModuleInfo
+
+from .. import build, mesonlib, mlog, dependencies
+from ..cmake import TargetOptions, cmake_defines_to_args
+from ..interpreter import SubprojectHolder
+from ..interpreter.type_checking import REQUIRED_KW, INSTALL_DIR_KW, NoneType, in_set_validator
+from ..interpreterbase import (
+    FeatureNew,
+    FeatureNewKwargs,
+
+    stringArgs,
+    permittedKwargs,
+    noPosargs,
+    noKwargs,
+
+    InvalidArguments,
+    InterpreterException,
+
+    typed_pos_args,
+    typed_kwargs,
+    KwargInfo,
+    ContainerTypeInfo,
+)
+
+if T.TYPE_CHECKING:
+    from typing_extensions import TypedDict
+
+    from . import ModuleState
+    from ..cmake import SingleTargetOptions
+    from ..environment import Environment
+    from ..interpreter import Interpreter, kwargs
+    from ..interpreterbase import TYPE_kwargs, TYPE_var
+
+    class WriteBasicPackageVersionFile(TypedDict):
+
+        arch_independent: bool
+        compatibility: str
+        install_dir: T.Optional[str]
+        name: str
+        version: str
+
+    class ConfigurePackageConfigFile(TypedDict):
+
+        configuration: T.Union[build.ConfigurationData, dict]
+        input: T.Union[str, mesonlib.File]
+        install_dir: T.Optional[str]
+        name: str
+
+    class Subproject(kwargs.ExtractRequired):
+
+        options: T.Optional[CMakeSubprojectOptions]
+        cmake_options: T.List[str]
+
+
+COMPATIBILITIES = ['AnyNewerVersion', 'SameMajorVersion', 'SameMinorVersion', 'ExactVersion']
+
+# Taken from https://github.com/Kitware/CMake/blob/master/Modules/CMakePackageConfigHelpers.cmake
+PACKAGE_INIT_BASE = '''
+####### Expanded from \\@PACKAGE_INIT\\@ by configure_package_config_file() #######
+####### Any changes to this file will be overwritten by the next CMake run ####
+####### The input file was @inputFileName@ ########
+
+get_filename_component(PACKAGE_PREFIX_DIR "${CMAKE_CURRENT_LIST_DIR}/@PACKAGE_RELATIVE_PATH@" ABSOLUTE)
+'''
+PACKAGE_INIT_EXT = '''
+# Use original install prefix when loaded through a "/usr move"
+# cross-prefix symbolic link such as /lib -> /usr/lib.
+get_filename_component(_realCurr "${CMAKE_CURRENT_LIST_DIR}" REALPATH)
+get_filename_component(_realOrig "@absInstallDir@" REALPATH)
+if(_realCurr STREQUAL _realOrig)
+  set(PACKAGE_PREFIX_DIR "@installPrefix@")
+endif()
+unset(_realOrig)
+unset(_realCurr)
+'''
+PACKAGE_INIT_SET_AND_CHECK = '''
+macro(set_and_check _var _file)
+  set(${_var} "${_file}")
+  if(NOT EXISTS "${_file}")
+    message(FATAL_ERROR "File or directory ${_file} referenced by variable ${_var} does not exist !")
+  endif()
+endmacro()
+
+####################################################################################
+'''
+
+class CMakeSubproject(ModuleObject):
+    def __init__(self, subp: SubprojectHolder):
+        assert isinstance(subp, SubprojectHolder)
+        assert subp.cm_interpreter is not None
+        super().__init__()
+        self.subp = subp
+        self.cm_interpreter = subp.cm_interpreter
+        self.methods.update({'get_variable': self.get_variable,
+                             'dependency': self.dependency,
+                             'include_directories': self.include_directories,
+                             'target': self.target,
+                             'target_type': self.target_type,
+                             'target_list': self.target_list,
+                             'found': self.found_method,
+                             })
+
+    def _args_to_info(self, args: T.List[str]) -> T.Dict[str, str]:
+        if len(args) != 1:
+            raise InterpreterException('Exactly one argument is required.')
+
+        tgt = args[0]
+        res = self.cm_interpreter.target_info(tgt)
+        if res is None:
+            raise InterpreterException(f'The CMake target {tgt} does not exist\n' +
+                                       '  Use the following command in your meson.build to list all available targets:\n\n' +
+                                       '    message(\'CMake targets:\\n - \' + \'\\n - \'.join(<cmake_subproject>.target_list()))')
+
+        # Make sure that all keys are present (if not this is a bug)
+        assert all(x in res for x in ['inc', 'src', 'dep', 'tgt', 'func'])
+        return res
+
+    @noKwargs
+    @stringArgs
+    def get_variable(self, state: ModuleState, args: T.List[str], kwargs: TYPE_kwargs) -> TYPE_var:
+        return self.subp.get_variable_method(args, kwargs)
+
+    @FeatureNewKwargs('dependency', '0.56.0', ['include_type'])
+    @permittedKwargs({'include_type'})
+    @stringArgs
+    def dependency(self, state: ModuleState, args: T.List[str], kwargs: T.Dict[str, str]) -> dependencies.Dependency:
+        info = self._args_to_info(args)
+        if info['func'] == 'executable':
+            raise InvalidArguments(f'{args[0]} is an executable and does not support the dependency() method. Use target() instead.')
+        orig = self.get_variable(state, [info['dep']], {})
+        assert isinstance(orig, dependencies.Dependency)
+        actual = orig.include_type
+        if 'include_type' in kwargs and kwargs['include_type'] != actual:
+            mlog.debug('Current include type is {}. Converting to requested {}'.format(actual, kwargs['include_type']))
+            return orig.generate_system_dependency(kwargs['include_type'])
+        return orig
+
+    @noKwargs
+    @stringArgs
+    def include_directories(self, state: ModuleState, args: T.List[str], kwargs: TYPE_kwargs) -> build.IncludeDirs:
+        info = self._args_to_info(args)
+        return self.get_variable(state, [info['inc']], kwargs)
+
+    @noKwargs
+    @stringArgs
+    def target(self, state: ModuleState, args: T.List[str], kwargs: TYPE_kwargs) -> build.Target:
+        info = self._args_to_info(args)
+        return self.get_variable(state, [info['tgt']], kwargs)
+
+    @noKwargs
+    @stringArgs
+    def target_type(self, state: ModuleState, args: T.List[str], kwargs: TYPE_kwargs) -> str:
+        info = self._args_to_info(args)
+        return info['func']
+
+    @noPosargs
+    @noKwargs
+    def target_list(self, state: ModuleState, args: TYPE_var, kwargs: TYPE_kwargs) -> T.List[str]:
+        return self.cm_interpreter.target_list()
+
+    @noPosargs
+    @noKwargs
+    @FeatureNew('CMakeSubproject.found()', '0.53.2')
+    def found_method(self, state: ModuleState, args: TYPE_var, kwargs: TYPE_kwargs) -> bool:
+        return self.subp is not None
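+
+    # Typical meson.build usage (illustrative target and variable names):
+    #
+    #     cmake = import('cmake')
+    #     sub = cmake.subproject('libfoo')
+    #     foo_dep = sub.dependency('foo')
+    #     foo_inc = sub.include_directories('foo')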
+
+
+class CMakeSubprojectOptions(ModuleObject):
+    def __init__(self) -> None:
+        super().__init__()
+        self.cmake_options = []  # type: T.List[str]
+        self.target_options = TargetOptions()
+
+        self.methods.update(
+            {
+                'add_cmake_defines': self.add_cmake_defines,
+                'set_override_option': self.set_override_option,
+                'set_install': self.set_install,
+                'append_compile_args': self.append_compile_args,
+                'append_link_args': self.append_link_args,
+                'clear': self.clear,
+            }
+        )
+
+    def _get_opts(self, kwargs: dict) -> SingleTargetOptions:
+        if 'target' in kwargs:
+            return self.target_options[kwargs['target']]
+        return self.target_options.global_options
+
+    @typed_pos_args('subproject_options.add_cmake_defines', varargs=dict)
+    @noKwargs
+    def add_cmake_defines(self, state: ModuleState, args: T.Tuple[T.List[T.Dict[str, TYPE_var]]], kwargs: TYPE_kwargs) -> None:
+        self.cmake_options += cmake_defines_to_args(args[0])
+
+    @typed_pos_args('subproject_options.set_override_option', str, str)
+    @permittedKwargs({'target'})
+    def set_override_option(self, state: ModuleState, args: T.Tuple[str, str], kwargs: TYPE_kwargs) -> None:
+        self._get_opts(kwargs).set_opt(args[0], args[1])
+
+    @typed_pos_args('subproject_options.set_install', bool)
+    @permittedKwargs({'target'})
+    def set_install(self, state: ModuleState, args: T.Tuple[bool], kwargs: TYPE_kwargs) -> None:
+        self._get_opts(kwargs).set_install(args[0])
+
+    @typed_pos_args('subproject_options.append_compile_args', str, varargs=str, min_varargs=1)
+    @permittedKwargs({'target'})
+    def append_compile_args(self, state: ModuleState, args: T.Tuple[str, T.List[str]], kwargs: TYPE_kwargs) -> None:
+        self._get_opts(kwargs).append_args(args[0], args[1])
+
+    @typed_pos_args('subproject_options.append_link_args', varargs=str, min_varargs=1)
+    @permittedKwargs({'target'})
+    def append_link_args(self, state: ModuleState, args: T.Tuple[T.List[str]], kwargs: TYPE_kwargs) -> None:
+        self._get_opts(kwargs).append_link_args(args[0])
+
+    @noPosargs
+    @noKwargs
+    def clear(self, state: ModuleState, args: TYPE_var, kwargs: TYPE_kwargs) -> None:
+        self.cmake_options.clear()
+        self.target_options = TargetOptions()
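+
+    # Illustrative meson.build usage:
+    #
+    #     opts = cmake.subproject_options()
+    #     opts.add_cmake_defines({'SOME_DEFINE': 'value'})
+    #     opts.set_override_option('cpp_std', 'c++17', target: 'foo')
+    #     sub = cmake.subproject('libfoo', options: opts)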
+
+
+class CmakeModule(ExtensionModule):
+    cmake_detected = False
+    cmake_root = None
+
+    INFO = ModuleInfo('cmake', '0.50.0')
+
+    def __init__(self, interpreter: Interpreter) -> None:
+        super().__init__(interpreter)
+        self.methods.update({
+            'write_basic_package_version_file': self.write_basic_package_version_file,
+            'configure_package_config_file': self.configure_package_config_file,
+            'subproject': self.subproject,
+            'subproject_options': self.subproject_options,
+        })
+
+    def detect_voidp_size(self, env: Environment) -> int:
+        compilers = env.coredata.compilers.host
+        compiler = compilers.get('c', None)
+        if not compiler:
+            compiler = compilers.get('cpp', None)
+
+        if not compiler:
+            raise mesonlib.MesonException('Requires a C or C++ compiler to compute sizeof(void *).')
+
+        return compiler.sizeof('void *', '', env)[0]
+
+    def detect_cmake(self, state: ModuleState) -> bool:
+        if self.cmake_detected:
+            return True
+
+        cmakebin = state.find_program('cmake', silent=False)
+        if not cmakebin.found():
+            return False
+
+        p, stdout, stderr = mesonlib.Popen_safe(cmakebin.get_command() + ['--system-information', '-G', 'Ninja'])[0:3]
+        if p.returncode != 0:
+            mlog.log(f'error retrieving cmake information: returnCode={p.returncode} stdout={stdout} stderr={stderr}')
+            return False
+
+        match = re.search('\nCMAKE_ROOT \\"([^"]+)"\n', stdout.strip())
+        if not match:
+            mlog.log('unable to determine cmake root')
+            return False
+
+        cmakePath = pathlib.PurePath(match.group(1))
+        self.cmake_root = os.path.join(*cmakePath.parts)
+        self.cmake_detected = True
+        return True
+
+    @noPosargs
+    @typed_kwargs(
+        'cmake.write_basic_package_version_file',
+        KwargInfo('arch_independent', bool, default=False, since='0.62.0'),
+        KwargInfo('compatibility', str, default='AnyNewerVersion', validator=in_set_validator(set(COMPATIBILITIES))),
+        KwargInfo('name', str, required=True),
+        KwargInfo('version', str, required=True),
+        INSTALL_DIR_KW,
+    )
+    def write_basic_package_version_file(self, state: ModuleState, args: TYPE_var, kwargs: 'WriteBasicPackageVersionFile') -> ModuleReturnValue:
+        arch_independent = kwargs['arch_independent']
+        compatibility = kwargs['compatibility']
+        name = kwargs['name']
+        version = kwargs['version']
+
+        if not self.detect_cmake(state):
+            raise mesonlib.MesonException('Unable to find cmake')
+
+        pkgroot = pkgroot_name = kwargs['install_dir']
+        if pkgroot is None:
+            pkgroot = os.path.join(state.environment.coredata.get_option(mesonlib.OptionKey('libdir')), 'cmake', name)
+            pkgroot_name = os.path.join('{libdir}', 'cmake', name)
+
+        template_file = os.path.join(self.cmake_root, 'Modules', f'BasicConfigVersion-{compatibility}.cmake.in')
+        if not os.path.exists(template_file):
+            raise mesonlib.MesonException(f'your cmake installation doesn\'t support the {compatibility} compatibility')
+
+        version_file = os.path.join(state.environment.scratch_dir, f'{name}ConfigVersion.cmake')
+
+        conf: T.Dict[str, T.Union[str, bool, int]] = {
+            'CVF_VERSION': version,
+            'CMAKE_SIZEOF_VOID_P': str(self.detect_voidp_size(state.environment)),
+            'CVF_ARCH_INDEPENDENT': arch_independent,
+        }
+        mesonlib.do_conf_file(template_file, version_file, build.ConfigurationData(conf), 'meson')
+
+        res = build.Data([mesonlib.File(True, state.environment.get_scratch_dir(), version_file)], pkgroot, pkgroot_name, None, state.subproject)
+        return ModuleReturnValue(res, [res])
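+
+    # Illustrative meson.build usage:
+    #
+    #     cmake.write_basic_package_version_file(
+    #       name: 'myProject',
+    #       version: '1.0.0',
+    #       compatibility: 'AnyNewerVersion',
+    #     )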
+
+    def create_package_file(self, infile: str, outfile: str, PACKAGE_RELATIVE_PATH: str, extra: str, confdata: build.ConfigurationData) -> None:
+        package_init = PACKAGE_INIT_BASE.replace('@PACKAGE_RELATIVE_PATH@', PACKAGE_RELATIVE_PATH)
+        package_init = package_init.replace('@inputFileName@', os.path.basename(infile))
+        package_init += extra
+        package_init += PACKAGE_INIT_SET_AND_CHECK
+
+        try:
+            with open(infile, encoding='utf-8') as fin:
+                data = fin.readlines()
+        except Exception as e:
+            raise mesonlib.MesonException(f'Could not read input file {infile}: {e!s}')
+
+        result = []
+        regex = mesonlib.get_variable_regex('cmake@')
+        for line in data:
+            line = line.replace('@PACKAGE_INIT@', package_init)
+            line, _missing = mesonlib.do_replacement(regex, line, 'cmake@', confdata)
+
+            result.append(line)
+
+        outfile_tmp = outfile + "~"
+        with open(outfile_tmp, "w", encoding='utf-8') as fout:
+            fout.writelines(result)
+
+        shutil.copymode(infile, outfile_tmp)
+        mesonlib.replace_if_different(outfile, outfile_tmp)
+
+    @noPosargs
+    @typed_kwargs(
+        'cmake.configure_package_config_file',
+        KwargInfo('configuration', (build.ConfigurationData, dict), required=True),
+        KwargInfo('input',
+                  (str, mesonlib.File, ContainerTypeInfo(list, mesonlib.File)), required=True,
+                  validator=lambda x: 'requires exactly one file' if isinstance(x, list) and len(x) != 1 else None,
+                  convertor=lambda x: x[0] if isinstance(x, list) else x),
+        KwargInfo('name', str, required=True),
+        INSTALL_DIR_KW,
+    )
+    def configure_package_config_file(self, state: ModuleState, args: TYPE_var, kwargs: 'ConfigurePackageConfigFile') -> build.Data:
+        inputfile = kwargs['input']
+        if isinstance(inputfile, str):
+            inputfile = mesonlib.File.from_source_file(state.environment.source_dir, state.subdir, inputfile)
+
+        ifile_abs = inputfile.absolute_path(state.environment.source_dir, state.environment.build_dir)
+
+        name = kwargs['name']
+
+        (ofile_path, ofile_fname) = os.path.split(os.path.join(state.subdir, f'{name}Config.cmake'))
+        ofile_abs = os.path.join(state.environment.build_dir, ofile_path, ofile_fname)
+
+        install_dir = kwargs['install_dir']
+        if install_dir is None:
+            install_dir = os.path.join(state.environment.coredata.get_option(mesonlib.OptionKey('libdir')), 'cmake', name)
+
+        conf = kwargs['configuration']
+        if isinstance(conf, dict):
+            FeatureNew.single_use('cmake.configure_package_config_file dict as configuration', '0.62.0', state.subproject, location=state.current_node)
+            conf = build.ConfigurationData(conf)
+
+        prefix = state.environment.coredata.get_option(mesonlib.OptionKey('prefix'))
+        abs_install_dir = install_dir
+        if not os.path.isabs(abs_install_dir):
+            abs_install_dir = os.path.join(prefix, install_dir)
+
+        # path used in cmake scripts are POSIX even on Windows
+        PACKAGE_RELATIVE_PATH = pathlib.PurePath(os.path.relpath(prefix, abs_install_dir)).as_posix()
+        extra = ''
+        if re.match('^(/usr)?/lib(64)?/.+', abs_install_dir):
+            extra = PACKAGE_INIT_EXT.replace('@absInstallDir@', abs_install_dir)
+            extra = extra.replace('@installPrefix@', prefix)
+
+        self.create_package_file(ifile_abs, ofile_abs, PACKAGE_RELATIVE_PATH, extra, conf)
+        conf.used = True
+
+        conffile = os.path.normpath(inputfile.relative_name())
+        self.interpreter.build_def_files.add(conffile)
+
+        res = build.Data([mesonlib.File(True, ofile_path, ofile_fname)], install_dir, install_dir, None, state.subproject)
+        self.interpreter.build.data.append(res)
+
+        return res
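+
+    # Illustrative meson.build usage:
+    #
+    #     conf = configuration_data()
+    #     conf.set('VAR', 'value')
+    #     cmake.configure_package_config_file(
+    #       name: 'myProject',
+    #       input: 'myProject.cmake.in',
+    #       configuration: conf,
+    #     )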
+
+    @FeatureNew('subproject', '0.51.0')
+    @typed_pos_args('cmake.subproject', str)
+    @typed_kwargs(
+        'cmake.subproject',
+        REQUIRED_KW,
+        KwargInfo('options', (CMakeSubprojectOptions, NoneType), since='0.55.0'),
+        KwargInfo(
+            'cmake_options',
+            ContainerTypeInfo(list, str),
+            default=[],
+            listify=True,
+            deprecated='0.55.0',
+            deprecated_message='Use options instead',
+        ),
+    )
+    def subproject(self, state: ModuleState, args: T.Tuple[str], kwargs_: Subproject) -> T.Union[SubprojectHolder, CMakeSubproject]:
+        if kwargs_['cmake_options'] and kwargs_['options'] is not None:
+            raise InterpreterException('"options" cannot be used together with "cmake_options"')
+        dirname = args[0]
+        kw: kwargs.DoSubproject = {
+            'required': kwargs_['required'],
+            'options': kwargs_['options'],
+            'cmake_options': kwargs_['cmake_options'],
+            'default_options': {},
+            'version': [],
+        }
+        subp = self.interpreter.do_subproject(dirname, 'cmake', kw)
+        if not subp.found():
+            return subp
+        return CMakeSubproject(subp)
+
+    @FeatureNew('subproject_options', '0.55.0')
+    @noKwargs
+    @noPosargs
+    def subproject_options(self, state: ModuleState, args: TYPE_var, kwargs: TYPE_kwargs) -> CMakeSubprojectOptions:
+        return CMakeSubprojectOptions()
+
+def initialize(*args: T.Any, **kwargs: T.Any) -> CmakeModule:
+    return CmakeModule(*args, **kwargs)
diff --git a/vendored-meson/meson/mesonbuild/modules/cuda.py b/vendored-meson/meson/mesonbuild/modules/cuda.py
new file mode 100644
index 000000000000..6f809cb34f1b
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/modules/cuda.py
@@ -0,0 +1,390 @@
+# Copyright 2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+import typing as T
+import re
+
+from ..mesonlib import version_compare
+from ..compilers.cuda import CudaCompiler
+
+from . import NewExtensionModule, ModuleInfo
+
+from ..interpreterbase import (
+    flatten, permittedKwargs, noKwargs,
+    InvalidArguments
+)
+
+if T.TYPE_CHECKING:
+    from . import ModuleState
+    from ..compilers import Compiler
+
+class CudaModule(NewExtensionModule):
+
+    INFO = ModuleInfo('CUDA', '0.50.0', unstable=True)
+
+    def __init__(self, *args, **kwargs):
+        super().__init__()
+        self.methods.update({
+            "min_driver_version": self.min_driver_version,
+            "nvcc_arch_flags":    self.nvcc_arch_flags,
+            "nvcc_arch_readable": self.nvcc_arch_readable,
+        })
+
+    @noKwargs
+    def min_driver_version(self, state: 'ModuleState',
+                           args: T.Tuple[str],
+                           kwargs: T.Dict[str, T.Any]) -> str:
+        argerror = InvalidArguments('min_driver_version must have exactly one positional argument: ' +
+                                    'a CUDA Toolkit version string. Beware that, since CUDA 11.0, ' +
+                                    'the CUDA Toolkit\'s components (including NVCC) are versioned ' +
+                                    'independently from each other (and the CUDA Toolkit as a whole).')
+
+        if len(args) != 1 or not isinstance(args[0], str):
+            raise argerror
+
+        cuda_version = args[0]
+        driver_version_table = [
+            {'cuda_version': '>=12.0.0',   'windows': '527.41', 'linux': '525.60.13'},
+            {'cuda_version': '>=11.8.0',   'windows': '522.06', 'linux': '520.61.05'},
+            {'cuda_version': '>=11.7.1',   'windows': '516.31', 'linux': '515.48.07'},
+            {'cuda_version': '>=11.7.0',   'windows': '516.01', 'linux': '515.43.04'},
+            {'cuda_version': '>=11.6.1',   'windows': '511.65', 'linux': '510.47.03'},
+            {'cuda_version': '>=11.6.0',   'windows': '511.23', 'linux': '510.39.01'},
+            {'cuda_version': '>=11.5.1',   'windows': '496.13', 'linux': '495.29.05'},
+            {'cuda_version': '>=11.5.0',   'windows': '496.04', 'linux': '495.29.05'},
+            {'cuda_version': '>=11.4.3',   'windows': '472.50', 'linux': '470.82.01'},
+            {'cuda_version': '>=11.4.1',   'windows': '471.41', 'linux': '470.57.02'},
+            {'cuda_version': '>=11.4.0',   'windows': '471.11', 'linux': '470.42.01'},
+            {'cuda_version': '>=11.3.0',   'windows': '465.89', 'linux': '465.19.01'},
+            {'cuda_version': '>=11.2.2',   'windows': '461.33', 'linux': '460.32.03'},
+            {'cuda_version': '>=11.2.1',   'windows': '461.09', 'linux': '460.32.03'},
+            {'cuda_version': '>=11.2.0',   'windows': '460.82', 'linux': '460.27.03'},
+            {'cuda_version': '>=11.1.1',   'windows': '456.81', 'linux': '455.32'},
+            {'cuda_version': '>=11.1.0',   'windows': '456.38', 'linux': '455.23'},
+            {'cuda_version': '>=11.0.3',   'windows': '451.82', 'linux': '450.51.06'},
+            {'cuda_version': '>=11.0.2',   'windows': '451.48', 'linux': '450.51.05'},
+            {'cuda_version': '>=11.0.1',   'windows': '451.22', 'linux': '450.36.06'},
+            {'cuda_version': '>=10.2.89',  'windows': '441.22', 'linux': '440.33'},
+            {'cuda_version': '>=10.1.105', 'windows': '418.96', 'linux': '418.39'},
+            {'cuda_version': '>=10.0.130', 'windows': '411.31', 'linux': '410.48'},
+            {'cuda_version': '>=9.2.148',  'windows': '398.26', 'linux': '396.37'},
+            {'cuda_version': '>=9.2.88',   'windows': '397.44', 'linux': '396.26'},
+            {'cuda_version': '>=9.1.85',   'windows': '391.29', 'linux': '390.46'},
+            {'cuda_version': '>=9.0.76',   'windows': '385.54', 'linux': '384.81'},
+            {'cuda_version': '>=8.0.61',   'windows': '376.51', 'linux': '375.26'},
+            {'cuda_version': '>=8.0.44',   'windows': '369.30', 'linux': '367.48'},
+            {'cuda_version': '>=7.5.16',   'windows': '353.66', 'linux': '352.31'},
+            {'cuda_version': '>=7.0.28',   'windows': '347.62', 'linux': '346.46'},
+        ]
+
+        driver_version = 'unknown'
+        for d in driver_version_table:
+            if version_compare(cuda_version, d['cuda_version']):
+                driver_version = d.get(state.host_machine.system, d['linux'])
+                break
+
+        return driver_version
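+
+    # Illustrative meson.build usage (returns the minimum driver version for
+    # the host OS, e.g. '460.82' on Windows for CUDA 11.2.0, or 'unknown' if
+    # the toolkit version predates the table above):
+    #
+    #     cuda = import('cuda')  # 'unstable-cuda' on older Meson versions
+    #     driver_ver = cuda.min_driver_version(nvcc.version())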
+
+    @permittedKwargs(['detected'])
+    def nvcc_arch_flags(self, state: 'ModuleState',
+                        args: T.Tuple[T.Union[Compiler, CudaCompiler, str]],
+                        kwargs: T.Dict[str, T.Any]) -> T.List[str]:
+        nvcc_arch_args = self._validate_nvcc_arch_args(args, kwargs)
+        ret = self._nvcc_arch_flags(*nvcc_arch_args)[0]
+        return ret
+
+    @permittedKwargs(['detected'])
+    def nvcc_arch_readable(self, state: 'ModuleState',
+                           args: T.Tuple[T.Union[Compiler, CudaCompiler, str]],
+                           kwargs: T.Dict[str, T.Any]) -> T.List[str]:
+        nvcc_arch_args = self._validate_nvcc_arch_args(args, kwargs)
+        ret = self._nvcc_arch_flags(*nvcc_arch_args)[1]
+        return ret
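+
+    # Illustrative meson.build usage ('nvcc' being the project's CUDA
+    # compiler object):
+    #
+    #     arch_flags = cuda.nvcc_arch_flags(nvcc, 'Auto', detected: ['8.6'])
+    #     arch_readable = cuda.nvcc_arch_readable(nvcc, 'Auto', detected: ['8.6'])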
+
+    @staticmethod
+    def _break_arch_string(s):
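+        # e.g. '3.5 5.2,7.0;8.0' -> ['3.5', '5.2', '7.0', '8.0']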
+        s = re.sub('[ \t\r\n,;]+', ';', s)
+        s = s.strip(';').split(';')
+        return s
+
+    @staticmethod
+    def _detected_cc_from_compiler(c):
+        if isinstance(c, CudaCompiler):
+            return c.detected_cc
+        return ''
+
+    @staticmethod
+    def _version_from_compiler(c):
+        if isinstance(c, CudaCompiler):
+            return c.version
+        if isinstance(c, str):
+            return c
+        return 'unknown'
+
+    def _validate_nvcc_arch_args(self, args, kwargs):
+        argerror = InvalidArguments('The first argument must be an NVCC compiler object, or its version string!')
+
+        if len(args) < 1:
+            raise argerror
+        else:
+            compiler = args[0]
+            cuda_version = self._version_from_compiler(compiler)
+            if cuda_version == 'unknown':
+                raise argerror
+
+        arch_list = [] if len(args) <= 1 else flatten(args[1:])
+        arch_list = [self._break_arch_string(a) for a in arch_list]
+        arch_list = flatten(arch_list)
+        if len(arch_list) > 1 and not set(arch_list).isdisjoint({'All', 'Common', 'Auto'}):
+            raise InvalidArguments('''The special architectures 'All', 'Common' and 'Auto' must appear alone, as a positional argument!''')
+        arch_list = arch_list[0] if len(arch_list) == 1 else arch_list
+
+        detected = kwargs.get('detected', self._detected_cc_from_compiler(compiler))
+        detected = flatten([detected])
+        detected = [self._break_arch_string(a) for a in detected]
+        detected = flatten(detected)
+        if not set(detected).isdisjoint({'All', 'Common', 'Auto'}):
+            raise InvalidArguments('''The special architectures 'All', 'Common' and 'Auto' must appear alone, as a positional argument!''')
+
+        return cuda_version, arch_list, detected
+
+    def _filter_cuda_arch_list(self, cuda_arch_list, lo=None, hi=None, saturate=None):
+        """
+        Filter a CUDA arch list (no codenames) down to the bounds >= lo and
+        < hi, and deduplicate.
+        If saturate is provided, architectures >= hi are replaced with
+        saturate instead of being dropped.
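+
+        For example, with lo='3.5', hi='8.0' and saturate='8.0',
+        ['3.0', '5.0', '9.0'] filters to ['5.0', '8.0'].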
+        """
+
+        filtered_cuda_arch_list = []
+        for arch in cuda_arch_list:
+            if arch:
+                if lo and version_compare(arch, '<' + lo):
+                    continue
+                if hi and version_compare(arch, '>=' + hi):
+                    if not saturate:
+                        continue
+                    arch = saturate
+                if arch not in filtered_cuda_arch_list:
+                    filtered_cuda_arch_list.append(arch)
+        return filtered_cuda_arch_list
+
+    def _nvcc_arch_flags(self, cuda_version, cuda_arch_list='Auto', detected=''):
+        """
+        Using the CUDA Toolkit version and the target architectures, compute
+        the NVCC architecture flags.
+        """
+
+        # Replicates much of the logic of
+        #     https://github.com/Kitware/CMake/blob/master/Modules/FindCUDA/select_compute_arch.cmake
+        # except that a bug with cuda_arch_list="All" is worked around by
+        # tracking both lower and upper limits on GPU architectures.
+
+        cuda_known_gpu_architectures   = ['Fermi', 'Kepler', 'Maxwell']  # noqa: E221
+        cuda_common_gpu_architectures  = ['3.0', '3.5', '5.0']           # noqa: E221
+        cuda_hi_limit_gpu_architecture = None                            # noqa: E221
+        cuda_lo_limit_gpu_architecture = '2.0'                           # noqa: E221
+        cuda_all_gpu_architectures     = ['3.0', '3.2', '3.5', '5.0']    # noqa: E221
+
+        if version_compare(cuda_version, '<7.0'):
+            cuda_hi_limit_gpu_architecture = '5.2'
+
+        if version_compare(cuda_version, '>=7.0'):
+            cuda_known_gpu_architectures  += ['Kepler+Tegra', 'Kepler+Tesla', 'Maxwell+Tegra']  # noqa: E221
+            cuda_common_gpu_architectures += ['5.2']                                            # noqa: E221
+
+            if version_compare(cuda_version, '<8.0'):
+                cuda_common_gpu_architectures += ['5.2+PTX']  # noqa: E221
+                cuda_hi_limit_gpu_architecture = '6.0'        # noqa: E221
+
+        if version_compare(cuda_version, '>=8.0'):
+            cuda_known_gpu_architectures  += ['Pascal', 'Pascal+Tegra']  # noqa: E221
+            cuda_common_gpu_architectures += ['6.0', '6.1']              # noqa: E221
+            cuda_all_gpu_architectures    += ['6.0', '6.1', '6.2']       # noqa: E221
+
+            if version_compare(cuda_version, '<9.0'):
+                cuda_common_gpu_architectures += ['6.1+PTX']  # noqa: E221
+                cuda_hi_limit_gpu_architecture = '7.0'        # noqa: E221
+
+        if version_compare(cuda_version, '>=9.0'):
+            cuda_known_gpu_architectures  += ['Volta', 'Xavier'] # noqa: E221
+            cuda_common_gpu_architectures += ['7.0']             # noqa: E221
+            cuda_all_gpu_architectures    += ['7.0', '7.2']      # noqa: E221
+            # https://docs.nvidia.com/cuda/archive/9.0/cuda-toolkit-release-notes/index.html#unsupported-features
+            cuda_lo_limit_gpu_architecture = '3.0'               # noqa: E221
+
+            if version_compare(cuda_version, '<10.0'):
+                cuda_common_gpu_architectures += ['7.2+PTX']  # noqa: E221
+                cuda_hi_limit_gpu_architecture = '8.0'        # noqa: E221
+
+        if version_compare(cuda_version, '>=10.0'):
+            cuda_known_gpu_architectures  += ['Turing'] # noqa: E221
+            cuda_common_gpu_architectures += ['7.5']    # noqa: E221
+            cuda_all_gpu_architectures    += ['7.5']    # noqa: E221
+
+            if version_compare(cuda_version, '<11.0'):
+                cuda_common_gpu_architectures += ['7.5+PTX']  # noqa: E221
+                cuda_hi_limit_gpu_architecture = '8.0'        # noqa: E221
+
+        # need to account for the fact that Ampere is commonly assumed to include
+        # SM8.0 and SM8.6 even though CUDA 11.0 doesn't support SM8.6
+        cuda_ampere_bin = ['8.0']
+        cuda_ampere_ptx = ['8.0']
+        if version_compare(cuda_version, '>=11.0'):
+            cuda_known_gpu_architectures  += ['Ampere'] # noqa: E221
+            cuda_common_gpu_architectures += ['8.0']    # noqa: E221
+            cuda_all_gpu_architectures    += ['8.0']    # noqa: E221
+            # https://docs.nvidia.com/cuda/archive/11.0/cuda-toolkit-release-notes/index.html#deprecated-features
+            cuda_lo_limit_gpu_architecture = '3.5'      # noqa: E221
+
+            if version_compare(cuda_version, '<11.1'):
+                cuda_common_gpu_architectures += ['8.0+PTX']  # noqa: E221
+                cuda_hi_limit_gpu_architecture = '8.6'        # noqa: E221
+
+        if version_compare(cuda_version, '>=11.1'):
+            cuda_ampere_bin += ['8.6'] # noqa: E221
+            cuda_ampere_ptx  = ['8.6'] # noqa: E221
+
+            cuda_common_gpu_architectures += ['8.6']             # noqa: E221
+            cuda_all_gpu_architectures    += ['8.6']             # noqa: E221
+
+            if version_compare(cuda_version, '<11.8'):
+                cuda_common_gpu_architectures += ['8.6+PTX']  # noqa: E221
+                cuda_hi_limit_gpu_architecture = '8.7'        # noqa: E221
+
+        if version_compare(cuda_version, '>=11.8'):
+            cuda_known_gpu_architectures  += ['Orin', 'Lovelace', 'Hopper']  # noqa: E221
+            cuda_common_gpu_architectures += ['8.9', '9.0', '9.0+PTX']       # noqa: E221
+            cuda_all_gpu_architectures    += ['8.7', '8.9', '9.0']           # noqa: E221
+
+            if version_compare(cuda_version, '<12'):
+                cuda_hi_limit_gpu_architecture = '9.1'        # noqa: E221
+
+        if version_compare(cuda_version, '>=12.0'):
+            # https://docs.nvidia.com/cuda/cuda-toolkit-release-notes/index.html#deprecated-features (Current)
+            # https://docs.nvidia.com/cuda/archive/12.0/cuda-toolkit-release-notes/index.html#deprecated-features (Eventual?)
+            cuda_lo_limit_gpu_architecture = '5.0'            # noqa: E221
+
+            if version_compare(cuda_version, '<13'):
+                cuda_hi_limit_gpu_architecture = '10.0'       # noqa: E221
+
+        if not cuda_arch_list:
+            cuda_arch_list = 'Auto'
+
+        if   cuda_arch_list == 'All':     # noqa: E271
+            cuda_arch_list = cuda_known_gpu_architectures
+        elif cuda_arch_list == 'Common':  # noqa: E271
+            cuda_arch_list = cuda_common_gpu_architectures
+        elif cuda_arch_list == 'Auto':    # noqa: E271
+            if detected:
+                if isinstance(detected, list):
+                    cuda_arch_list = detected
+                else:
+                    cuda_arch_list = self._break_arch_string(detected)
+                cuda_arch_list = self._filter_cuda_arch_list(cuda_arch_list,
+                                                             cuda_lo_limit_gpu_architecture,
+                                                             cuda_hi_limit_gpu_architecture,
+                                                             cuda_common_gpu_architectures[-1])
+            else:
+                cuda_arch_list = cuda_common_gpu_architectures
+        elif isinstance(cuda_arch_list, str):
+            cuda_arch_list = self._break_arch_string(cuda_arch_list)
+
+        cuda_arch_list = sorted(x for x in set(cuda_arch_list) if x)
+
+        cuda_arch_bin = []
+        cuda_arch_ptx = []
+        for arch_name in cuda_arch_list:
+            arch_bin = []
+            arch_ptx = []
+            add_ptx = arch_name.endswith('+PTX')
+            if add_ptx:
+                arch_name = arch_name[:-len('+PTX')]
+
+            if re.fullmatch('[0-9]+\\.[0-9](\\([0-9]+\\.[0-9]\\))?', arch_name):
+                arch_bin, arch_ptx = [arch_name], [arch_name]
+            else:
+                arch_bin, arch_ptx = {
+                    'Fermi':         (['2.0', '2.1(2.0)'], []),
+                    'Kepler+Tegra':  (['3.2'],             []),
+                    'Kepler+Tesla':  (['3.7'],             []),
+                    'Kepler':        (['3.0', '3.5'],      ['3.5']),
+                    'Maxwell+Tegra': (['5.3'],             []),
+                    'Maxwell':       (['5.0', '5.2'],      ['5.2']),
+                    'Pascal':        (['6.0', '6.1'],      ['6.1']),
+                    'Pascal+Tegra':  (['6.2'],             []),
+                    'Volta':         (['7.0'],             ['7.0']),
+                    'Xavier':        (['7.2'],             []),
+                    'Turing':        (['7.5'],             ['7.5']),
+                    'Ampere':        (cuda_ampere_bin,     cuda_ampere_ptx),
+                    'Orin':          (['8.7'],             []),
+                    'Lovelace':      (['8.9'],             ['8.9']),
+                    'Hopper':        (['9.0'],             ['9.0']),
+                }.get(arch_name, (None, None))
+
+            if arch_bin is None:
+                raise InvalidArguments(f'Unknown CUDA Architecture Name {arch_name}!')
+
+            cuda_arch_bin += arch_bin
+
+            if add_ptx:
+                if not arch_ptx:
+                    arch_ptx = arch_bin
+                cuda_arch_ptx += arch_ptx
+
+        cuda_arch_bin = sorted(set(cuda_arch_bin))
+        cuda_arch_ptx = sorted(set(cuda_arch_ptx))
+
+        nvcc_flags = []
+        nvcc_archs_readable = []
+
+        for arch in cuda_arch_bin:
+            arch, codev = re.fullmatch(
+                '([0-9]+\\.[0-9])(?:\\(([0-9]+\\.[0-9])\\))?', arch).groups()
+
+            if version_compare(arch, '<' + cuda_lo_limit_gpu_architecture):
+                continue
+            if cuda_hi_limit_gpu_architecture and version_compare(arch, '>=' + cuda_hi_limit_gpu_architecture):
+                continue
+
+            if codev:
+                arch = arch.replace('.', '')
+                codev = codev.replace('.', '')
+                nvcc_flags += ['-gencode', 'arch=compute_' + codev + ',code=sm_' + arch]
+                nvcc_archs_readable += ['sm_' + arch]
+            else:
+                arch = arch.replace('.', '')
+                nvcc_flags += ['-gencode', 'arch=compute_' + arch + ',code=sm_' + arch]
+                nvcc_archs_readable += ['sm_' + arch]
+
+        for arch in cuda_arch_ptx:
+            arch, codev = re.fullmatch(
+                '([0-9]+\\.[0-9])(?:\\(([0-9]+\\.[0-9])\\))?', arch).groups()
+
+            if codev:
+                arch = codev
+
+            if version_compare(arch, '<' + cuda_lo_limit_gpu_architecture):
+                continue
+            if cuda_hi_limit_gpu_architecture and version_compare(arch, '>=' + cuda_hi_limit_gpu_architecture):
+                continue
+
+            arch = arch.replace('.', '')
+            nvcc_flags += ['-gencode', 'arch=compute_' + arch + ',code=compute_' + arch]
+            nvcc_archs_readable += ['compute_' + arch]
+
+        return nvcc_flags, nvcc_archs_readable
+
+def initialize(*args, **kwargs):
+    return CudaModule(*args, **kwargs)
diff --git a/vendored-meson/meson/mesonbuild/modules/dlang.py b/vendored-meson/meson/mesonbuild/modules/dlang.py
new file mode 100644
index 000000000000..6d5359fe3018
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/modules/dlang.py
@@ -0,0 +1,137 @@
+# Copyright 2018 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This file contains the implementation of the dlang module, which provides
+# helpers for integrating with the D language's DUB package manager.
+from __future__ import annotations
+
+import json
+import os
+
+from . import ExtensionModule, ModuleInfo
+from .. import mlog
+from ..dependencies import Dependency
+from ..dependencies.dub import DubDependency
+from ..interpreterbase import typed_pos_args
+from ..mesonlib import Popen_safe, MesonException
+
+class DlangModule(ExtensionModule):
+    class_dubbin = None
+    init_dub = False
+
+    INFO = ModuleInfo('dlang', '0.48.0')
+
+    def __init__(self, interpreter):
+        super().__init__(interpreter)
+        self.methods.update({
+            'generate_dub_file': self.generate_dub_file,
+        })
+
+    def _init_dub(self, state):
+        if DlangModule.class_dubbin is None:
+            self.dubbin = DubDependency.class_dubbin
+            DlangModule.class_dubbin = self.dubbin
+        else:
+            self.dubbin = DlangModule.class_dubbin
+
+        if DlangModule.class_dubbin is None:
+            self.dubbin = self.check_dub(state)
+            DlangModule.class_dubbin = self.dubbin
+        else:
+            self.dubbin = DlangModule.class_dubbin
+
+        if not self.dubbin:
+            raise MesonException('DUB not found.')
+
+    @typed_pos_args('dlang.generate_dub_file', str, str)
+    def generate_dub_file(self, state, args, kwargs):
+        if not DlangModule.init_dub:
+            self._init_dub(state)
+
+        config = {
+            'name': args[0]
+        }
+
+        config_path = os.path.join(args[1], 'dub.json')
+        if os.path.exists(config_path):
+            with open(config_path, encoding='utf-8') as ofile:
+                try:
+                    config = json.load(ofile)
+                except ValueError:
+                    mlog.warning('Failed to load the data in dub.json')
+
+        warn_publishing = ['description', 'license']
+        for arg in warn_publishing:
+            if arg not in kwargs and \
+               arg not in config:
+                mlog.warning('Without', mlog.bold(arg), 'the DUB package can\'t be published')
+
+        for key, value in kwargs.items():
+            if key == 'dependencies':
+                config[key] = {}
+                if isinstance(value, list):
+                    for dep in value:
+                        if isinstance(dep, Dependency):
+                            name = dep.get_name()
+                            ret, res = self._call_dubbin(['describe', name])
+                            if ret == 0:
+                                version = dep.get_version()
+                                if version is None:
+                                    config[key][name] = ''
+                                else:
+                                    config[key][name] = version
+                elif isinstance(value, Dependency):
+                    name = value.get_name()
+                    ret, res = self._call_dubbin(['describe', name])
+                    if ret == 0:
+                        version = value.get_version()
+                        if version is None:
+                            config[key][name] = ''
+                        else:
+                            config[key][name] = version
+            else:
+                config[key] = value
+
+        with open(config_path, 'w', encoding='utf-8') as ofile:
+            ofile.write(json.dumps(config, indent=4, ensure_ascii=False))
+
+    def _call_dubbin(self, args, env=None):
+        p, out = Popen_safe(self.dubbin.get_command() + args, env=env)[0:2]
+        return p.returncode, out.strip()
+
+    def check_dub(self, state):
+        dubbin = state.find_program('dub', silent=True)
+        if dubbin.found():
+            try:
+                p, out = Popen_safe(dubbin.get_command() + ['--version'])[0:2]
+                if p.returncode != 0:
+                    mlog.warning('Found dub {!r} but couldn\'t run it'
+                                 ''.format(' '.join(dubbin.get_command())))
+                    # Set to False instead of None to signify that we've already
+                    # searched for it and not found it
+                    dubbin = False
+            except (FileNotFoundError, PermissionError):
+                dubbin = False
+        else:
+            dubbin = False
+        if dubbin:
+            mlog.log('Found DUB:', mlog.bold(dubbin.get_path()),
+                     '(%s)' % out.strip())
+        else:
+            mlog.log('Found DUB:', mlog.red('NO'))
+        return dubbin
+
+def initialize(*args, **kwargs):
+    return DlangModule(*args, **kwargs)
diff --git a/vendored-meson/meson/mesonbuild/modules/external_project.py b/vendored-meson/meson/mesonbuild/modules/external_project.py
new file mode 100644
index 000000000000..a1d851491322
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/modules/external_project.py
@@ -0,0 +1,313 @@
+# Copyright 2020 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+from pathlib import Path
+import os
+import shlex
+import subprocess
+import typing as T
+
+from . import ExtensionModule, ModuleReturnValue, NewExtensionModule, ModuleInfo
+from .. import mlog, build
+from ..compilers.compilers import CFLAGS_MAPPING
+from ..envconfig import ENV_VAR_PROG_MAP
+from ..dependencies import InternalDependency
+from ..dependencies.pkgconfig import PkgConfigDependency
+from ..interpreterbase import FeatureNew
+from ..interpreter.type_checking import ENV_KW, DEPENDS_KW
+from ..interpreterbase.decorators import ContainerTypeInfo, KwargInfo, typed_kwargs, typed_pos_args
+from ..mesonlib import (EnvironmentException, MesonException, Popen_safe, MachineChoice,
+                        get_variable_regex, do_replacement, join_args, OptionKey)
+
+if T.TYPE_CHECKING:
+    from typing_extensions import TypedDict
+
+    from . import ModuleState
+    from .._typing import ImmutableListProtocol
+    from ..build import BuildTarget, CustomTarget
+    from ..interpreter import Interpreter
+    from ..interpreterbase import TYPE_var
+
+    class Dependency(TypedDict):
+
+        subdir: str
+
+    class AddProject(TypedDict):
+
+        configure_options: T.List[str]
+        cross_configure_options: T.List[str]
+        verbose: bool
+        env: build.EnvironmentVariables
+        depends: T.List[T.Union[BuildTarget, CustomTarget]]
+
+
+class ExternalProject(NewExtensionModule):
+
+    make: ImmutableListProtocol[str]
+
+    def __init__(self,
+                 state: 'ModuleState',
+                 configure_command: str,
+                 configure_options: T.List[str],
+                 cross_configure_options: T.List[str],
+                 env: build.EnvironmentVariables,
+                 verbose: bool,
+                 extra_depends: T.List[T.Union['BuildTarget', 'CustomTarget']]):
+        super().__init__()
+        self.methods.update({'dependency': self.dependency_method,
+                             })
+
+        self.subdir = Path(state.subdir)
+        self.project_version = state.project_version
+        self.subproject = state.subproject
+        self.env = state.environment
+        self.build_machine = state.build_machine
+        self.host_machine = state.host_machine
+        self.configure_command = configure_command
+        self.configure_options = configure_options
+        self.cross_configure_options = cross_configure_options
+        self.verbose = verbose
+        self.user_env = env
+
+        self.src_dir = Path(self.env.get_source_dir(), self.subdir)
+        self.build_dir = Path(self.env.get_build_dir(), self.subdir, 'build')
+        self.install_dir = Path(self.env.get_build_dir(), self.subdir, 'dist')
+        _p = self.env.coredata.get_option(OptionKey('prefix'))
+        assert isinstance(_p, str), 'for mypy'
+        self.prefix = Path(_p)
+        _l = self.env.coredata.get_option(OptionKey('libdir'))
+        assert isinstance(_l, str), 'for mypy'
+        self.libdir = Path(_l)
+        _i = self.env.coredata.get_option(OptionKey('includedir'))
+        assert isinstance(_i, str), 'for mypy'
+        self.includedir = Path(_i)
+        self.name = self.src_dir.name
+
+        # On Windows if the prefix is "c:/foo" and DESTDIR is "c:/bar", `make`
+        # will install files into "c:/bar/c:/foo" which is an invalid path.
+        # Work around that issue by removing the drive from prefix.
+        if self.prefix.drive:
+            self.prefix = self.prefix.relative_to(self.prefix.drive)
+
+        # self.prefix is an absolute path, so we cannot append it to another path.
+        self.rel_prefix = self.prefix.relative_to(self.prefix.root)
+
+        self._configure(state)
+
+        self.targets = self._create_targets(extra_depends)
+
+    def _configure(self, state: 'ModuleState') -> None:
+        if self.configure_command == 'waf':
+            FeatureNew('Waf external project', '0.60.0').use(self.subproject, state.current_node)
+            waf = state.find_program('waf')
+            configure_cmd = waf.get_command()
+            configure_cmd += ['configure', '-o', str(self.build_dir)]
+            workdir = self.src_dir
+            self.make = waf.get_command() + ['build']
+        else:
+            # Assume it's the name of a script in the source dir, like
+            # 'configure', 'autogen.sh', etc.
+            configure_path = Path(self.src_dir, self.configure_command)
+            configure_prog = state.find_program(configure_path.as_posix())
+            configure_cmd = configure_prog.get_command()
+            workdir = self.build_dir
+            self.make = state.find_program('make').get_command()
+
+        d = [('PREFIX', '--prefix=@PREFIX@', self.prefix.as_posix()),
+             ('LIBDIR', '--libdir=@PREFIX@/@LIBDIR@', self.libdir.as_posix()),
+             ('INCLUDEDIR', None, self.includedir.as_posix()),
+             ]
+        self._validate_configure_options(d, state)
+
+        configure_cmd += self._format_options(self.configure_options, d)
+
+        if self.env.is_cross_build():
+            host = '{}-{}-{}'.format(self.host_machine.cpu_family,
+                                     self.build_machine.system,
+                                     self.host_machine.system)
+            d = [('HOST', None, host)]
+            configure_cmd += self._format_options(self.cross_configure_options, d)
+
+        # Set common env variables like CFLAGS, CC, etc.
+        link_exelist: T.List[str] = []
+        link_args: T.List[str] = []
+        self.run_env = os.environ.copy()
+        for lang, compiler in self.env.coredata.compilers[MachineChoice.HOST].items():
+            if any(lang not in i for i in (ENV_VAR_PROG_MAP, CFLAGS_MAPPING)):
+                continue
+            cargs = self.env.coredata.get_external_args(MachineChoice.HOST, lang)
+            assert isinstance(cargs, list), 'for mypy'
+            self.run_env[ENV_VAR_PROG_MAP[lang]] = self._quote_and_join(compiler.get_exelist())
+            self.run_env[CFLAGS_MAPPING[lang]] = self._quote_and_join(cargs)
+            if not link_exelist:
+                link_exelist = compiler.get_linker_exelist()
+                _l = self.env.coredata.get_external_link_args(MachineChoice.HOST, lang)
+                assert isinstance(_l, list), 'for mypy'
+                link_args = _l
+        if link_exelist:
+            # FIXME: Do not pass linker because Meson uses CC as linker wrapper,
+            # but autotools often expects the real linker (e.g. GNU ld).
+            # self.run_env['LD'] = self._quote_and_join(link_exelist)
+            pass
+        self.run_env['LDFLAGS'] = self._quote_and_join(link_args)
+
+        self.run_env = self.user_env.get_env(self.run_env)
+        self.run_env = PkgConfigDependency.setup_env(self.run_env, self.env, MachineChoice.HOST,
+                                                     uninstalled=True)
+
+        self.build_dir.mkdir(parents=True, exist_ok=True)
+        self._run('configure', configure_cmd, workdir)
+
+    def _quote_and_join(self, array: T.List[str]) -> str:
+        return ' '.join([shlex.quote(i) for i in array])
+
+    def _validate_configure_options(self, variables: T.List[T.Tuple[str, str, str]], state: 'ModuleState') -> None:
+        # Ensure the user at least tries to pass basic info to the build
+        # system, like the prefix, libdir, etc.
+        for key, default, val in variables:
+            if default is None:
+                continue
+            key_format = f'@{key}@'
+            for option in self.configure_options:
+                if key_format in option:
+                    break
+            else:
+                FeatureNew('Default configure_option', '0.57.0').use(self.subproject, state.current_node)
+                self.configure_options.append(default)
+
+    def _format_options(self, options: T.List[str], variables: T.List[T.Tuple[str, str, str]]) -> T.List[str]:
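+        # For illustration: given a variables entry such as
+        # ('PREFIX', '--prefix=@PREFIX@', '/usr/local'), an option like
+        # '--prefix=@PREFIX@' expands to '--prefix=/usr/local'; any
+        # unknown @VAR@ names are collected and reported as an error below.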
+        out: T.List[str] = []
+        missing = set()
+        regex = get_variable_regex('meson')
+        confdata: T.Dict[str, T.Tuple[str, T.Optional[str]]] = {k: (v, None) for k, _, v in variables}
+        for o in options:
+            arg, missing_vars = do_replacement(regex, o, 'meson', confdata)
+            missing.update(missing_vars)
+            out.append(arg)
+        if missing:
+            var_list = ", ".join(repr(m) for m in sorted(missing))
+            raise EnvironmentException(
+                f"Variables {var_list} in configure options are missing.")
+        return out
+
+    def _run(self, step: str, command: T.List[str], workdir: Path) -> None:
+        mlog.log(f'External project {self.name}:', mlog.bold(step))
+        m = 'Running command ' + str(command) + ' in directory ' + str(workdir) + '\n'
+        log_filename = Path(mlog.get_log_dir(), f'{self.name}-{step}.log')
+        output = None
+        if not self.verbose:
+            output = open(log_filename, 'w', encoding='utf-8')
+            output.write(m + '\n')
+            output.flush()
+        else:
+            mlog.log(m)
+        p, *_ = Popen_safe(command, cwd=workdir, env=self.run_env,
+                           stderr=subprocess.STDOUT,
+                           stdout=output)
+        if p.returncode != 0:
+            m = f'{step} step returned error code {p.returncode}.'
+            if not self.verbose:
+                m += '\nSee logs: ' + str(log_filename)
+            raise MesonException(m)
+
+    def _create_targets(self, extra_depends: T.List[T.Union['BuildTarget', 'CustomTarget']]) -> T.List['TYPE_var']:
+        cmd = self.env.get_build_command()
+        cmd += ['--internal', 'externalproject',
+                '--name', self.name,
+                '--srcdir', self.src_dir.as_posix(),
+                '--builddir', self.build_dir.as_posix(),
+                '--installdir', self.install_dir.as_posix(),
+                '--logdir', mlog.get_log_dir(),
+                '--make', join_args(self.make),
+                ]
+        if self.verbose:
+            cmd.append('--verbose')
+
+        self.target = build.CustomTarget(
+            self.name,
+            self.subdir.as_posix(),
+            self.subproject,
+            self.env,
+            cmd + ['@OUTPUT@', '@DEPFILE@'],
+            [],
+            [f'{self.name}.stamp'],
+            depfile=f'{self.name}.d',
+            console=True,
+            extra_depends=extra_depends,
+        )
+
+        idir = build.InstallDir(self.subdir.as_posix(),
+                                Path('dist', self.rel_prefix).as_posix(),
+                                install_dir='.',
+                                install_dir_name='.',
+                                install_mode=None,
+                                exclude=None,
+                                strip_directory=True,
+                                from_source_dir=False,
+                                subproject=self.subproject)
+
+        return [self.target, idir]
+
+    @typed_pos_args('external_project.dependency', str)
+    @typed_kwargs('external_project.dependency', KwargInfo('subdir', str, default=''))
+    def dependency_method(self, state: 'ModuleState', args: T.Tuple[str], kwargs: 'Dependency') -> InternalDependency:
+        libname = args[0]
+
+        abs_includedir = Path(self.install_dir, self.rel_prefix, self.includedir)
+        if kwargs['subdir']:
+            abs_includedir = Path(abs_includedir, kwargs['subdir'])
+        abs_libdir = Path(self.install_dir, self.rel_prefix, self.libdir)
+
+        version = self.project_version
+        compile_args = [f'-I{abs_includedir}']
+        link_args = [f'-L{abs_libdir}', f'-l{libname}']
+        sources = self.target
+        dep = InternalDependency(version, [], compile_args, link_args, [],
+                                 [], [sources], [], [], {}, [], [], [])
+        return dep
+
+
+class ExternalProjectModule(ExtensionModule):
+
+    INFO = ModuleInfo('External build system', '0.56.0', unstable=True)
+
+    def __init__(self, interpreter: 'Interpreter'):
+        super().__init__(interpreter)
+        self.methods.update({'add_project': self.add_project,
+                             })
+
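+    # A minimal sketch of the intended meson.build usage (the module
+    # import name and the library name 'foo' are illustrative):
+    #
+    #   mod = import('unstable-external_project')
+    #   p = mod.add_project('configure',
+    #                       configure_options: ['--disable-shared'])
+    #   foo_dep = p.dependency('foo')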
+    @typed_pos_args('external_project_mod.add_project', str)
+    @typed_kwargs(
+        'external_project.add_project',
+        KwargInfo('configure_options', ContainerTypeInfo(list, str), default=[], listify=True),
+        KwargInfo('cross_configure_options', ContainerTypeInfo(list, str), default=['--host=@HOST@'], listify=True),
+        KwargInfo('verbose', bool, default=False),
+        ENV_KW,
+        DEPENDS_KW.evolve(since='0.63.0'),
+    )
+    def add_project(self, state: 'ModuleState', args: T.Tuple[str], kwargs: 'AddProject') -> ModuleReturnValue:
+        configure_command = args[0]
+        project = ExternalProject(state,
+                                  configure_command,
+                                  kwargs['configure_options'],
+                                  kwargs['cross_configure_options'],
+                                  kwargs['env'],
+                                  kwargs['verbose'],
+                                  kwargs['depends'])
+        return ModuleReturnValue(project, project.targets)
+
+
+def initialize(interp: 'Interpreter') -> ExternalProjectModule:
+    return ExternalProjectModule(interp)
diff --git a/vendored-meson/meson/mesonbuild/modules/features/__init__.py b/vendored-meson/meson/mesonbuild/modules/features/__init__.py
new file mode 100644
index 000000000000..cd71ca43e738
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/modules/features/__init__.py
@@ -0,0 +1,11 @@
+# Copyright (c) 2023, NumPy Developers.
+
+from typing import TYPE_CHECKING
+
+from .module import Module
+
+if TYPE_CHECKING:
+    from ...interpreter import Interpreter
+
+def initialize(interpreter: 'Interpreter') -> Module:
+    return Module()
diff --git a/vendored-meson/meson/mesonbuild/modules/features/feature.py b/vendored-meson/meson/mesonbuild/modules/features/feature.py
new file mode 100644
index 000000000000..7e0f621e543f
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/modules/features/feature.py
@@ -0,0 +1,356 @@
+# Copyright (c) 2023, NumPy Developers.
+# All rights reserved.
+import re
+from typing import (
+    Dict, Set, Tuple, List, Callable, Optional,
+    Union, Any, Iterable, cast, TYPE_CHECKING
+)
+from dataclasses import dataclass, field
+from ...mesonlib import File, MesonException
+from ...interpreter.type_checking import NoneType
+from ...interpreterbase.decorators import (
+    noKwargs, noPosargs, KwargInfo, typed_kwargs, typed_pos_args,
+    ContainerTypeInfo
+)
+from .. import ModuleObject
+
+if TYPE_CHECKING:
+    from typing import TypedDict
+    from typing_extensions import NotRequired
+    from ...interpreterbase import TYPE_var, TYPE_kwargs
+    from ...compilers import Compiler
+    from .. import ModuleState
+
+@dataclass(unsafe_hash=True, order=True)
+class ConflictAttr:
+    """
+    Data class representing a feature attribute that may conflict
+    with the attributes of other features.
+
+    The purpose of this class is to resolve any possible conflicts
+    between compiler arguments when they are joined together while
+    gathering the implied features or concatenating non-implied
+    features.
+
+    Attributes:
+        val: The value of the feature attribute.
+        match: Regular expression pattern for matching conflicting values
+               (optional).
+        mfilter: Regular expression pattern for filtering the conflicting
+                 values (optional).
+        mjoin: String used to join the filtered values (optional).
+
+    """
+    val: str = field(hash=True, compare=True)
+    match: Union[re.Pattern, None] = field(
+        default=None, hash=False, compare=False
+    )
+    mfilter: Union[re.Pattern, None] = field(
+        default=None, hash=False, compare=False
+    )
+    mjoin: str = field(default='', hash=False, compare=False)
+
+    def copy(self) -> 'ConflictAttr':
+        return ConflictAttr(**self.__dict__)
+
+    def to_dict(self) -> Dict[str, str]:
+        ret: Dict[str, str] = {}
+        for attr in ('val', 'mjoin'):
+            ret[attr] = getattr(self, attr)
+        for attr in ('match', 'mfilter'):
+            val = getattr(self, attr)
+            if not val:
+                val = ''
+            else:
+                val = str(val)
+            ret[attr] = val
+        return ret
+
+class KwargConflictAttr(KwargInfo):
+    def __init__(self, func_name: str, opt_name: str, default: Any = None):
+        types = (
+            NoneType, str, ContainerTypeInfo(dict, str),
+            ContainerTypeInfo(list, (dict, str))
+        )
+        super().__init__(
+            opt_name, types,
+            convertor = lambda values: self.convert(
+                func_name, opt_name, values
+            ),
+            default = default
+        )
+
+    @staticmethod
+    def convert(func_name: str, opt_name: str, values: 'IMPLIED_ATTR',
+                ) -> Union[None, List[ConflictAttr]]:
+        if values is None:
+            return None
+        ret: List[ConflictAttr] = []
+        values = [values] if isinstance(values, (str, dict)) else values
+        accepted_keys = ('val', 'match', 'mfilter', 'mjoin')
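+        # Accepted input forms, for illustration: a plain string 'flag',
+        # a dict like {'val': 'flag', 'match': 'regex'}, or a list
+        # mixing both.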
+        for edict in values:
+            if isinstance(edict, str):
+                if edict:
+                    ret.append(ConflictAttr(val=edict))
+                continue
+            if not isinstance(edict, dict):
+                # This shouldn't happen given the accepted types.
+                # TODO: raise an exception here instead of silently skipping.
+                continue
+            unknown_keys = [k for k in edict.keys() if k not in accepted_keys]
+            if unknown_keys:
+                raise MesonException(
+                    f'{func_name}: unknown keys {unknown_keys} in '
+                    f'option {opt_name}'
+                )
+            val = edict.get('val')
+            if val is None:
+                raise MesonException(
+                    f'{func_name}: option "{opt_name}" requires '
+                    f'a dictionary with key "val" to be set'
+                )
+            implattr = ConflictAttr(val=val, mjoin=edict.get('mjoin', ''))
+            for cattr in ('match', 'mfilter'):
+                cval = edict.get(cattr)
+                if not cval:
+                    continue
+                try:
+                    ccval = re.compile(cval)
+                except Exception as e:
+                    raise MesonException(
+                        f'{func_name}: unable to '
+                        f'compile the regex in option "{opt_name}"\n'
+                        f'"{cattr}:{cval}" -> {str(e)}'
+                    )
+                setattr(implattr, cattr, ccval)
+            ret.append(implattr)
+        return ret
+
+if TYPE_CHECKING:
+    IMPLIED_ATTR = Union[
+        None, str, Dict[str, str], List[
+            Union[str, Dict[str, str]]
+        ]
+    ]
+    class FeatureKwArgs(TypedDict):
+        #implies: Optional[List['FeatureObject']]
+        implies: NotRequired[List[Any]]
+        group: NotRequired[List[str]]
+        detect: NotRequired[List[ConflictAttr]]
+        args: NotRequired[List[ConflictAttr]]
+        test_code: NotRequired[Union[str, File]]
+        extra_tests: NotRequired[Dict[str, Union[str, File]]]
+        disable: NotRequired[str]
+
+    class FeatureUpdateKwArgs(FeatureKwArgs):
+        name: NotRequired[str]
+        interest: NotRequired[int]
+
+class FeatureObject(ModuleObject):
+    name: str
+    interest: int
+    implies: Set['FeatureObject']
+    group: List[str]
+    detect: List[ConflictAttr]
+    args: List[ConflictAttr]
+    test_code: Union[str, File]
+    extra_tests: Dict[str, Union[str, File]]
+    disable: str
+
+    def __init__(self, state: 'ModuleState',
+                 args: List['TYPE_var'],
+                 kwargs: 'TYPE_kwargs') -> None:
+
+        super().__init__()
+
+        @typed_pos_args('features.new', str, int)
+        @typed_kwargs('features.new',
+            KwargInfo(
+                'implies',
+                (FeatureObject, ContainerTypeInfo(list, FeatureObject)),
+                default=[], listify=True
+            ),
+            KwargInfo(
+                'group', (str, ContainerTypeInfo(list, str)),
+                default=[], listify=True
+            ),
+            KwargConflictAttr('features.new', 'detect', default=[]),
+            KwargConflictAttr('features.new', 'args', default=[]),
+            KwargInfo('test_code', (str, File), default=''),
+            KwargInfo(
+                'extra_tests', (ContainerTypeInfo(dict, (str, File))),
+                default={}
+            ),
+            KwargInfo('disable', (str), default=''),
+        )
+        def init_attrs(state: 'ModuleState',
+                       args: Tuple[str, int],
+                       kwargs: 'FeatureKwArgs') -> None:
+            self.name = args[0]
+            self.interest = args[1]
+            self.implies = set(kwargs['implies'])
+            self.group = kwargs['group']
+            self.detect = kwargs['detect']
+            self.args = kwargs['args']
+            self.test_code = kwargs['test_code']
+            self.extra_tests = kwargs['extra_tests']
+            self.disable: str = kwargs['disable']
+            if not self.detect:
+                if self.group:
+                    self.detect = [ConflictAttr(val=f) for f in self.group]
+                else:
+                    self.detect = [ConflictAttr(val=self.name)]
+
+        init_attrs(state, args, kwargs)
+        self.methods.update({
+            'update': self.update_method,
+            'get': self.get_method,
+        })
+
+    def update_method(self, state: 'ModuleState', args: List['TYPE_var'],
+                      kwargs: 'TYPE_kwargs') -> 'FeatureObject':
+        @noPosargs
+        @typed_kwargs('features.FeatureObject.update',
+            KwargInfo('name', (NoneType, str)),
+            KwargInfo('interest', (NoneType, int)),
+            KwargInfo(
+                'implies', (
+                    NoneType, FeatureObject,
+                    ContainerTypeInfo(list, FeatureObject)
+                ),
+                listify=True
+            ),
+            KwargInfo(
+                'group', (NoneType, str, ContainerTypeInfo(list, str)),
+                listify=True
+            ),
+            KwargConflictAttr('features.FeatureObject.update', 'detect'),
+            KwargConflictAttr('features.FeatureObject.update', 'args'),
+            KwargInfo('test_code', (NoneType, str, File)),
+            KwargInfo(
+                'extra_tests', (
+                    NoneType, ContainerTypeInfo(dict, (str, File)))
+            ),
+            KwargInfo('disable', (NoneType, str)),
+        )
+        def update(state: 'ModuleState', args: List['TYPE_var'],
+                   kwargs: 'FeatureUpdateKwArgs') -> None:
+            for k, v in kwargs.items():
+                if v is not None and k != 'implies':
+                    setattr(self, k, v)
+            implies = kwargs.get('implies')
+            if implies is not None:
+                self.implies = set(implies)
+        update(state, args, kwargs)
+        return self
+
+    @noKwargs
+    @typed_pos_args('features.FeatureObject.get', str)
+    def get_method(self, state: 'ModuleState', args: Tuple[str],
+                   kwargs: 'TYPE_kwargs') -> 'TYPE_var':
+
+        impl_lst = lambda lst: [v.to_dict() for v in lst]
+        noconv = lambda v: v
+        dfunc = {
+            'name': noconv,
+            'interest': noconv,
+            'group': noconv,
+            'implies': lambda v: [fet.name for fet in sorted(v)],
+            'detect': impl_lst,
+            'args': impl_lst,
+            'test_code': noconv,
+            'extra_tests': noconv,
+            'disable': noconv
+        }
+        cfunc: Optional[Callable[[str], 'TYPE_var']] = dfunc.get(args[0])
+        if cfunc is None:
+            raise MesonException(f'Key {args[0]!r} is not in the feature.')
+        val = getattr(self, args[0])
+        return cfunc(val)
+
+    def get_implicit(self, _caller: Optional[Set['FeatureObject']] = None
+                     ) -> Set['FeatureObject']:
+        # recursion guard, since features
+        # can imply each other
+        _caller = {self, } if not _caller else _caller.union({self, })
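+        # For illustration: if features A and B imply each other,
+        # A.get_implicit() recurses into B, and the nested call sees
+        # both A and B in `_caller`, so it has nothing left to visit
+        # and the walk terminates.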
+        implies = self.implies.difference(_caller)
+        ret = self.implies
+        for sub_fet in implies:
+            ret = ret.union(sub_fet.get_implicit(_caller))
+        return ret
+
+    @staticmethod
+    def get_implicit_multi(features: Iterable['FeatureObject']) -> Set['FeatureObject']:
+        implies = set().union(*[f.get_implicit() for f in features])
+        return implies
+
+    @staticmethod
+    def get_implicit_combine_multi(features: Iterable['FeatureObject']) -> Set['FeatureObject']:
+        return FeatureObject.get_implicit_multi(features).union(features)
+
+    @staticmethod
+    def sorted_multi(features: Iterable[Union['FeatureObject', Iterable['FeatureObject']]],
+                     reverse: bool = False
+                     ) -> List[Union['FeatureObject', Iterable['FeatureObject']]]:
+        def sort_cb(k: Union[FeatureObject, Iterable[FeatureObject]]) -> int:
+            if isinstance(k, FeatureObject):
+                return k.interest
+            # keep prevalent features and erase any implied features
+            implied_features = FeatureObject.get_implicit_multi(k)
+            prevalent_features = set(k).difference(implied_features)
+            if len(prevalent_features) == 0:
+                # This happens when all the features imply each other;
+                # fall back to the interest of the highest-ranked feature.
+                return sorted(k)[-1].interest
+            # multiple features
+            rank = max(f.interest for f in prevalent_features)
+            # FIXME: this is not a reliable way to increase the rank for
+            # multiple features, which is why this function isn't
+            # considered accurate.
+            rank += len(prevalent_features) - 1
+            return rank
+        return sorted(features, reverse=reverse, key=sort_cb)
+
+    @staticmethod
+    def features_names(features: Iterable[Union['FeatureObject', Iterable['FeatureObject']]]
+                       ) -> List[Union[str, List[str]]]:
+        return [
+            fet.name if isinstance(fet, FeatureObject)
+            else [f.name for f in fet]
+            for fet in features
+        ]
+
+    def __repr__(self) -> str:
+        args = ', '.join([
+            f'{attr} = {str(getattr(self, attr))}'
+            for attr in [
+                'group', 'implies',
+                'detect', 'args',
+                'test_code', 'extra_tests',
+                'disable'
+            ]
+        ])
+        return f'FeatureObject({self.name}, {self.interest}, {args})'
+
+    def __hash__(self) -> int:
+        return hash(self.name)
+
+    def __eq__(self, robj: object) -> bool:
+        if not isinstance(robj, FeatureObject):
+            return False
+        return self is robj and self.name == robj.name
+
+    def __lt__(self, robj: object) -> Any:
+        if not isinstance(robj, FeatureObject):
+            return NotImplemented
+        return self.interest < robj.interest
+
+    def __le__(self, robj: object) -> Any:
+        if not isinstance(robj, FeatureObject):
+            return NotImplemented
+        return self.interest <= robj.interest
+
+    def __gt__(self, robj: object) -> Any:
+        return robj < self
+
+    def __ge__(self, robj: object) -> Any:
+        return robj <= self
diff --git a/vendored-meson/meson/mesonbuild/modules/features/module.py b/vendored-meson/meson/mesonbuild/modules/features/module.py
new file mode 100644
index 000000000000..0be6af06c8b7
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/modules/features/module.py
@@ -0,0 +1,714 @@
+# Copyright (c) 2023, NumPy Developers.
+import os
+from typing import (
+    Dict, Set, Tuple, List, Callable, Optional,
+    Union, Any, cast, TYPE_CHECKING
+)
+from ... import mlog, build
+from ...compilers import Compiler
+from ...mesonlib import File, MesonException
+from ...interpreter.type_checking import NoneType
+from ...interpreterbase.decorators import (
+    noKwargs, KwargInfo, typed_kwargs, typed_pos_args,
+    ContainerTypeInfo, permittedKwargs
+)
+from .. import ModuleInfo, NewExtensionModule, ModuleObject
+from .feature import FeatureObject, ConflictAttr
+from .utils import test_code, get_compiler, generate_hash
+
+if TYPE_CHECKING:
+    from typing import TypedDict
+    from ...interpreterbase import TYPE_var, TYPE_kwargs
+    from .. import ModuleState
+    from .feature import FeatureKwArgs
+
+    class TestKwArgs(TypedDict):
+        compiler: Optional[Compiler]
+        force_args: Optional[List[str]]
+        anyfet: bool
+        cached: bool
+
+    class TestResultKwArgs(TypedDict):
+        target_name: str
+        prevalent_features: List[str]
+        features: List[str]
+        args: List[str]
+        detect: List[str]
+        defines: List[str]
+        undefines: List[str]
+        is_supported: bool
+        is_disabled: bool
+        fail_reason: str
+
+class TargetsObject(ModuleObject):
+    def __init__(self) -> None:
+        super().__init__()
+        self._targets: Dict[
+            Union[FeatureObject, Tuple[FeatureObject, ...]],
+            List[build.StaticLibrary]
+        ] = {}
+        self._baseline: List[build.StaticLibrary] = []
+        self.methods.update({
+            'static_lib': self.static_lib_method,
+            'extend': self.extend_method
+        })
+
+    def extend_method(self, state: 'ModuleState',
+                      args: List['TYPE_var'],
+                      kwargs: 'TYPE_kwargs') -> 'TargetsObject':
+
+        @typed_pos_args('features.TargetsObject.extend', TargetsObject)
+        @noKwargs
+        def test_args(state: 'ModuleState',
+                      args: Tuple[TargetsObject],
+                      kwargs: 'TYPE_kwargs') -> TargetsObject:
+            return args[0]
+        robj: TargetsObject = test_args(state, args, kwargs)
+        self._baseline.extend(robj._baseline)
+        for features, robj_targets in robj._targets.items():
+            targets: List[build.StaticLibrary] = self._targets.setdefault(features, [])
+            targets += robj_targets
+        return self
+
+    @typed_pos_args('features.TargetsObject.static_lib', str)
+    @noKwargs
+    def static_lib_method(self, state: 'ModuleState', args: Tuple[str],
+                          kwargs: 'TYPE_kwargs'
+                          ) -> Any:
+        # The linking order must be based on the lowest interested features,
+        # to ensure that the linker prioritizes any duplicate weak global symbols
+        # of the lowest interested features over the highest ones,
+        # starting with the baseline to avoid any possible crashes due
+        # to optimizations that may be generated based
+        # on the highest interested features.
+        link_whole = [] + self._baseline
+        tcast = Union[FeatureObject, Tuple[FeatureObject, ...]]
+        for features in FeatureObject.sorted_multi(self._targets.keys()):
+            link_whole += self._targets[cast(tcast, features)]
+        if not link_whole:
+            return []
+        static_lib = state._interpreter.func_static_lib(
+            None, [args[0]], {
+                'link_whole': link_whole
+            }
+        )
+        return static_lib
+
+    def add_baseline_target(self, target: build.StaticLibrary) -> None:
+        self._baseline.append(target)
+
+    def add_target(self, features: Union[FeatureObject, List[FeatureObject]],
+                   target: build.StaticLibrary) -> None:
+        tfeatures = (
+            features if isinstance(features, FeatureObject)
+            else tuple(sorted(features))
+        )
+        targets: List[build.StaticLibrary] = self._targets.setdefault(
+            tfeatures, cast(List[build.StaticLibrary], []))  # type: ignore
+        targets.append(target)
+
+class Module(NewExtensionModule):
+    INFO = ModuleInfo('features', '0.1.0')
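+    # A rough sketch of the intended meson.build usage (feature names,
+    # interest levels and compiler flags are hypothetical):
+    #
+    #   mod = import('features')
+    #   SSE = mod.new('SSE', 1, args: '-msse')
+    #   SSE2 = mod.new('SSE2', 2, implies: SSE, args: '-msse2')
+    #   test = mod.test(SSE2)   # returns [is_supported, result_dict]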
+    def __init__(self) -> None:
+        super().__init__()
+        self.methods.update({
+            'new': self.new_method,
+            'test': self.test_method,
+            'implicit': self.implicit_method,
+            'implicit_c': self.implicit_c_method,
+            'sort': self.sort_method,
+            'multi_targets': self.multi_targets_method,
+        })
+
+    def new_method(self, state: 'ModuleState',
+                   args: List['TYPE_var'],
+                   kwargs: 'TYPE_kwargs') -> FeatureObject:
+        return FeatureObject(state, args, kwargs)
+
+    def _cache_dict(self, state: 'ModuleState'
+                    ) -> Dict[str, 'TestResultKwArgs']:
+        coredata = state.environment.coredata
+        attr_name = 'module_features_cache'
+        if not hasattr(coredata, attr_name):
+            setattr(coredata, attr_name, {})
+        return getattr(coredata, attr_name, {})
+
+    def _get_cache(self, state: 'ModuleState', key: str
+                   ) -> Optional['TestResultKwArgs']:
+        return self._cache_dict(state).get(key)
+
+    def _set_cache(self, state: 'ModuleState', key: str,
+                   val: 'TestResultKwArgs') -> None:
+        self._cache_dict(state)[key] = val
+
+    @typed_pos_args('features.test', varargs=FeatureObject, min_varargs=1)
+    @typed_kwargs('features.test',
+        KwargInfo('compiler', (NoneType, Compiler)),
+        KwargInfo('anyfet', bool, default = False),
+        KwargInfo('cached', bool, default = True),
+        KwargInfo(
+            'force_args', (NoneType, str, ContainerTypeInfo(list, str)),
+            listify=True
+        ),
+    )
+    def test_method(self, state: 'ModuleState',
+                    args: Tuple[List[FeatureObject]],
+                    kwargs: 'TestKwArgs'
+                    ) -> List[Union[bool, 'TestResultKwArgs']]:
+
+        features = args[0]
+        features_set = set(features)
+        anyfet = kwargs['anyfet']
+        cached = kwargs['cached']
+        compiler = kwargs.get('compiler')
+        if not compiler:
+            compiler = get_compiler(state)
+
+        force_args = kwargs['force_args']
+        if force_args is not None:
+            # remove any empty strings
+            force_args = [a for a in force_args if a]
+
+        test_cached, test_result = self.cached_test(
+            state, features=features_set,
+            compiler=compiler,
+            anyfet=anyfet,
+            cached=cached,
+            force_args=force_args
+        )
+        if not test_result['is_supported']:
+            if test_result['is_disabled']:
+                label = mlog.yellow('disabled')
+            else:
+                label = mlog.yellow('Unsupported')
+        else:
+            label = mlog.green('Supported')
+            if anyfet:
+                unsupported = ' '.join([
+                    fet.name for fet in sorted(features_set)
+                    if fet.name not in test_result['features']
+                ])
+                if unsupported:
+                    label = mlog.green(f'Partial support, missing({unsupported})')
+
+        features_names = ' '.join([f.name for f in features])
+        log_prefix = f'Test features "{mlog.bold(features_names)}" :'
+        cached_msg = f'({mlog.blue("cached")})' if test_cached else ''
+        if not test_result['is_supported']:
+            mlog.log(log_prefix, label, 'due to', test_result['fail_reason'])
+        else:
+            mlog.log(log_prefix, label, cached_msg)
+        return [test_result['is_supported'], test_result]
+
+    def cached_test(self, state: 'ModuleState',
+                    features: Set[FeatureObject],
+                    compiler: 'Compiler',
+                    force_args: Optional[List[str]],
+                    anyfet: bool, cached: bool,
+                    _caller: Optional[Set[FeatureObject]] = None
+                    ) -> Tuple[bool, 'TestResultKwArgs']:
+
+        if cached:
+            test_hash = generate_hash(
+                sorted(features), compiler,
+                anyfet, force_args
+            )
+            test_result = self._get_cache(state, test_hash)
+            if test_result is not None:
+                return True, test_result
+
+        if anyfet:
+            test_func = self.test_any
+        else:
+            test_func = self.test
+
+        test_result = test_func(
+            state, features=features,
+            compiler=compiler,
+            force_args=force_args,
+            cached=cached,
+            _caller=_caller
+        )
+        if cached:
+            self._set_cache(state, test_hash, test_result)
+        return False, test_result
+
+    def test_any(self, state: 'ModuleState', features: Set[FeatureObject],
+                 compiler: 'Compiler',
+                 force_args: Optional[List[str]],
+                 cached: bool,
+                 # dummy; no need for a recursion guard
+                 _caller: Optional[Set[FeatureObject]] = None,
+                 ) -> 'TestResultKwArgs':
+
+        _, test_any_result = self.cached_test(
+            state, features=features,
+            compiler=compiler,
+            anyfet=False,
+            cached=cached,
+            force_args=force_args,
+        )
+        if test_any_result['is_supported']:
+            return test_any_result
+
+        all_features = sorted(FeatureObject.get_implicit_combine_multi(features))
+        features_any = set()
+        for fet in all_features:
+            _, test_any_result = self.cached_test(
+                state, features={fet,},
+                compiler=compiler,
+                cached=cached,
+                anyfet=False,
+                force_args=force_args,
+            )
+            if test_any_result['is_supported']:
+                features_any.add(fet)
+
+        _, test_any_result = self.cached_test(
+            state, features=features_any,
+            compiler=compiler,
+            cached=cached,
+            anyfet=False,
+            force_args=force_args,
+        )
+        return test_any_result
+
+    def test(self, state: 'ModuleState', features: Set[FeatureObject],
+             compiler: 'Compiler',
+             force_args: Optional[List[str]] = None,
+             cached: bool = True,
+             _caller: Optional[Set[FeatureObject]] = None
+             ) -> 'TestResultKwArgs':
+
+        implied_features = FeatureObject.get_implicit_multi(features)
+        all_features = sorted(implied_features.union(features))
+        # For multiple features, it is important to erase any features
+        # implied by another to avoid duplicate testing, since the
+        # implied features are already tested. We also use this set to
+        # generate a unique target name that can be used for
+        # multi-target builds.
+        prevalent_features = sorted(features.difference(implied_features))
+        if len(prevalent_features) == 0:
+            # This happens when all the features imply each other;
+            # fall back to the highest-ranked feature.
+            prevalent_features = sorted(features)[-1:]
+
+        prevalent_names = [fet.name for fet in prevalent_features]
+        # prepare the result dict
+        test_result: 'TestResultKwArgs' = {
+            'target_name': '__'.join(prevalent_names),
+            'prevalent_features': prevalent_names,
+            'features': [fet.name for fet in all_features],
+            'args': [],
+            'detect': [],
+            'defines': [],
+            'undefines': [],
+            'is_supported': True,
+            'is_disabled': False,
+            'fail_reason': '',
+        }
+        def fail_result(fail_reason: str, is_disabled: bool = False
+                        ) -> 'TestResultKwArgs':
+            test_result.update({
+                'features': [],
+                'args': [],
+                'detect': [],
+                'defines': [],
+                'undefines': [],
+                'is_supported': False,
+                'is_disabled': is_disabled,
+                'fail_reason': fail_reason,
+            })
+            return test_result
+
+        # check whether any of the prevalent features are disabled
+        for fet in prevalent_features:
+            if fet.disable:
+                return fail_result(
+                    f'{fet.name} is disabled due to "{fet.disable}"',
+                    True
+                )
+
+        # since we allow features to imply each other,
+        # items of `features` may be part of `implied_features`
+        if _caller is None:
+            _caller = set()
+        _caller = _caller.union(prevalent_features)
+        predecessor_features = implied_features.difference(_caller)
+        for fet in sorted(predecessor_features):
+            _, pred_result = self.cached_test(
+                state, features={fet,},
+                compiler=compiler,
+                cached=cached,
+                anyfet=False,
+                force_args=force_args,
+                _caller=_caller,
+            )
+            if not pred_result['is_supported']:
+                reason = f'Implied feature "{fet.name}" '
+                pred_disabled = pred_result['is_disabled']
+                if pred_disabled:
+                    fail_reason = reason + 'is disabled'
+                else:
+                    fail_reason = reason + 'is not supported'
+                return fail_result(fail_reason, pred_disabled)
+
+            for k in ['defines', 'undefines']:
+                def_values = test_result[k]  # type: ignore
+                pred_values = pred_result[k]  # type: ignore
+                def_values += [v for v in pred_values if v not in def_values]
+
+        # Sort based on the lowest interest to deal with conflicting attributes
+        # when combining all the attributes together
+        conflict_attrs = ['detect']
+        if force_args is None:
+            conflict_attrs += ['args']
+        else:
+            test_result['args'] = force_args
+
+        for fet in all_features:
+            for attr in conflict_attrs:
+                values: List[ConflictAttr] = getattr(fet, attr)
+                accumulate_values = test_result[attr]  # type: ignore
+                for conflict in values:
+                    if not conflict.match:
+                        accumulate_values.append(conflict.val)
+                        continue
+                    conflict_vals: List[str] = []
+                    # select the acc items based on the match
+                    new_acc: List[str] = []
+                    for acc in accumulate_values:
+                        # not affected by the match so we keep it
+                        if not conflict.match.match(acc):
+                            new_acc.append(acc)
+                            continue
+                        # no filter, so we skip it entirely
+                        if not conflict.mfilter:
+                            continue
+                        filter_val = conflict.mfilter.findall(acc)
+                        filter_val = [
+                            conflict.mjoin.join([i for i in val if i])
+                            if isinstance(val, tuple) else val
+                            for val in filter_val if val
+                        ]
+                        # no filter match, so we skip it entirely
+                        if not filter_val:
+                            continue
+                        conflict_vals.append(conflict.mjoin.join(filter_val))
+                    new_acc.append(conflict.val + conflict.mjoin.join(conflict_vals))
+                    test_result[attr] = new_acc  # type: ignore
+
+        test_args = compiler.has_multi_arguments
+        args = test_result['args']
+        if args:
+            supported_args, test_cached = test_args(args, state.environment)
+            if not supported_args:
+                return fail_result(
+                    f'Arguments "{", ".join(args)}" are not supported'
+                )
+
+        for fet in prevalent_features:
+            if fet.test_code:
+                _, tested_code, _ = test_code(
+                    state, compiler, args, fet.test_code
+                )
+                if not tested_code:
+                    return fail_result(
+                        f'Compiler fails against the test code of "{fet.name}"'
+                    )
+
+            test_result['defines'] += [fet.name] + fet.group
+            for extra_name, extra_test in fet.extra_tests.items():
+                _, tested_code, _ = test_code(
+                    state, compiler, args, extra_test
+                )
+                k = 'defines' if tested_code else 'undefines'
+                test_result[k].append(extra_name)  # type: ignore
+        return test_result
+
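+    # A rough sketch of the intended meson.build usage (target names,
+    # feature objects and the 'NPY_' prefix are hypothetical):
+    #
+    #   targets = mod.multi_targets('arith.dispatch.h', 'arith.c',
+    #                               dispatch: [AVX2, SSE41],
+    #                               baseline: [SSE2],
+    #                               prefix: 'NPY_')
+    #   lib = targets.static_lib('arith_dispatch')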
+    @permittedKwargs(build.known_stlib_kwargs | {
+        'dispatch', 'baseline', 'prefix', 'cached', 'keep_sort'
+    })
+    @typed_pos_args('features.multi_targets', str, min_varargs=1, varargs=(
+        str, File, build.CustomTarget, build.CustomTargetIndex,
+        build.GeneratedList, build.StructuredSources, build.ExtractedObjects,
+        build.BuildTarget
+    ))
+    @typed_kwargs('features.multi_targets',
+        KwargInfo(
+            'dispatch', (
+                ContainerTypeInfo(list, (FeatureObject, list)),
+            ),
+            default=[]
+        ),
+        KwargInfo(
+            'baseline', (
+                NoneType,
+                ContainerTypeInfo(list, FeatureObject)
+            )
+        ),
+        KwargInfo('prefix', str, default=''),
+        KwargInfo('compiler', (NoneType, Compiler)),
+        KwargInfo('cached', bool, default = True),
+        KwargInfo('keep_sort', bool, default = False),
+        allow_unknown=True
+    )
+    def multi_targets_method(self, state: 'ModuleState',
+                            args: Tuple[str], kwargs: 'TYPE_kwargs'
+                            ) -> TargetsObject:
+        config_name = args[0]
+        sources = args[1]  # type: ignore
+        dispatch: List[Union[FeatureObject, List[FeatureObject]]] = (
+            kwargs.pop('dispatch') # type: ignore
+        )
+        baseline: Optional[List[FeatureObject]] = (
+            kwargs.pop('baseline')  # type: ignore
+        )
+        prefix: str = kwargs.pop('prefix')  # type: ignore
+        cached: bool = kwargs.pop('cached')  # type: ignore
+        compiler: Optional[Compiler] = kwargs.pop('compiler')  # type: ignore
+        if not compiler:
+            compiler = get_compiler(state)
+
+        baseline_features: Set[FeatureObject] = set()
+        has_baseline = baseline is not None
+        if has_baseline:
+            baseline_features = FeatureObject.get_implicit_combine_multi(baseline)
+            _, baseline_test_result = self.cached_test(
+                state, features=set(baseline),
+                anyfet=True, cached=cached,
+                compiler=compiler,
+                force_args=None
+            )
+
+        enabled_targets_names: List[str] = []
+        enabled_targets_features: List[Union[
+            FeatureObject, List[FeatureObject]
+        ]] = []
+        enabled_targets_tests: List['TestResultKwArgs'] = []
+        skipped_targets: List[Tuple[
+            Union[FeatureObject, List[FeatureObject]], str
+        ]] = []
+        for d in dispatch:
+            if isinstance(d, FeatureObject):
+                target = {d,}
+                is_base_part = d in baseline_features
+            else:
+                target = set(d)
+                is_base_part = all(f in baseline_features for f in d)
+
+            if is_base_part:
+                skipped_targets.append((d, "part of baseline features"))
+                continue
+            _, test_result = self.cached_test(
+                state=state, features=target,
+                anyfet=False, cached=cached,
+                compiler=compiler,
+                force_args=None
+            )
+            if not test_result['is_supported']:
+                skipped_targets.append(
+                    (d, test_result['fail_reason'])
+                )
+                continue
+            target_name = test_result['target_name']
+            if target_name in enabled_targets_names:
+                skipped_targets.append((
+                    d, f'Duplicate target name "{target_name}"'
+                ))
+                continue
+            enabled_targets_names.append(target_name)
+            enabled_targets_features.append(d)
+            enabled_targets_tests.append(test_result)
+
+        if not kwargs.pop('keep_sort'):
+            enabled_targets_sorted = FeatureObject.sorted_multi(enabled_targets_features, reverse=True)
+            if enabled_targets_features != enabled_targets_sorted:
+                log_targets = FeatureObject.features_names(enabled_targets_features)
+                log_targets_sorted = FeatureObject.features_names(enabled_targets_sorted)
+                raise MesonException(
+                    'The enabled dispatch features should be sorted based on the highest interest:\n'
+                    f'Expected: {log_targets_sorted}\n'
+                    f'Got: {log_targets}\n'
+                    'Note: This validation may not be accurate when dealing with multi-features '
+                    'per single target.\n'
+                    'You can keep the current sort and bypass this validation by passing '
+                    'the argument "keep_sort: true".'
+                )
+
+        config_path = self.gen_config(
+            state,
+            config_name=config_name,
+            targets=enabled_targets_tests,
+            prefix=prefix,
+            has_baseline=has_baseline
+        )
+        mtargets_obj = TargetsObject()
+        if has_baseline:
+            mtargets_obj.add_baseline_target(
+                self.gen_target(
+                    state=state, config_name=config_name,
+                    sources=sources, test_result=baseline_test_result,
+                    prefix=prefix, is_baseline=True,
+                    stlib_kwargs=kwargs
+                )
+            )
+        for features_objects, target_test in zip(enabled_targets_features, enabled_targets_tests):
+            static_lib = self.gen_target(
+                state=state, config_name=config_name,
+                sources=sources, test_result=target_test,
+                prefix=prefix, is_baseline=False,
+                stlib_kwargs=kwargs
+            )
+            mtargets_obj.add_target(features_objects, static_lib)
+
+        skipped_targets_info: List[str] = []
+        skipped_tab = ' '*4
+        for skipped, reason in skipped_targets:
+            name = ', '.join(
+                [skipped.name] if isinstance(skipped, FeatureObject)
+                else [fet.name for fet in skipped]
+            )
+            skipped_targets_info.append(f'{skipped_tab}"{name}": "{reason}"')
+
+        target_info: Callable[[str, 'TestResultKwArgs'], str] = lambda target_name, test_result: (
+            f'{skipped_tab}"{target_name}":\n' + '\n'.join([
+                f'{skipped_tab*2}"{k}": {v}'
+                for k, v in test_result.items()
+            ])
+        )
+        enabled_targets_info: List[str] = [
+            target_info(test_result['target_name'], test_result)
+            for test_result in enabled_targets_tests
+        ]
+        if has_baseline:
+            enabled_targets_info.append(target_info(
+                f'baseline({baseline_test_result["target_name"]})',
+                baseline_test_result
+            ))
+            enabled_targets_names += ['baseline']
+
+        mlog.log(
+            f'Generating multi-targets for "{mlog.bold(config_name)}"',
+            '\n  Enabled targets:',
+            mlog.green(', '.join(enabled_targets_names))
+        )
+        mlog.debug(
+            f'Generating multi-targets for "{config_name}"',
+            '\n  Config path:', config_path,
+            '\n  Enabled targets:',
+            '\n'+'\n'.join(enabled_targets_info),
+            '\n  Skipped targets:',
+            '\n'+'\n'.join(skipped_targets_info),
+            '\n'
+        )
+        return mtargets_obj
+
+    def gen_target(self, state: 'ModuleState', config_name: str,
+                   sources: List[Union[
+                      str, File, build.CustomTarget, build.CustomTargetIndex,
+                      build.GeneratedList, build.StructuredSources, build.ExtractedObjects,
+                      build.BuildTarget
+                   ]],
+                   test_result: 'TestResultKwArgs',
+                   prefix: str, is_baseline: bool,
+                   stlib_kwargs: Dict[str, Any]
+                   ) -> build.StaticLibrary:
+
+        target_name = 'baseline' if is_baseline else test_result['target_name']
+        args = [f'-D{prefix}HAVE_{df}' for df in test_result['defines']]
+        args += test_result['args']
+        if is_baseline:
+            args.append(f'-D{prefix}MTARGETS_BASELINE')
+        else:
+            args.append(f'-D{prefix}MTARGETS_CURRENT={target_name}')
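+        # e.g. for a hypothetical prefix 'NPY_' and an 'AVX2' target this
+        # yields flags like:
+        #   -DNPY_HAVE_AVX2 -mavx2 -DNPY_MTARGETS_CURRENT=AVX2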
+        stlib_kwargs = stlib_kwargs.copy()
+        stlib_kwargs.update({
+            'sources': sources,
+            'c_args': stlib_kwargs.get('c_args', []) + args,
+            'cpp_args': stlib_kwargs.get('cpp_args', []) + args
+        })
+        static_lib: build.StaticLibrary = state._interpreter.func_static_lib(
+            None, [config_name + '_' + target_name],
+            stlib_kwargs
+        )
+        return static_lib
+
+    def gen_config(self, state: 'ModuleState', config_name: str,
+                   targets: List['TestResultKwArgs'],
+                   prefix: str, has_baseline: bool
+                   ) -> str:
+
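+        # Build one dispatch call per enabled target. At C preprocessing time
+        # each call expands to EXEC_CB guarded by the runtime TEST_CB checks
+        # for the features the target was built with; e.g. (illustrative, for
+        # a target "AVX2" detected via the AVX and AVX2 features):
+        #   <prefix>_MTARGETS_EXPAND(EXEC_CB((TEST_CB(AVX)&&TEST_CB(AVX2)), AVX2, __VA_ARGS__))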
+        dispatch_calls: List[str] = []
+        for test in targets:
+            c_detect = '&&'.join([
+                f'TEST_CB({d})' for d in test['detect']
+            ])
+            if c_detect:
+                c_detect = f'({c_detect})'
+            else:
+                c_detect = '1'
+            dispatch_calls.append(
+                f'{prefix}_MTARGETS_EXPAND('
+                    f'EXEC_CB({c_detect}, {test["target_name"]}, __VA_ARGS__)'
+                ')'
+            )
+
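+        # Assemble the generated dispatch header. Consumers invoke
+        # <prefix>MTARGETS_CONF_DISPATCH(TEST_CB, EXEC_CB, ...) to select the
+        # best runtime-supported target, and
+        # <prefix>MTARGETS_CONF_BASELINE(EXEC_CB, ...) for the baseline
+        # fallback (a no-op when no baseline was configured).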
+        config_file = [
+            '/* Autogenerated by the Meson features module. */',
+            '/* Do not edit, your changes will be lost. */',
+            '',
+            f'#undef {prefix}_MTARGETS_EXPAND',
+            f'#define {prefix}_MTARGETS_EXPAND(X) X',
+            '',
+            f'#undef {prefix}MTARGETS_CONF_BASELINE',
+            f'#define {prefix}MTARGETS_CONF_BASELINE(EXEC_CB, ...) ' + (
+                f'{prefix}_MTARGETS_EXPAND(EXEC_CB(__VA_ARGS__))'
+                if has_baseline else ''
+            ),
+            '',
+            f'#undef {prefix}MTARGETS_CONF_DISPATCH',
+            f'#define {prefix}MTARGETS_CONF_DISPATCH(TEST_CB, EXEC_CB, ...) \\',
+            ' \\\n'.join(dispatch_calls),
+            '',
+        ]
+
+        build_dir = state.environment.build_dir
+        sub_dir = state.subdir
+        if sub_dir:
+            build_dir = os.path.join(build_dir, sub_dir)
+        config_path = os.path.abspath(os.path.join(build_dir, config_name))
+
+        os.makedirs(os.path.dirname(config_path), exist_ok=True)
+        with open(config_path, "w", encoding='utf-8') as cout:
+            cout.write('\n'.join(config_file))
+
+        return config_path
+
+    @typed_pos_args('features.sort', varargs=FeatureObject, min_varargs=1)
+    @typed_kwargs('features.sort',
+        KwargInfo('reverse', bool, default = False),
+    )
+    def sort_method(self, state: 'ModuleState',
+                    args: Tuple[List[FeatureObject]],
+                    kwargs: Dict[str, bool]
+                    ) -> List[FeatureObject]:
+        return sorted(args[0], reverse=kwargs['reverse'])
+
+    @typed_pos_args('features.implicit', varargs=FeatureObject, min_varargs=1)
+    @noKwargs
+    def implicit_method(self, state: 'ModuleState',
+                        args: Tuple[List[FeatureObject]],
+                        kwargs: 'TYPE_kwargs'
+                        ) -> List[FeatureObject]:
+
+        features = args[0]
+        return sorted(FeatureObject.get_implicit_multi(features))
+
+    @typed_pos_args('features.implicit_c', varargs=FeatureObject, min_varargs=1)
+    @noKwargs
+    def implicit_c_method(self, state: 'ModuleState',
+                          args: Tuple[List[FeatureObject]],
+                          kwargs: 'TYPE_kwargs'
+                          ) -> List[FeatureObject]:
+        return sorted(FeatureObject.get_implicit_combine_multi(args[0]))
diff --git a/vendored-meson/meson/mesonbuild/modules/features/utils.py b/vendored-meson/meson/mesonbuild/modules/features/utils.py
new file mode 100644
index 000000000000..74cb45d8648b
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/modules/features/utils.py
@@ -0,0 +1,39 @@
+# Copyright (c) 2023, NumPy Developers.
+import hashlib
+from typing import Tuple, List, Union, Any, TYPE_CHECKING
+from ...mesonlib import MesonException, MachineChoice
+
+if TYPE_CHECKING:
+    from ...compilers import Compiler
+    from ...mesonlib import File
+    from .. import ModuleState
+
+def get_compiler(state: 'ModuleState') -> 'Compiler':
+    for_machine = MachineChoice.BUILD
+    clist = state.environment.coredata.compilers[for_machine]
+    for cstr in ('c', 'cpp'):
+        try:
+            compiler = clist[cstr]
+            break
+        except KeyError:
+            continue
+    else:
+        raise MesonException(
+            'Unable to get a compiler for the C or C++ language; '
+            'try specifying a valid C/C++ compiler via the "compiler" option.'
+        )
+    return compiler
+
+def test_code(state: 'ModuleState', compiler: 'Compiler',
+              args: List[str], code: 'Union[str, File]'
+              ) -> Tuple[bool, bool, str]:
+    # TODO: Add option to treat warnings as errors
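+    # Returns (cached, success, stderr): whether the result came from the
+    # compiler cache, whether the snippet compiled cleanly, and any
+    # diagnostics that were emitted.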
+    with compiler.cached_compile(
+        code, state.environment.coredata, extra_args=args
+    ) as p:
+        return p.cached, p.returncode == 0, p.stderr
+
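+# Stable identifier derived from the arguments: feeds the str() of every
+# argument into SHA-1 and returns the hex digest.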
+def generate_hash(*args: Any) -> str:
+    hasher = hashlib.sha1()
+    for a in args:
+        hasher.update(bytes(str(a), encoding='utf-8'))
+    return hasher.hexdigest()
diff --git a/vendored-meson/meson/mesonbuild/modules/fs.py b/vendored-meson/meson/mesonbuild/modules/fs.py
new file mode 100644
index 000000000000..7d969958838d
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/modules/fs.py
@@ -0,0 +1,315 @@
+# Copyright 2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import annotations
+from pathlib import Path, PurePath, PureWindowsPath
+import hashlib
+import os
+import typing as T
+
+from . import ExtensionModule, ModuleReturnValue, ModuleInfo
+from .. import mlog
+from ..build import CustomTarget, InvalidArguments
+from ..interpreter.type_checking import INSTALL_KW, INSTALL_MODE_KW, INSTALL_TAG_KW, NoneType
+from ..interpreterbase import FeatureNew, KwargInfo, typed_kwargs, typed_pos_args, noKwargs
+from ..mesonlib import (
+    File,
+    MesonException,
+    has_path_sep,
+    path_is_in_root,
+)
+
+if T.TYPE_CHECKING:
+    from . import ModuleState
+    from ..interpreter import Interpreter
+    from ..mesonlib import FileOrString, FileMode
+
+    from typing_extensions import TypedDict
+
+    class ReadKwArgs(TypedDict):
+        """Keyword Arguments for fs.read."""
+
+        encoding: str
+
+    class CopyKw(TypedDict):
+
+        """Kwargs for fs.copy"""
+
+        install: bool
+        install_dir: T.Optional[str]
+        install_mode: FileMode
+        install_tag: T.Optional[str]
+
+
+class FSModule(ExtensionModule):
+
+    INFO = ModuleInfo('fs', '0.53.0')
+
+    def __init__(self, interpreter: 'Interpreter') -> None:
+        super().__init__(interpreter)
+        self.methods.update({
+            'expanduser': self.expanduser,
+            'is_absolute': self.is_absolute,
+            'as_posix': self.as_posix,
+            'exists': self.exists,
+            'is_symlink': self.is_symlink,
+            'is_file': self.is_file,
+            'is_dir': self.is_dir,
+            'hash': self.hash,
+            'size': self.size,
+            'is_samepath': self.is_samepath,
+            'replace_suffix': self.replace_suffix,
+            'parent': self.parent,
+            'name': self.name,
+            'stem': self.stem,
+            'read': self.read,
+            'copyfile': self.copyfile,
+        })
+
+    def _absolute_dir(self, state: 'ModuleState', arg: 'FileOrString') -> Path:
+        """
+        make an absolute path from a relative path, WITHOUT resolving symlinks
+        """
+        if isinstance(arg, File):
+            return Path(arg.absolute_path(state.source_root, self.interpreter.environment.get_build_dir()))
+        return Path(state.source_root) / Path(state.subdir) / Path(arg).expanduser()
+
+    def _resolve_dir(self, state: 'ModuleState', arg: 'FileOrString') -> Path:
+        """
+        resolves symlinks and makes absolute a directory relative to calling meson.build,
+        if not already absolute
+        """
+        path = self._absolute_dir(state, arg)
+        try:
+            # accommodate unresolvable paths e.g. symlink loops
+            path = path.resolve()
+        except Exception:
+            # return the best we could do
+            pass
+        return path
+
+    @noKwargs
+    @FeatureNew('fs.expanduser', '0.54.0')
+    @typed_pos_args('fs.expanduser', str)
+    def expanduser(self, state: 'ModuleState', args: T.Tuple[str], kwargs: T.Dict[str, T.Any]) -> str:
+        return str(Path(args[0]).expanduser())
+
+    @noKwargs
+    @FeatureNew('fs.is_absolute', '0.54.0')
+    @typed_pos_args('fs.is_absolute', (str, File))
+    def is_absolute(self, state: 'ModuleState', args: T.Tuple['FileOrString'], kwargs: T.Dict[str, T.Any]) -> bool:
+        if isinstance(args[0], File):
+            FeatureNew('fs.is_absolute_file', '0.59.0').use(state.subproject)
+        return PurePath(str(args[0])).is_absolute()
+
+    @noKwargs
+    @FeatureNew('fs.as_posix', '0.54.0')
+    @typed_pos_args('fs.as_posix', str)
+    def as_posix(self, state: 'ModuleState', args: T.Tuple[str], kwargs: T.Dict[str, T.Any]) -> str:
+        """
+        this function assumes you are passing a Windows path, even if on a Unix-like system
+        and so ALL '\' are turned to '/', even if you meant to escape a character
+        """
+        return PureWindowsPath(args[0]).as_posix()
+
+    @noKwargs
+    @typed_pos_args('fs.exists', str)
+    def exists(self, state: 'ModuleState', args: T.Tuple[str], kwargs: T.Dict[str, T.Any]) -> bool:
+        return self._resolve_dir(state, args[0]).exists()
+
+    @noKwargs
+    @typed_pos_args('fs.is_symlink', (str, File))
+    def is_symlink(self, state: 'ModuleState', args: T.Tuple['FileOrString'], kwargs: T.Dict[str, T.Any]) -> bool:
+        if isinstance(args[0], File):
+            FeatureNew('fs.is_symlink_file', '0.59.0').use(state.subproject)
+        return self._absolute_dir(state, args[0]).is_symlink()
+
+    @noKwargs
+    @typed_pos_args('fs.is_file', str)
+    def is_file(self, state: 'ModuleState', args: T.Tuple[str], kwargs: T.Dict[str, T.Any]) -> bool:
+        return self._resolve_dir(state, args[0]).is_file()
+
+    @noKwargs
+    @typed_pos_args('fs.is_dir', str)
+    def is_dir(self, state: 'ModuleState', args: T.Tuple[str], kwargs: T.Dict[str, T.Any]) -> bool:
+        return self._resolve_dir(state, args[0]).is_dir()
+
+    @noKwargs
+    @typed_pos_args('fs.hash', (str, File), str)
+    def hash(self, state: 'ModuleState', args: T.Tuple['FileOrString', str], kwargs: T.Dict[str, T.Any]) -> str:
+        if isinstance(args[0], File):
+            FeatureNew('fs.hash_file', '0.59.0').use(state.subproject)
+        file = self._resolve_dir(state, args[0])
+        if not file.is_file():
+            raise MesonException(f'{file} is not a file and therefore cannot be hashed')
+        try:
+            h = hashlib.new(args[1])
+        except ValueError:
+            raise MesonException('hash algorithm {} is not available'.format(args[1]))
+        mlog.debug('computing {} sum of {} size {} bytes'.format(args[1], file, file.stat().st_size))
+        h.update(file.read_bytes())
+        return h.hexdigest()
+
+    @noKwargs
+    @typed_pos_args('fs.size', (str, File))
+    def size(self, state: 'ModuleState', args: T.Tuple['FileOrString'], kwargs: T.Dict[str, T.Any]) -> int:
+        if isinstance(args[0], File):
+            FeatureNew('fs.size_file', '0.59.0').use(state.subproject)
+        file = self._resolve_dir(state, args[0])
+        if not file.is_file():
+            raise MesonException(f'{file} is not a file and therefore cannot be sized')
+        try:
+            return file.stat().st_size
+        except ValueError:
+            raise MesonException('{} size could not be determined'.format(args[0]))
+
+    @noKwargs
+    @typed_pos_args('fs.is_samepath', (str, File), (str, File))
+    def is_samepath(self, state: 'ModuleState', args: T.Tuple['FileOrString', 'FileOrString'], kwargs: T.Dict[str, T.Any]) -> bool:
+        if isinstance(args[0], File) or isinstance(args[1], File):
+            FeatureNew('fs.is_samepath_file', '0.59.0').use(state.subproject)
+        file1 = self._resolve_dir(state, args[0])
+        file2 = self._resolve_dir(state, args[1])
+        if not file1.exists():
+            return False
+        if not file2.exists():
+            return False
+        try:
+            return file1.samefile(file2)
+        except OSError:
+            return False
+
+    @noKwargs
+    @typed_pos_args('fs.replace_suffix', (str, File), str)
+    def replace_suffix(self, state: 'ModuleState', args: T.Tuple['FileOrString', str], kwargs: T.Dict[str, T.Any]) -> str:
+        if isinstance(args[0], File):
+            FeatureNew('fs.replace_suffix_file', '0.59.0').use(state.subproject)
+        original = PurePath(str(args[0]))
+        new = original.with_suffix(args[1])
+        return str(new)
+
+    @noKwargs
+    @typed_pos_args('fs.parent', (str, File))
+    def parent(self, state: 'ModuleState', args: T.Tuple['FileOrString'], kwargs: T.Dict[str, T.Any]) -> str:
+        if isinstance(args[0], File):
+            FeatureNew('fs.parent_file', '0.59.0').use(state.subproject)
+        original = PurePath(str(args[0]))
+        new = original.parent
+        return str(new)
+
+    @noKwargs
+    @typed_pos_args('fs.name', (str, File))
+    def name(self, state: 'ModuleState', args: T.Tuple['FileOrString'], kwargs: T.Dict[str, T.Any]) -> str:
+        if isinstance(args[0], File):
+            FeatureNew('fs.name_file', '0.59.0').use(state.subproject)
+        original = PurePath(str(args[0]))
+        new = original.name
+        return str(new)
+
+    @noKwargs
+    @typed_pos_args('fs.stem', (str, File))
+    @FeatureNew('fs.stem', '0.54.0')
+    def stem(self, state: 'ModuleState', args: T.Tuple['FileOrString'], kwargs: T.Dict[str, T.Any]) -> str:
+        if isinstance(args[0], File):
+            FeatureNew('fs.stem_file', '0.59.0').use(state.subproject)
+        original = PurePath(str(args[0]))
+        new = original.stem
+        return str(new)
+
+    @FeatureNew('fs.read', '0.57.0')
+    @typed_pos_args('fs.read', (str, File))
+    @typed_kwargs('fs.read', KwargInfo('encoding', str, default='utf-8'))
+    def read(self, state: 'ModuleState', args: T.Tuple['FileOrString'], kwargs: 'ReadKwArgs') -> str:
+        """Read a file from the source tree and return its value as a decoded
+        string.
+
+        If the encoding is not specified, the file is assumed to be utf-8
+        encoded. Paths must be relative by default (to prevent accidents) and
+        are forbidden to be read from the build directory (to prevent build
+        loops)
+        """
+        path = args[0]
+        encoding = kwargs['encoding']
+        src_dir = self.interpreter.environment.source_dir
+        sub_dir = self.interpreter.subdir
+        build_dir = self.interpreter.environment.get_build_dir()
+
+        if isinstance(path, File):
+            if path.is_built:
+                raise MesonException(
+                    'fs.read does not accept built files() objects')
+            path = os.path.join(src_dir, path.relative_name())
+        else:
+            if sub_dir:
+                src_dir = os.path.join(src_dir, sub_dir)
+            path = os.path.join(src_dir, path)
+
+        path = os.path.abspath(path)
+        if path_is_in_root(Path(path), Path(build_dir), resolve=True):
+            raise MesonException('path must not be in the build tree')
+        try:
+            with open(path, encoding=encoding) as f:
+                data = f.read()
+        except UnicodeDecodeError:
+            raise MesonException(f'decoding failed for {path}')
+        # Reconfigure when this file changes, as it can contain data used by
+        # any part of the build configuration, e.g.
+        # `project(..., version: fs.read('VERSION'))` or `configure_file(...)`.
+        self.interpreter.add_build_def_file(path)
+        return data
+
+    @FeatureNew('fs.copyfile', '0.64.0')
+    @typed_pos_args('fs.copyfile', (File, str), optargs=[str])
+    @typed_kwargs(
+        'fs.copyfile',
+        INSTALL_KW,
+        INSTALL_MODE_KW,
+        INSTALL_TAG_KW,
+        KwargInfo('install_dir', (str, NoneType)),
+    )
+    def copyfile(self, state: ModuleState, args: T.Tuple[FileOrString, T.Optional[str]],
+                 kwargs: CopyKw) -> ModuleReturnValue:
+        """Copy a file into the build directory at build time."""
+        if kwargs['install'] and not kwargs['install_dir']:
+            raise InvalidArguments('"install_dir" must be specified when "install" is true')
+
+        src = self.interpreter.source_strings_to_files([args[0]])[0]
+
+        # The input is allowed to have path separators, but the output may not,
+        # so use the basename for the default case
+        dest = args[1] if args[1] else os.path.basename(src.fname)
+        if has_path_sep(dest):
+            raise InvalidArguments('Destination path may not have path separators')
+
+        ct = CustomTarget(
+            dest,
+            state.subdir,
+            state.subproject,
+            state.environment,
+            state.environment.get_build_command() + ['--internal', 'copy', '@INPUT@', '@OUTPUT@'],
+            [src],
+            [dest],
+            build_by_default=True,
+            install=kwargs['install'],
+            install_dir=[kwargs['install_dir']],
+            install_mode=kwargs['install_mode'],
+            install_tag=[kwargs['install_tag']],
+            backend=state.backend,
+        )
+
+        return ModuleReturnValue(ct, [ct])
+
+
+def initialize(*args: T.Any, **kwargs: T.Any) -> FSModule:
+    return FSModule(*args, **kwargs)
diff --git a/vendored-meson/meson/mesonbuild/modules/gnome.py b/vendored-meson/meson/mesonbuild/modules/gnome.py
new file mode 100644
index 000000000000..7a9acea0009f
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/modules/gnome.py
@@ -0,0 +1,2163 @@
+# Copyright 2015-2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+'''This module provides helper functions for Gnome/GLib related
+functionality such as gobject-introspection, gresources and gtk-doc'''
+from __future__ import annotations
+
+import copy
+import itertools
+import functools
+import os
+import subprocess
+import textwrap
+import typing as T
+
+from . import (
+    ExtensionModule, GirTarget, GResourceHeaderTarget, GResourceTarget, ModuleInfo,
+    ModuleReturnValue, TypelibTarget, VapiTarget,
+)
+from .. import build
+from .. import interpreter
+from .. import mesonlib
+from .. import mlog
+from ..build import CustomTarget, CustomTargetIndex, Executable, GeneratedList, InvalidArguments
+from ..dependencies import Dependency, InternalDependency
+from ..dependencies.pkgconfig import PkgConfigDependency
+from ..interpreter.type_checking import DEPENDS_KW, DEPEND_FILES_KW, ENV_KW, INSTALL_DIR_KW, INSTALL_KW, NoneType, SOURCES_KW, in_set_validator
+from ..interpreterbase import noPosargs, noKwargs, FeatureNew, FeatureDeprecated
+from ..interpreterbase import typed_kwargs, KwargInfo, ContainerTypeInfo
+from ..interpreterbase.decorators import typed_pos_args
+from ..mesonlib import (
+    MachineChoice, MesonException, OrderedSet, Popen_safe, join_args, quote_arg
+)
+from ..programs import OverrideProgram
+from ..scripts.gettext import read_linguas
+
+if T.TYPE_CHECKING:
+    from typing_extensions import Literal, TypedDict
+
+    from . import ModuleState
+    from ..build import BuildTarget
+    from ..compilers import Compiler
+    from ..interpreter import Interpreter
+    from ..interpreterbase import TYPE_var, TYPE_kwargs
+    from ..mesonlib import FileOrString
+    from ..programs import ExternalProgram
+
+    class PostInstall(TypedDict):
+        glib_compile_schemas: bool
+        gio_querymodules: T.List[str]
+        gtk_update_icon_cache: bool
+        update_desktop_database: bool
+        update_mime_database: bool
+
+    class CompileSchemas(TypedDict):
+
+        build_by_default: bool
+        depend_files: T.List[FileOrString]
+
+    class Yelp(TypedDict):
+
+        languages: T.List[str]
+        media: T.List[str]
+        sources: T.List[str]
+        symlink_media: bool
+
+    class CompileResources(TypedDict):
+
+        build_by_default: bool
+        c_name: T.Optional[str]
+        dependencies: T.List[T.Union[mesonlib.File, build.CustomTarget, build.CustomTargetIndex]]
+        export: bool
+        extra_args: T.List[str]
+        gresource_bundle: bool
+        install: bool
+        install_dir: T.Optional[str]
+        install_header: bool
+        source_dir: T.List[str]
+
+    class GenerateGir(TypedDict):
+
+        build_by_default: bool
+        dependencies: T.List[Dependency]
+        export_packages: T.List[str]
+        extra_args: T.List[str]
+        fatal_warnings: bool
+        header: T.List[str]
+        identifier_prefix: T.List[str]
+        include_directories: T.List[T.Union[build.IncludeDirs, str]]
+        includes: T.List[T.Union[str, GirTarget]]
+        install: bool
+        install_dir_gir: T.Optional[str]
+        install_dir_typelib: T.Optional[str]
+        link_with: T.List[T.Union[build.SharedLibrary, build.StaticLibrary]]
+        namespace: str
+        nsversion: str
+        sources: T.List[T.Union[FileOrString, build.GeneratedTypes]]
+        symbol_prefix: T.List[str]
+
+    class GtkDoc(TypedDict):
+
+        src_dir: T.List[T.Union[str, build.IncludeDirs]]
+        main_sgml: str
+        main_xml: str
+        module_version: str
+        namespace: str
+        mode: Literal['xml', 'sgml', 'auto', 'none']
+        html_args: T.List[str]
+        scan_args: T.List[str]
+        scanobjs_args: T.List[str]
+        fixxref_args: T.List[str]
+        mkdb_args: T.List[str]
+        content_files: T.List[T.Union[build.GeneratedTypes, FileOrString]]
+        ignore_headers: T.List[str]
+        install_dir: T.List[str]
+        check: bool
+        install: bool
+        gobject_typesfile: T.List[FileOrString]
+        html_assets: T.List[FileOrString]
+        expand_content_files: T.List[FileOrString]
+        c_args: T.List[str]
+        include_directories: T.List[T.Union[str, build.IncludeDirs]]
+        dependencies: T.List[T.Union[Dependency, build.SharedLibrary, build.StaticLibrary]]
+
+    class GdbusCodegen(TypedDict):
+
+        sources: T.List[FileOrString]
+        extra_args: T.List[str]
+        interface_prefix: T.Optional[str]
+        namespace: T.Optional[str]
+        object_manager: bool
+        build_by_default: bool
+        annotations: T.List[T.List[str]]
+        install_header: bool
+        install_dir: T.Optional[str]
+        docbook: T.Optional[str]
+        autocleanup: Literal['all', 'none', 'objects', 'default']
+
+    class GenMarshal(TypedDict):
+
+        build_always: T.Optional[str]
+        build_always_stale: T.Optional[bool]
+        build_by_default: T.Optional[bool]
+        depend_files: T.List[mesonlib.File]
+        extra_args: T.List[str]
+        install_dir: T.Optional[str]
+        install_header: bool
+        internal: bool
+        nostdinc: bool
+        prefix: T.Optional[str]
+        skip_source: bool
+        sources: T.List[FileOrString]
+        stdinc: bool
+        valist_marshallers: bool
+
+    class GenerateVapi(TypedDict):
+
+        sources: T.List[T.Union[str, GirTarget]]
+        install_dir: T.Optional[str]
+        install: bool
+        vapi_dirs: T.List[str]
+        metadata_dirs: T.List[str]
+        gir_dirs: T.List[str]
+        packages: T.List[T.Union[str, InternalDependency]]
+
+    class _MkEnumsCommon(TypedDict):
+
+        sources: T.List[T.Union[FileOrString, build.GeneratedTypes]]
+        install_header: bool
+        install_dir: T.Optional[str]
+        identifier_prefix: T.Optional[str]
+        symbol_prefix: T.Optional[str]
+
+    class MkEnumsSimple(_MkEnumsCommon):
+
+        header_prefix: str
+        decorator: str
+        function_prefix: str
+        body_prefix: str
+
+    class MkEnums(_MkEnumsCommon):
+
+        c_template: T.Optional[FileOrString]
+        h_template: T.Optional[FileOrString]
+        comments: T.Optional[str]
+        eprod: T.Optional[str]
+        fhead: T.Optional[str]
+        fprod: T.Optional[str]
+        ftail: T.Optional[str]
+        vhead: T.Optional[str]
+        vprod: T.Optional[str]
+        vtail: T.Optional[str]
+        depends: T.List[T.Union[BuildTarget, CustomTarget, CustomTargetIndex]]
+
+
+# Differs from the CustomTarget version in that it straight defaults to True
+_BUILD_BY_DEFAULT: KwargInfo[bool] = KwargInfo(
+    'build_by_default', bool, default=True,
+)
+
+_EXTRA_ARGS_KW: KwargInfo[T.List[str]] = KwargInfo(
+    'extra_args',
+    ContainerTypeInfo(list, str),
+    default=[],
+    listify=True,
+)
+
+_MK_ENUMS_COMMON_KWS: T.List[KwargInfo] = [
+    INSTALL_KW.evolve(name='install_header'),
+    INSTALL_DIR_KW,
+    KwargInfo(
+        'sources',
+        ContainerTypeInfo(list, (str, mesonlib.File, build.CustomTarget, build.CustomTargetIndex, build.GeneratedList)),
+        listify=True,
+        required=True,
+    ),
+    KwargInfo('identifier_prefix', (str, NoneType)),
+    KwargInfo('symbol_prefix', (str, NoneType)),
+]
+
+def annotations_validator(annotations: T.List[T.Union[str, T.List[str]]]) -> T.Optional[str]:
+    """Validate gdbus-codegen annotations argument"""
+
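+    # Two shapes are accepted: a single flat [ELEMENT, KEY, VALUE] triple, or
+    # a list of such triples.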
+    badlist = 'must be made up of 3 strings for ELEMENT, KEY, and VALUE'
+
+    if not annotations:
+        return None
+    elif all(isinstance(annot, str) for annot in annotations):
+        if len(annotations) == 3:
+            return None
+        else:
+            return badlist
+    elif not all(isinstance(annot, list) for annot in annotations):
+        for c, annot in enumerate(annotations):
+            if not isinstance(annot, list):
+                return f'element {c+1} must be a list'
+    else:
+        for c, annot in enumerate(annotations):
+            if len(annot) != 3 or not all(isinstance(i, str) for i in annot):
+                return f'element {c+1} {badlist}'
+    return None
+
+# gresource compilation is broken due to the way
+# the resource compiler and Ninja clash about it
+#
+# https://github.com/ninja-build/ninja/issues/1184
+# https://bugzilla.gnome.org/show_bug.cgi?id=774368
+gresource_dep_needed_version = '>= 2.51.1'
+
+class GnomeModule(ExtensionModule):
+
+    INFO = ModuleInfo('gnome')
+
+    def __init__(self, interpreter: 'Interpreter') -> None:
+        super().__init__(interpreter)
+        self.gir_dep: T.Optional[Dependency] = None
+        self.giscanner: T.Optional[T.Union[ExternalProgram, build.Executable, OverrideProgram]] = None
+        self.gicompiler: T.Optional[T.Union[ExternalProgram, build.Executable, OverrideProgram]] = None
+        self.install_glib_compile_schemas = False
+        self.install_gio_querymodules: T.List[str] = []
+        self.install_gtk_update_icon_cache = False
+        self.install_update_desktop_database = False
+        self.install_update_mime_database = False
+        self.devenv: T.Optional[build.EnvironmentVariables] = None
+        self.native_glib_version: T.Optional[str] = None
+        self.methods.update({
+            'post_install': self.post_install,
+            'compile_resources': self.compile_resources,
+            'generate_gir': self.generate_gir,
+            'compile_schemas': self.compile_schemas,
+            'yelp': self.yelp,
+            'gtkdoc': self.gtkdoc,
+            'gtkdoc_html_dir': self.gtkdoc_html_dir,
+            'gdbus_codegen': self.gdbus_codegen,
+            'mkenums': self.mkenums,
+            'mkenums_simple': self.mkenums_simple,
+            'genmarshal': self.genmarshal,
+            'generate_vapi': self.generate_vapi,
+        })
+
+    def _get_native_glib_version(self, state: 'ModuleState') -> str:
+        if self.native_glib_version is None:
+            glib_dep = PkgConfigDependency('glib-2.0', state.environment,
+                                           {'native': True, 'required': False})
+            if glib_dep.found():
+                self.native_glib_version = glib_dep.get_version()
+            else:
+                mlog.warning('Could not detect glib version, assuming 2.54. '
+                             'You may get build errors if your glib is older.')
+                self.native_glib_version = '2.54'
+        return self.native_glib_version
+
+    @mesonlib.run_once
+    def __print_gresources_warning(self, state: 'ModuleState') -> None:
+        if not mesonlib.version_compare(self._get_native_glib_version(state),
+                                        gresource_dep_needed_version):
+            mlog.warning('GLib compiled dependencies do not work reliably with\n'
+                         'the current version of GLib. See the following upstream issue:',
+                         mlog.bold('https://bugzilla.gnome.org/show_bug.cgi?id=774368'),
+                         once=True, fatal=False)
+
+    @staticmethod
+    def _print_gdbus_warning() -> None:
+        mlog.warning('Code generated with gdbus_codegen() requires the root directory be added to\n'
+                     '  include_directories of targets with GLib < 2.51.3:',
+                     mlog.bold('https://github.com/mesonbuild/meson/issues/1387'),
+                     once=True, fatal=False)
+
+    @typed_kwargs(
+        'gnome.post_install',
+        KwargInfo('glib_compile_schemas', bool, default=False),
+        KwargInfo('gio_querymodules', ContainerTypeInfo(list, str), default=[], listify=True),
+        KwargInfo('gtk_update_icon_cache', bool, default=False),
+        KwargInfo('update_desktop_database', bool, default=False, since='0.59.0'),
+        KwargInfo('update_mime_database', bool, default=False, since='0.64.0'),
+    )
+    @noPosargs
+    @FeatureNew('gnome.post_install', '0.57.0')
+    def post_install(self, state: 'ModuleState', args: T.List['TYPE_var'], kwargs: 'PostInstall') -> ModuleReturnValue:
+        rv: T.List['build.ExecutableSerialisation'] = []
+        datadir_abs = os.path.join(state.environment.get_prefix(), state.environment.get_datadir())
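+        # Every helper below is serialised with skip_if_destdir=True: staged
+        # (DESTDIR) installs are meant for packaging, so the host's caches and
+        # databases must not be touched.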
+        if kwargs['glib_compile_schemas'] and not self.install_glib_compile_schemas:
+            self.install_glib_compile_schemas = True
+            prog = state.find_tool('glib-compile-schemas', 'gio-2.0', 'glib_compile_schemas')
+            schemasdir = os.path.join(datadir_abs, 'glib-2.0', 'schemas')
+            script = state.backend.get_executable_serialisation([prog, schemasdir])
+            script.skip_if_destdir = True
+            rv.append(script)
+        for d in kwargs['gio_querymodules']:
+            if d not in self.install_gio_querymodules:
+                self.install_gio_querymodules.append(d)
+                prog = state.find_tool('gio-querymodules', 'gio-2.0', 'gio_querymodules')
+                moduledir = os.path.join(state.environment.get_prefix(), d)
+                script = state.backend.get_executable_serialisation([prog, moduledir])
+                script.skip_if_destdir = True
+                rv.append(script)
+        if kwargs['gtk_update_icon_cache'] and not self.install_gtk_update_icon_cache:
+            self.install_gtk_update_icon_cache = True
+            prog = state.find_program('gtk4-update-icon-cache', required=False)
+            found = isinstance(prog, build.Executable) or prog.found()
+            if not found:
+                prog = state.find_program('gtk-update-icon-cache')
+            icondir = os.path.join(datadir_abs, 'icons', 'hicolor')
+            script = state.backend.get_executable_serialisation([prog, '-q', '-t', '-f', icondir])
+            script.skip_if_destdir = True
+            rv.append(script)
+        if kwargs['update_desktop_database'] and not self.install_update_desktop_database:
+            self.install_update_desktop_database = True
+            prog = state.find_program('update-desktop-database')
+            appdir = os.path.join(datadir_abs, 'applications')
+            script = state.backend.get_executable_serialisation([prog, '-q', appdir])
+            script.skip_if_destdir = True
+            rv.append(script)
+        if kwargs['update_mime_database'] and not self.install_update_mime_database:
+            self.install_update_mime_database = True
+            prog = state.find_program('update-mime-database')
+            appdir = os.path.join(datadir_abs, 'mime')
+            script = state.backend.get_executable_serialisation([prog, appdir])
+            script.skip_if_destdir = True
+            rv.append(script)
+        return ModuleReturnValue(None, rv)
+
+    @typed_pos_args('gnome.compile_resources', str, (str, mesonlib.File, build.CustomTarget, build.CustomTargetIndex, build.GeneratedList))
+    @typed_kwargs(
+        'gnome.compile_resources',
+        _BUILD_BY_DEFAULT,
+        _EXTRA_ARGS_KW,
+        INSTALL_KW,
+        INSTALL_KW.evolve(name='install_header', since='0.37.0'),
+        INSTALL_DIR_KW,
+        KwargInfo('c_name', (str, NoneType)),
+        KwargInfo('dependencies', ContainerTypeInfo(list, (mesonlib.File, build.CustomTarget, build.CustomTargetIndex)), default=[], listify=True),
+        KwargInfo('export', bool, default=False, since='0.37.0'),
+        KwargInfo('gresource_bundle', bool, default=False, since='0.37.0'),
+        KwargInfo('source_dir', ContainerTypeInfo(list, str), default=[], listify=True),
+    )
+    def compile_resources(self, state: 'ModuleState', args: T.Tuple[str, 'FileOrString'],
+                          kwargs: 'CompileResources') -> 'ModuleReturnValue':
+        self.__print_gresources_warning(state)
+        glib_version = self._get_native_glib_version(state)
+
+        glib_compile_resources = state.find_program('glib-compile-resources')
+        cmd: T.List[T.Union[ExternalProgram, Executable, OverrideProgram, str]] = [glib_compile_resources, '@INPUT@']
+
+        source_dirs = kwargs['source_dir']
+        dependencies = kwargs['dependencies']
+
+        target_name, input_file = args
+
+        # Validate dependencies
+        subdirs: T.List[str] = []
+        depends: T.List[T.Union[build.CustomTarget, build.CustomTargetIndex]] = []
+        for dep in dependencies:
+            if isinstance(dep, mesonlib.File):
+                subdirs.append(dep.subdir)
+            else:
+                depends.append(dep)
+                subdirs.append(dep.get_subdir())
+                if not mesonlib.version_compare(glib_version, gresource_dep_needed_version):
+                    m = 'The "dependencies" argument of gnome.compile_resources() cannot\n' \
+                        'be used with the current version of glib-compile-resources due to\n' \
+                        ''
+                    raise MesonException(m)
+
+        if not mesonlib.version_compare(glib_version, gresource_dep_needed_version):
+            # Resource xml files generated at build-time cannot be used with
+            # gnome.compile_resources() because we need to scan the xml for
+            # dependencies. Use configure_file() instead to generate it at
+            # configure-time
+            if isinstance(input_file, mesonlib.File):
+                # glib-compile-resources will be run inside the source dir,
+                # so we need either 'src_to_build' or the absolute path.
+                # Absolute path is the easiest choice.
+                if input_file.is_built:
+                    ifile = os.path.join(state.environment.get_build_dir(), input_file.subdir, input_file.fname)
+                else:
+                    ifile = os.path.join(input_file.subdir, input_file.fname)
+
+            elif isinstance(input_file, (build.CustomTarget, build.CustomTargetIndex, build.GeneratedList)):
+                raise MesonException('Resource xml files generated at build-time cannot be used with '
+                                     'gnome.compile_resources() in the current version of glib-compile-resources '
+                                     'because we need to scan the xml for dependencies due to '
+                                     '<https://bugzilla.gnome.org/show_bug.cgi?id=774368>\nUse '
+                                     'configure_file() instead to generate it at configure-time.')
+            else:
+                ifile = os.path.join(state.subdir, input_file)
+
+            depend_files, depends, subdirs = self._get_gresource_dependencies(
+                state, ifile, source_dirs, dependencies)
+
+        # Make source dirs relative to build dir now
+        source_dirs = [os.path.join(state.build_to_src, state.subdir, d) for d in source_dirs]
+        # Ensure build directories of generated deps are included
+        source_dirs += subdirs
+        # Always include current directory, but after paths set by user
+        source_dirs.append(os.path.join(state.build_to_src, state.subdir))
+
+        for source_dir in OrderedSet(source_dirs):
+            cmd += ['--sourcedir', source_dir]
+
+        if kwargs['c_name']:
+            cmd += ['--c-name', kwargs['c_name']]
+        if not kwargs['export']:
+            cmd += ['--internal']
+
+        cmd += ['--generate', '--target', '@OUTPUT@']
+        cmd += kwargs['extra_args']
+
+        gresource = kwargs['gresource_bundle']
+        if gresource:
+            output = f'{target_name}.gresource'
+            name = f'{target_name}_gresource'
+        else:
+            if 'c' in state.environment.coredata.compilers.host:
+                output = f'{target_name}.c'
+                name = f'{target_name}_c'
+            elif 'cpp' in state.environment.coredata.compilers.host:
+                output = f'{target_name}.cpp'
+                name = f'{target_name}_cpp'
+            else:
+                raise MesonException('Compiling GResources into code is only supported in C and C++ projects')
+
+        if kwargs['install'] and not gresource:
+            raise MesonException('The install kwarg only applies to gresource bundles, see install_header')
+
+        install_header = kwargs['install_header']
+        if install_header and gresource:
+            raise MesonException('The install_header kwarg does not apply to gresource bundles')
+        if install_header and not kwargs['export']:
+            raise MesonException('GResource header is installed yet export is not enabled')
+
+        depfile: T.Optional[str] = None
+        target_cmd: T.List[T.Union[ExternalProgram, Executable, OverrideProgram, str]]
+        if not mesonlib.version_compare(glib_version, gresource_dep_needed_version):
+            # This will eventually go out of sync if dependencies are added
+            target_cmd = cmd
+        else:
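+            # A new-enough glib-compile-resources can emit a depfile at build
+            # time, so the configure-time dependency scan is not needed here.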
+            depfile = f'{output}.d'
+            depend_files = []
+            target_cmd = copy.copy(cmd) + ['--dependency-file', '@DEPFILE@']
+        target_c = GResourceTarget(
+            name,
+            state.subdir,
+            state.subproject,
+            state.environment,
+            target_cmd,
+            [input_file],
+            [output],
+            build_by_default=kwargs['build_by_default'],
+            depfile=depfile,
+            depend_files=depend_files,
+            extra_depends=depends,
+            install=kwargs['install'],
+            install_dir=[kwargs['install_dir']] if kwargs['install_dir'] else [],
+            install_tag=['runtime'],
+        )
+
+        if gresource: # Only one target for .gresource files
+            return ModuleReturnValue(target_c, [target_c])
+
+        install_dir = kwargs['install_dir'] or state.environment.coredata.get_option(mesonlib.OptionKey('includedir'))
+        assert isinstance(install_dir, str), 'for mypy'
+        target_h = GResourceHeaderTarget(
+            f'{target_name}_h',
+            state.subdir,
+            state.subproject,
+            state.environment,
+            cmd,
+            [input_file],
+            [f'{target_name}.h'],
+            build_by_default=kwargs['build_by_default'],
+            extra_depends=depends,
+            install=install_header,
+            install_dir=[install_dir],
+            install_tag=['devel'],
+        )
+        rv = [target_c, target_h]
+        return ModuleReturnValue(rv, rv)
+
+    @staticmethod
+    def _get_gresource_dependencies(
+            state: 'ModuleState', input_file: str, source_dirs: T.List[str],
+            dependencies: T.Sequence[T.Union[mesonlib.File, build.CustomTarget, build.CustomTargetIndex]]
+            ) -> T.Tuple[T.List[mesonlib.FileOrString], T.List[T.Union[build.CustomTarget, build.CustomTargetIndex]], T.List[str]]:
+
+        cmd = ['glib-compile-resources',
+               input_file,
+               '--generate-dependencies']
+
+        # Prefer generated files over source files
+        cmd += ['--sourcedir', state.subdir] # Current build dir
+        for source_dir in source_dirs:
+            cmd += ['--sourcedir', os.path.join(state.subdir, source_dir)]
+
+        try:
+            pc, stdout, stderr = Popen_safe(cmd, cwd=state.environment.get_source_dir())
+        except (FileNotFoundError, PermissionError):
+            raise MesonException('Could not execute glib-compile-resources.')
+        if pc.returncode != 0:
+            m = f'glib-compile-resources failed to get dependencies for {cmd[1]}:\n{stderr}'
+            mlog.warning(m)
+            raise subprocess.CalledProcessError(pc.returncode, cmd)
+
+        raw_dep_files: T.List[str] = stdout.split('\n')[:-1]
+
+        depends: T.List[T.Union[build.CustomTarget, build.CustomTargetIndex]] = []
+        subdirs: T.List[str] = []
+        dep_files: T.List[mesonlib.FileOrString] = []
+        for resfile in raw_dep_files.copy():
+            resbasename = os.path.basename(resfile)
+            for dep in dependencies:
+                if isinstance(dep, mesonlib.File):
+                    if dep.fname != resbasename:
+                        continue
+                    raw_dep_files.remove(resfile)
+                    dep_files.append(dep)
+                    subdirs.append(dep.subdir)
+                    break
+                elif isinstance(dep, (build.CustomTarget, build.CustomTargetIndex)):
+                    fname = None
+                    outputs = {(o, os.path.basename(o)) for o in dep.get_outputs()}
+                    for o, baseo in outputs:
+                        if baseo == resbasename:
+                            fname = o
+                            break
+                    if fname is not None:
+                        raw_dep_files.remove(resfile)
+                        depends.append(dep)
+                        subdirs.append(dep.get_subdir())
+                        break
+            else:
+                # In generate-dependencies mode, glib-compile-resources doesn't raise
+                # an error for missing resources but instead prints whatever filename
+                # was listed in the input file.  That's good because it means we can
+                # handle resource files that get generated as part of the build, as
+                # follows.
+                #
+                # If there are multiple generated resource files with the same basename
+                # then this code will get confused.
+                try:
+                    f = mesonlib.File.from_source_file(state.environment.get_source_dir(),
+                                                       ".", resfile)
+                except MesonException:
+                    raise MesonException(
+                        f'Resource "{resfile}" listed in "{input_file}" was not found. '
+                        'If this is a generated file, pass the target that generates '
+                        'it to gnome.compile_resources() using the "dependencies" '
+                        'keyword argument.')
+                raw_dep_files.remove(resfile)
+                dep_files.append(f)
+        dep_files.extend(raw_dep_files)
+        return dep_files, depends, subdirs
+
+    def _get_link_args(self, state: 'ModuleState',
+                       lib: T.Union[build.SharedLibrary, build.StaticLibrary],
+                       depends: T.Sequence[T.Union[build.BuildTarget, 'build.GeneratedTypes', 'FileOrString', build.StructuredSources]],
+                       include_rpath: bool = False,
+                       use_gir_args: bool = False
+                       ) -> T.Tuple[T.List[str], T.List[T.Union[build.BuildTarget, 'build.GeneratedTypes', 'FileOrString', build.StructuredSources]]]:
+        link_command: T.List[str] = []
+        new_depends = list(depends)
+        # Construct link args
+        if isinstance(lib, build.SharedLibrary):
+            libdir = os.path.join(state.environment.get_build_dir(), state.backend.get_target_dir(lib))
+            link_command.append('-L' + libdir)
+            if include_rpath:
+                link_command.append('-Wl,-rpath,' + libdir)
+            new_depends.append(lib)
+            # Needed for the following binutils bug:
+            # https://github.com/mesonbuild/meson/issues/1911
+            # However, g-ir-scanner does not understand -Wl,-rpath
+            # so we need to use -L instead
+            for d in state.backend.determine_rpath_dirs(lib):
+                d = os.path.join(state.environment.get_build_dir(), d)
+                link_command.append('-L' + d)
+                if include_rpath:
+                    link_command.append('-Wl,-rpath,' + d)
+        if use_gir_args and self._gir_has_option('--extra-library'):
+            link_command.append('--extra-library=' + lib.name)
+        else:
+            link_command.append('-l' + lib.name)
+        return link_command, new_depends
+
+    def _get_dependencies_flags_raw(
+            self, deps: T.Sequence[T.Union['Dependency', build.BuildTarget, build.CustomTarget, build.CustomTargetIndex]],
+            state: 'ModuleState',
+            depends: T.Sequence[T.Union[build.BuildTarget, 'build.GeneratedTypes', 'FileOrString', build.StructuredSources]],
+            include_rpath: bool,
+            use_gir_args: bool,
+            ) -> T.Tuple[OrderedSet[str], OrderedSet[T.Union[str, T.Tuple[str, str]]], OrderedSet[T.Union[str, T.Tuple[str, str]]], OrderedSet[str],
+                         T.List[T.Union[build.BuildTarget, 'build.GeneratedTypes', 'FileOrString', build.StructuredSources]]]:
+        cflags: OrderedSet[str] = OrderedSet()
+        # Linker flags that can't be de-duped reliably because they require
+        # two arguments in order (such as '-framework AVFoundation') are
+        # stored as tuples.
+        internal_ldflags: OrderedSet[T.Union[str, T.Tuple[str, str]]] = OrderedSet()
+        external_ldflags: OrderedSet[T.Union[str, T.Tuple[str, str]]] = OrderedSet()
+        gi_includes: OrderedSet[str] = OrderedSet()
+        deps = mesonlib.listify(deps)
+        depends = list(depends)
+
+        for dep in deps:
+            if isinstance(dep, Dependency):
+                girdir = dep.get_variable(pkgconfig='girdir', internal='girdir', default_value='')
+                if girdir:
+                    assert isinstance(girdir, str), 'for mypy'
+                    gi_includes.update([girdir])
+            if isinstance(dep, InternalDependency):
+                cflags.update(dep.get_compile_args())
+                cflags.update(state.get_include_args(dep.include_directories))
+                for lib in dep.libraries:
+                    if isinstance(lib, build.SharedLibrary):
+                        _ld, depends = self._get_link_args(state, lib, depends, include_rpath)
+                        internal_ldflags.update(_ld)
+                        libdepflags = self._get_dependencies_flags_raw(lib.get_external_deps(), state, depends, include_rpath,
+                                                                       use_gir_args)
+                        cflags.update(libdepflags[0])
+                        internal_ldflags.update(libdepflags[1])
+                        external_ldflags.update(libdepflags[2])
+                        gi_includes.update(libdepflags[3])
+                        depends = libdepflags[4]
+                extdepflags = self._get_dependencies_flags_raw(dep.ext_deps, state, depends, include_rpath,
+                                                               use_gir_args)
+                cflags.update(extdepflags[0])
+                internal_ldflags.update(extdepflags[1])
+                external_ldflags.update(extdepflags[2])
+                gi_includes.update(extdepflags[3])
+                depends = extdepflags[4]
+                for source in dep.sources:
+                    if isinstance(source, GirTarget):
+                        gi_includes.update([os.path.join(state.environment.get_build_dir(),
+                                            source.get_subdir())])
+            # This should be any dependency other than an internal one.
+            elif isinstance(dep, Dependency):
+                cflags.update(dep.get_compile_args())
+                ldflags = iter(dep.get_link_args(raw=True))
+                for flag in ldflags:
+                    if (os.path.isabs(flag) and
+                            # For PkgConfigDependency only:
+                            getattr(dep, 'is_libtool', False)):
+                        lib_dir = os.path.dirname(flag)
+                        external_ldflags.update([f'-L{lib_dir}'])
+                        if include_rpath:
+                            external_ldflags.update([f'-Wl,-rpath {lib_dir}'])
+                        libname = os.path.basename(flag)
+                        if libname.startswith("lib"):
+                            libname = libname[3:]
+                        libname = libname.split(".so")[0]
+                        flag = f"-l{libname}"
+                    # FIXME: Hack to avoid passing some compiler options in
+                    if flag.startswith("-W"):
+                        continue
+                    # If it's a framework arg, slurp the framework name too
+                    # to preserve the order of arguments
+                    if flag == '-framework':
+                        external_ldflags.update([(flag, next(ldflags))])
+                    else:
+                        external_ldflags.update([flag])
+            elif isinstance(dep, (build.StaticLibrary, build.SharedLibrary)):
+                cflags.update(state.get_include_args(dep.get_include_dirs()))
+                depends.append(dep)
+            else:
+                mlog.log(f'dependency {dep!r} not handled when building gir files')
+                continue
+
+        if use_gir_args and self._gir_has_option('--extra-library'):
+            def fix_ldflags(ldflags: T.Iterable[T.Union[str, T.Tuple[str, str]]]) -> OrderedSet[T.Union[str, T.Tuple[str, str]]]:
+                fixed_ldflags: OrderedSet[T.Union[str, T.Tuple[str, str]]] = OrderedSet()
+                for ldflag in ldflags:
+                    if isinstance(ldflag, str) and ldflag.startswith("-l"):
+                        ldflag = ldflag.replace('-l', '--extra-library=', 1)
+                    fixed_ldflags.add(ldflag)
+                return fixed_ldflags
+            internal_ldflags = fix_ldflags(internal_ldflags)
+            external_ldflags = fix_ldflags(external_ldflags)
+        return cflags, internal_ldflags, external_ldflags, gi_includes, depends
+
+    def _get_dependencies_flags(
+            self, deps: T.Sequence[T.Union['Dependency', build.BuildTarget, build.CustomTarget, build.CustomTargetIndex]],
+            state: 'ModuleState',
+            depends: T.Sequence[T.Union[build.BuildTarget, 'build.GeneratedTypes', 'FileOrString', build.StructuredSources]],
+            include_rpath: bool = False,
+            use_gir_args: bool = False,
+            ) -> T.Tuple[OrderedSet[str], T.List[str], T.List[str], OrderedSet[str],
+                         T.List[T.Union[build.BuildTarget, 'build.GeneratedTypes', 'FileOrString', build.StructuredSources]]]:
+
+        cflags, internal_ldflags_raw, external_ldflags_raw, gi_includes, depends = self._get_dependencies_flags_raw(deps, state, depends, include_rpath, use_gir_args)
+        internal_ldflags: T.List[str] = []
+        external_ldflags: T.List[str] = []
+
+        # Extract non-deduplicable argument groups out of the tuples.
+        for ldflag in internal_ldflags_raw:
+            if isinstance(ldflag, str):
+                internal_ldflags.append(ldflag)
+            else:
+                internal_ldflags.extend(ldflag)
+        for ldflag in external_ldflags_raw:
+            if isinstance(ldflag, str):
+                external_ldflags.append(ldflag)
+            else:
+                external_ldflags.extend(ldflag)
+
+        return cflags, internal_ldflags, external_ldflags, gi_includes, depends
+
+    def _unwrap_gir_target(self, girtarget: T.Union[build.Executable, build.StaticLibrary, build.SharedLibrary], state: 'ModuleState'
+                           ) -> T.Union[build.Executable, build.StaticLibrary, build.SharedLibrary]:
+        if not isinstance(girtarget, (build.Executable, build.SharedLibrary,
+                                      build.StaticLibrary)):
+            raise MesonException(f'Gir target must be an executable or library but is "{girtarget}" of type {type(girtarget).__name__}')
+
+        STATIC_BUILD_REQUIRED_VERSION = ">=1.58.1"
+        if isinstance(girtarget, (build.StaticLibrary)) and \
+           not mesonlib.version_compare(
+               self._get_gir_dep(state)[0].get_version(),
+               STATIC_BUILD_REQUIRED_VERSION):
+            raise MesonException('Static libraries can only be introspected with GObject-Introspection ' + STATIC_BUILD_REQUIRED_VERSION)
+
+        return girtarget
+
+    def _devenv_prepend(self, varname: str, value: str) -> None:
+        if self.devenv is None:
+            self.devenv = build.EnvironmentVariables()
+        self.devenv.prepend(varname, [value])
+
+    def postconf_hook(self, b: build.Build) -> None:
+        if self.devenv is not None:
+            b.devenv.append(self.devenv)
+
+    def _get_gir_dep(self, state: 'ModuleState') -> T.Tuple[Dependency, T.Union[build.Executable, 'ExternalProgram', 'OverrideProgram'],
+                                                            T.Union[build.Executable, 'ExternalProgram', 'OverrideProgram']]:
+        if not self.gir_dep:
+            self.gir_dep = state.dependency('gobject-introspection-1.0')
+            self.giscanner = state.find_tool('g-ir-scanner', 'gobject-introspection-1.0', 'g_ir_scanner')
+            self.gicompiler = state.find_tool('g-ir-compiler', 'gobject-introspection-1.0', 'g_ir_compiler')
+        return self.gir_dep, self.giscanner, self.gicompiler
+
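+    # Feature-detect optional g-ir-scanner flags by searching its --help
+    # output; results are memoized per option via lru_cache.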
+    @functools.lru_cache(maxsize=None)
+    def _gir_has_option(self, option: str) -> bool:
+        exe = self.giscanner
+        if isinstance(exe, OverrideProgram):
+            # Handle overridden g-ir-scanner
+            assert option in {'--extra-library', '--sources-top-dirs'}
+            return True
+        p, o, _ = Popen_safe(exe.get_command() + ['--help'], stderr=subprocess.STDOUT)
+        return p.returncode == 0 and option in o
+
+    # May mutate depends and gir_inc_dirs
+    @staticmethod
+    def _scan_include(state: 'ModuleState', includes: T.List[T.Union[str, GirTarget]]
+                      ) -> T.Tuple[T.List[str], T.List[str], T.List[GirTarget]]:
+        ret: T.List[str] = []
+        gir_inc_dirs: T.List[str] = []
+        depends: T.List[GirTarget] = []
+
+        for inc in includes:
+            if isinstance(inc, str):
+                ret += [f'--include={inc}']
+            elif isinstance(inc, GirTarget):
+                gir_inc_dirs.append(os.path.join(state.environment.get_build_dir(), inc.get_subdir()))
+                ret.append(f"--include-uninstalled={os.path.join(inc.get_subdir(), inc.get_basename())}")
+                depends.append(inc)
+
+        return ret, gir_inc_dirs, depends
+
+    @staticmethod
+    def _scan_langs(state: 'ModuleState', langs: T.Iterable[str]) -> T.List[str]:
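+        # Keeps only the -L search paths from each language's external link
+        # args, e.g. LDFLAGS='-L/opt/lib -Wl,-O1' contributes just '-L/opt/lib'
+        # (illustrative flags).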
+        ret: T.List[str] = []
+
+        for lang in langs:
+            link_args = state.environment.coredata.get_external_link_args(MachineChoice.HOST, lang)
+            for link_arg in link_args:
+                if link_arg.startswith('-L'):
+                    ret.append(link_arg)
+
+        return ret
+
+    @staticmethod
+    def _scan_gir_targets(state: 'ModuleState', girtargets: T.Sequence[build.BuildTarget]) -> T.List[T.Union[str, build.Executable]]:
+        ret: T.List[T.Union[str, build.Executable]] = []
+
+        for girtarget in girtargets:
+            if isinstance(girtarget, build.Executable):
+                ret += ['--program', girtarget]
+            else:
+                # Because of https://gitlab.gnome.org/GNOME/gobject-introspection/merge_requests/72
+                # we can't use the full path until this is merged.
+                libpath = os.path.join(girtarget.get_subdir(), girtarget.get_filename())
+                # Must use absolute paths here because g-ir-scanner will not
+                # add them to the runtime path list if they're relative. This
+                # means we cannot use @BUILD_ROOT@
+                build_root = state.environment.get_build_dir()
+                if isinstance(girtarget, build.SharedLibrary):
+                    # need to put our output directory first as we need to use the
+                    # generated libraries instead of any possibly installed system/prefix
+                    # ones.
+                    ret += ["-L{}/{}".format(build_root, os.path.dirname(libpath))]
+                    libname = girtarget.get_basename()
+                else:
+                    libname = os.path.join(build_root, libpath)
+                ret += ['--library', libname]
+                # Needed for the following binutils bug:
+                # https://github.com/mesonbuild/meson/issues/1911
+                # However, g-ir-scanner does not understand -Wl,-rpath
+                # so we need to use -L instead
+                for d in state.backend.determine_rpath_dirs(girtarget):
+                    d = os.path.join(state.environment.get_build_dir(), d)
+                    ret.append('-L' + d)
+
+        return ret
+
+    @staticmethod
+    def _get_girtargets_langs_compilers(girtargets: T.Sequence[build.BuildTarget]) -> T.List[T.Tuple[str, 'Compiler']]:
+        ret: T.List[T.Tuple[str, 'Compiler']] = []
+        for girtarget in girtargets:
+            for lang, compiler in girtarget.compilers.items():
+                # XXX: Can you use g-i with any other language?
+                if lang in {'c', 'cpp', 'objc', 'objcpp', 'd'}:
+                    ret.append((lang, compiler))
+                    break
+
+        return ret
+
+    @staticmethod
+    def _get_gir_targets_deps(girtargets: T.Sequence[build.BuildTarget]
+                              ) -> T.List[T.Union[build.BuildTarget, build.CustomTarget, build.CustomTargetIndex, Dependency]]:
+        ret: T.List[T.Union[build.BuildTarget, build.CustomTarget, build.CustomTargetIndex, Dependency]] = []
+        for girtarget in girtargets:
+            ret += girtarget.get_all_link_deps()
+            ret += girtarget.get_external_deps()
+        return ret
+
+    @staticmethod
+    def _get_gir_targets_inc_dirs(girtargets: T.Sequence[build.BuildTarget]) -> OrderedSet[build.IncludeDirs]:
+        ret: OrderedSet = OrderedSet()
+        for girtarget in girtargets:
+            ret.update(girtarget.get_include_dirs())
+        return ret
+
+    @staticmethod
+    def _get_langs_compilers_flags(state: 'ModuleState', langs_compilers: T.List[T.Tuple[str, 'Compiler']]
+                                   ) -> T.Tuple[T.List[str], T.List[str], T.List[str]]:
+        cflags: T.List[str] = []
+        internal_ldflags: T.List[str] = []
+        external_ldflags: T.List[str] = []
+
+        for lang, compiler in langs_compilers:
+            if state.global_args.get(lang):
+                cflags += state.global_args[lang]
+            if state.project_args.get(lang):
+                cflags += state.project_args[lang]
+            if mesonlib.OptionKey('b_sanitize') in compiler.base_options:
+                sanitize = state.environment.coredata.options[mesonlib.OptionKey('b_sanitize')].value
+                cflags += compiler.sanitizer_compile_args(sanitize)
+                sanitize = sanitize.split(',')
+                # These must be first in ldflags
+                if 'address' in sanitize:
+                    internal_ldflags += ['-lasan']
+                if 'thread' in sanitize:
+                    internal_ldflags += ['-ltsan']
+                if 'undefined' in sanitize:
+                    internal_ldflags += ['-lubsan']
+                # FIXME: Linking directly to lib*san is not recommended, but g-ir-scanner
+                # does not understand -fsanitize=... in LDFLAGS. https://bugzilla.gnome.org/show_bug.cgi?id=783892
+                # ldflags += compiler.sanitizer_link_args(sanitize)
+
+        return cflags, internal_ldflags, external_ldflags
+
+    @staticmethod
+    def _make_gir_filelist(state: 'ModuleState', srcdir: str, ns: str,
+                           nsversion: str, girtargets: T.Sequence[build.BuildTarget],
+                           libsources: T.Sequence[T.Union[
+                               str, mesonlib.File, build.GeneratedList,
+                               build.CustomTarget, build.CustomTargetIndex]]
+                           ) -> str:
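+        # Writes one source path per line to <target private dir>/<ns>_<nsversion>_gir_filelist,
+        # which generate_gir hands to g-ir-scanner via --filelist below.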
+        gir_filelist_dir = state.backend.get_target_private_dir_abs(girtargets[0])
+        if not os.path.isdir(gir_filelist_dir):
+            os.mkdir(gir_filelist_dir)
+        gir_filelist_filename = os.path.join(gir_filelist_dir, f'{ns}_{nsversion}_gir_filelist')
+
+        with open(gir_filelist_filename, 'w', encoding='utf-8') as gir_filelist:
+            for s in libsources:
+                if isinstance(s, (build.CustomTarget, build.CustomTargetIndex)):
+                    for custom_output in s.get_outputs():
+                        gir_filelist.write(os.path.join(state.environment.get_build_dir(),
+                                                        state.backend.get_target_dir(s),
+                                                        custom_output) + '\n')
+                elif isinstance(s, mesonlib.File):
+                    gir_filelist.write(s.rel_to_builddir(state.build_to_src) + '\n')
+                elif isinstance(s, build.GeneratedList):
+                    for gen_src in s.get_outputs():
+                        gir_filelist.write(os.path.join(srcdir, gen_src) + '\n')
+                else:
+                    gir_filelist.write(os.path.join(srcdir, s) + '\n')
+
+        return gir_filelist_filename
+
+    @staticmethod
+    def _make_gir_target(
+            state: 'ModuleState',
+            girfile: str,
+            scan_command: T.Sequence[T.Union['FileOrString', Executable, ExternalProgram, OverrideProgram]],
+            generated_files: T.Sequence[T.Union[str, mesonlib.File, build.CustomTarget, build.CustomTargetIndex, build.GeneratedList]],
+            depends: T.Sequence[T.Union['FileOrString', build.BuildTarget, 'build.GeneratedTypes', build.StructuredSources]],
+            kwargs: T.Dict[str, T.Any]) -> GirTarget:
+        install = kwargs['install_gir']
+        if install is None:
+            install = kwargs['install']
+
+        install_dir = kwargs['install_dir_gir']
+        if install_dir is None:
+            install_dir = os.path.join(state.environment.get_datadir(), 'gir-1.0')
+        elif install_dir is False:
+            install = False
+
+        # g-ir-scanner uses pkg-config to find libraries such as glib. They could
+        # be built as a subproject, in which case we need to trick it into using
+        # the -uninstalled.pc files Meson generated. It must also respect any
+        # pkg-config settings the user set in a machine file, like
+        # PKG_CONFIG_LIBDIR, SYSROOT, etc.
+        run_env = PkgConfigDependency.get_env(state.environment, MachineChoice.HOST, uninstalled=True)
+        # g-ir-scanner uses Python's distutils to find the compiler, which uses 'CC'
+        cc_exelist = state.environment.coredata.compilers.host['c'].get_exelist()
+        run_env.set('CC', [quote_arg(x) for x in cc_exelist], ' ')
+        run_env.merge(kwargs['env'])
+
+        return GirTarget(
+            girfile,
+            state.subdir,
+            state.subproject,
+            state.environment,
+            scan_command,
+            generated_files,
+            [girfile],
+            build_by_default=kwargs['build_by_default'],
+            extra_depends=depends,
+            install=install,
+            install_dir=[install_dir],
+            install_tag=['devel'],
+            env=run_env,
+        )
+
+    @staticmethod
+    def _make_typelib_target(state: 'ModuleState', typelib_output: str,
+                             typelib_cmd: T.Sequence[T.Union[str, build.Executable, ExternalProgram, build.CustomTarget]],
+                             generated_files: T.Sequence[T.Union[str, mesonlib.File, build.CustomTarget, build.CustomTargetIndex, build.GeneratedList]],
+                             kwargs: T.Dict[str, T.Any]) -> TypelibTarget:
+        install = kwargs['install_typelib']
+        if install is None:
+            install = kwargs['install']
+
+        install_dir = kwargs['install_dir_typelib']
+        if install_dir is None:
+            install_dir = os.path.join(state.environment.get_libdir(), 'girepository-1.0')
+        elif install_dir is False:
+            install = False
+
+        return TypelibTarget(
+            typelib_output,
+            state.subdir,
+            state.subproject,
+            state.environment,
+            typelib_cmd,
+            generated_files,
+            [typelib_output],
+            install=install,
+            install_dir=[install_dir],
+            install_tag=['typelib'],
+            build_by_default=kwargs['build_by_default'],
+            env=kwargs['env'],
+        )
+
+    @staticmethod
+    def _gather_typelib_includes_and_update_depends(
+            state: 'ModuleState',
+            deps: T.Sequence[T.Union[Dependency, build.BuildTarget, build.CustomTarget, build.CustomTargetIndex]],
+            depends: T.Sequence[T.Union[build.BuildTarget, 'build.GeneratedTypes', 'FileOrString', build.StructuredSources]]
+            ) -> T.Tuple[T.List[str], T.List[T.Union[build.BuildTarget, 'build.GeneratedTypes', 'FileOrString', build.StructuredSources]]]:
+        # Need to recursively add deps on GirTarget sources from our
+        # dependencies and also find the include directories needed for the
+        # typelib generation custom target below.
+        typelib_includes: T.List[str] = []
+        new_depends = list(depends)
+        for dep in deps:
+            # Add a dependency on each GirTarget listed in dependencies and add
+            # the directory where it will be generated to the typelib includes
+            if isinstance(dep, InternalDependency):
+                for source in dep.sources:
+                    if isinstance(source, GirTarget) and source not in depends:
+                        new_depends.append(source)
+                        subdir = os.path.join(state.environment.get_build_dir(),
+                                              source.get_subdir())
+                        if subdir not in typelib_includes:
+                            typelib_includes.append(subdir)
+            # Do the same, but for dependencies of dependencies. These are
+            # stored in the list of generated sources for each link dep (from
+            # girtarget.get_all_link_deps() above).
+            # FIXME: Store this in the original form from declare_dependency()
+            # so it can be used here directly.
+            elif isinstance(dep, build.SharedLibrary):
+                for g_source in dep.generated:
+                    if isinstance(g_source, GirTarget):
+                        subdir = os.path.join(state.environment.get_build_dir(),
+                                              g_source.get_subdir())
+                        if subdir not in typelib_includes:
+                            typelib_includes.append(subdir)
+            if isinstance(dep, Dependency):
+                girdir = dep.get_variable(pkgconfig='girdir', internal='girdir', default_value='')
+                assert isinstance(girdir, str), 'for mypy'
+                if girdir and girdir not in typelib_includes:
+                    typelib_includes.append(girdir)
+        return typelib_includes, new_depends
+
+    @staticmethod
+    def _get_external_args_for_langs(state: 'ModuleState', langs: T.List[str]) -> T.List[str]:
+        ret: T.List[str] = []
+        for lang in langs:
+            ret += mesonlib.listify(state.environment.coredata.get_external_args(MachineChoice.HOST, lang))
+        return ret
+
+    @staticmethod
+    def _get_scanner_cflags(cflags: T.Iterable[str]) -> T.Iterable[str]:
+        'g-ir-scanner only accepts -I/-D/-U; must ignore all other flags'
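+        # e.g. ['-O2', '-DNDEBUG', '-I../include', '-fPIC'] yields
+        # ['-DNDEBUG', '-I../include'] (illustrative values)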
+        for f in cflags:
+            # _FORTIFY_SOURCE only works together with -O, which we strip; since
+            # g-ir-scanner just invokes the preprocessor anyway, drop it too
+            if f.startswith(('-D', '-U', '-I')) and not f.startswith('-D_FORTIFY_SOURCE'):
+                yield f
+
+    @staticmethod
+    def _get_scanner_ldflags(ldflags: T.Iterable[str]) -> T.Iterable[str]:
+        'g-ir-scanner only accepts -L/-l; must ignore -F and other linker flags'
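+        # e.g. ['-Wl,-rpath,/x', '-L/x', '-lfoo'] yields ['-L/x', '-lfoo']
+        # (illustrative values)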
+        for f in ldflags:
+            if f.startswith(('-L', '-l', '--extra-library')):
+                yield f
+
+    @typed_pos_args('gnome.generate_gir', varargs=(build.Executable, build.SharedLibrary, build.StaticLibrary), min_varargs=1)
+    @typed_kwargs(
+        'gnome.generate_gir',
+        INSTALL_KW,
+        _BUILD_BY_DEFAULT.evolve(since='0.40.0'),
+        _EXTRA_ARGS_KW,
+        ENV_KW.evolve(since='1.2.0'),
+        KwargInfo('dependencies', ContainerTypeInfo(list, Dependency), default=[], listify=True),
+        KwargInfo('export_packages', ContainerTypeInfo(list, str), default=[], listify=True),
+        KwargInfo('fatal_warnings', bool, default=False, since='0.55.0'),
+        KwargInfo('header', ContainerTypeInfo(list, str), default=[], listify=True),
+        KwargInfo('identifier_prefix', ContainerTypeInfo(list, str), default=[], listify=True),
+        KwargInfo('include_directories', ContainerTypeInfo(list, (str, build.IncludeDirs)), default=[], listify=True),
+        KwargInfo('includes', ContainerTypeInfo(list, (str, GirTarget)), default=[], listify=True),
+        KwargInfo('install_gir', (bool, NoneType), since='0.61.0'),
+        KwargInfo('install_dir_gir', (str, bool, NoneType),
+                  deprecated_values={False: ('0.61.0', 'Use install_gir to disable installation')},
+                  validator=lambda x: 'as boolean can only be false' if x is True else None),
+        KwargInfo('install_typelib', (bool, NoneType), since='0.61.0'),
+        KwargInfo('install_dir_typelib', (str, bool, NoneType),
+                  deprecated_values={False: ('0.61.0', 'Use install_typelib to disable installation')},
+                  validator=lambda x: 'as boolean can only be false' if x is True else None),
+        KwargInfo('link_with', ContainerTypeInfo(list, (build.SharedLibrary, build.StaticLibrary)), default=[], listify=True),
+        KwargInfo('namespace', str, required=True),
+        KwargInfo('nsversion', str, required=True),
+        KwargInfo('sources', ContainerTypeInfo(list, (str, mesonlib.File, build.GeneratedList, build.CustomTarget, build.CustomTargetIndex)), default=[], listify=True),
+        KwargInfo('symbol_prefix', ContainerTypeInfo(list, str), default=[], listify=True),
+    )
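+    # A hedged usage sketch (illustrative meson.build, names hypothetical):
+    #   lib = shared_library('foo', 'foo.c', dependencies: gobject_dep)
+    #   gnome.generate_gir(lib, namespace: 'Foo', nsversion: '1.0',
+    #                      sources: ['foo.c', 'foo.h'], install: true)
+    # yields a Foo-1.0.gir scanner target and a Foo-1.0.typelib compiler target.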
+    def generate_gir(self, state: 'ModuleState', args: T.Tuple[T.List[T.Union[build.Executable, build.SharedLibrary, build.StaticLibrary]]],
+                     kwargs: 'GenerateGir') -> ModuleReturnValue:
+        girtargets = [self._unwrap_gir_target(arg, state) for arg in args[0]]
+        if len(girtargets) > 1 and any(isinstance(el, build.Executable) for el in girtargets):
+            raise MesonException('generate_gir only accepts a single argument when one of the arguments is an executable')
+
+        gir_dep, giscanner, gicompiler = self._get_gir_dep(state)
+
+        ns = kwargs['namespace']
+        nsversion = kwargs['nsversion']
+        libsources = kwargs['sources']
+
+        girfile = f'{ns}-{nsversion}.gir'
+        srcdir = os.path.join(state.environment.get_source_dir(), state.subdir)
+        builddir = os.path.join(state.environment.get_build_dir(), state.subdir)
+
+        depends: T.List[T.Union['FileOrString', 'build.GeneratedTypes', build.BuildTarget, build.StructuredSources]] = []
+        depends.extend(gir_dep.sources)
+        depends.extend(girtargets)
+
+        langs_compilers = self._get_girtargets_langs_compilers(girtargets)
+        cflags, internal_ldflags, external_ldflags = self._get_langs_compilers_flags(state, langs_compilers)
+        deps = self._get_gir_targets_deps(girtargets)
+        deps += kwargs['dependencies']
+        deps += [gir_dep]
+        typelib_includes, depends = self._gather_typelib_includes_and_update_depends(state, deps, depends)
+        # ldflags can be misinterpreted by the gir scanner (showing up as
+        # spurious dependencies), but building GStreamer fails if they
+        # are not passed here.
+        dep_cflags, dep_internal_ldflags, dep_external_ldflags, gi_includes, depends = \
+            self._get_dependencies_flags(deps, state, depends, use_gir_args=True)
+        scan_cflags = []
+        scan_cflags += list(self._get_scanner_cflags(cflags))
+        scan_cflags += list(self._get_scanner_cflags(dep_cflags))
+        scan_cflags += list(self._get_scanner_cflags(self._get_external_args_for_langs(state, [lc[0] for lc in langs_compilers])))
+        scan_internal_ldflags = []
+        scan_internal_ldflags += list(self._get_scanner_ldflags(internal_ldflags))
+        scan_internal_ldflags += list(self._get_scanner_ldflags(dep_internal_ldflags))
+        scan_external_ldflags = []
+        scan_external_ldflags += list(self._get_scanner_ldflags(external_ldflags))
+        scan_external_ldflags += list(self._get_scanner_ldflags(dep_external_ldflags))
+        girtargets_inc_dirs = self._get_gir_targets_inc_dirs(girtargets)
+        inc_dirs = kwargs['include_directories']
+
+        gir_inc_dirs: T.List[str] = []
+
+        scan_command: T.List[T.Union[str, build.Executable, 'ExternalProgram', 'OverrideProgram']] = [giscanner]
+        scan_command += ['--quiet']
+        scan_command += ['--no-libtool']
+        scan_command += ['--namespace=' + ns, '--nsversion=' + nsversion]
+        scan_command += ['--warn-all']
+        scan_command += ['--output', '@OUTPUT@']
+        scan_command += [f'--c-include={h}' for h in kwargs['header']]
+        scan_command += kwargs['extra_args']
+        scan_command += ['-I' + srcdir, '-I' + builddir]
+        scan_command += state.get_include_args(girtargets_inc_dirs)
+        scan_command += ['--filelist=' + self._make_gir_filelist(state, srcdir, ns, nsversion, girtargets, libsources)]
+        for l in kwargs['link_with']:
+            _cflags, depends = self._get_link_args(state, l, depends, use_gir_args=True)
+            scan_command.extend(_cflags)
+        _cmd, _ginc, _deps = self._scan_include(state, kwargs['includes'])
+        scan_command.extend(_cmd)
+        gir_inc_dirs.extend(_ginc)
+        depends.extend(_deps)
+
+        scan_command += [f'--symbol-prefix={p}' for p in kwargs['symbol_prefix']]
+        scan_command += [f'--identifier-prefix={p}' for p in kwargs['identifier_prefix']]
+        scan_command += [f'--pkg-export={p}' for p in kwargs['export_packages']]
+        scan_command += ['--cflags-begin']
+        scan_command += scan_cflags
+        scan_command += ['--cflags-end']
+        scan_command += state.get_include_args(inc_dirs)
+        scan_command += state.get_include_args(itertools.chain(gi_includes, gir_inc_dirs, inc_dirs), prefix='--add-include-path=')
+        scan_command += list(scan_internal_ldflags)
+        scan_command += self._scan_gir_targets(state, girtargets)
+        scan_command += self._scan_langs(state, [lc[0] for lc in langs_compilers])
+        scan_command += list(scan_external_ldflags)
+
+        if self._gir_has_option('--sources-top-dirs'):
+            scan_command += ['--sources-top-dirs', os.path.join(state.environment.get_source_dir(), state.root_subdir)]
+            scan_command += ['--sources-top-dirs', os.path.join(state.environment.get_build_dir(), state.root_subdir)]
+
+        if '--warn-error' in scan_command:
+            FeatureDeprecated.single_use('gnome.generate_gir argument --warn-error', '0.55.0',
+                                         state.subproject, 'Use "fatal_warnings" keyword argument', state.current_node)
+        if kwargs['fatal_warnings']:
+            scan_command.append('--warn-error')
+
+        generated_files = [f for f in libsources if isinstance(f, (GeneratedList, CustomTarget, CustomTargetIndex))]
+
+        scan_target = self._make_gir_target(
+            state, girfile, scan_command, generated_files, depends,
+            # We have to cast here because mypy can't figure this out
+            T.cast('T.Dict[str, T.Any]', kwargs))
+
+        typelib_output = f'{ns}-{nsversion}.typelib'
+        typelib_cmd = [gicompiler, scan_target, '--output', '@OUTPUT@']
+        typelib_cmd += state.get_include_args(gir_inc_dirs, prefix='--includedir=')
+
+        for incdir in typelib_includes:
+            typelib_cmd += ["--includedir=" + incdir]
+
+        typelib_target = self._make_typelib_target(state, typelib_output, typelib_cmd, generated_files, T.cast('T.Dict[str, T.Any]', kwargs))
+
+        self._devenv_prepend('GI_TYPELIB_PATH', os.path.join(state.environment.get_build_dir(), state.subdir))
+
+        rv = [scan_target, typelib_target]
+
+        return ModuleReturnValue(rv, rv)
+
+    @noPosargs
+    @typed_kwargs('gnome.compile_schemas', _BUILD_BY_DEFAULT.evolve(since='0.40.0'), DEPEND_FILES_KW)
+    def compile_schemas(self, state: 'ModuleState', args: T.List['TYPE_var'], kwargs: 'CompileSchemas') -> ModuleReturnValue:
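+        # In brief: run glib-compile-schemas over the current subdir's
+        # *.gschema.xml files and (below) prepend the build subdir to
+        # GSETTINGS_SCHEMA_DIR in the devenv.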
+        srcdir = os.path.join(state.build_to_src, state.subdir)
+        outdir = state.subdir
+
+        cmd: T.List[T.Union[ExternalProgram, Executable, OverrideProgram, str]] = [state.find_program('glib-compile-schemas'), '--targetdir', outdir, srcdir]
+        if state.subdir == '':
+            targetname = 'gsettings-compile'
+        else:
+            targetname = 'gsettings-compile-' + state.subdir.replace('/', '_')
+        target_g = build.CustomTarget(
+            targetname,
+            state.subdir,
+            state.subproject,
+            state.environment,
+            cmd,
+            [],
+            ['gschemas.compiled'],
+            build_by_default=kwargs['build_by_default'],
+            depend_files=kwargs['depend_files'],
+        )
+        self._devenv_prepend('GSETTINGS_SCHEMA_DIR', os.path.join(state.environment.get_build_dir(), state.subdir))
+        return ModuleReturnValue(target_g, [target_g])
+
+    @typed_pos_args('gnome.yelp', str, varargs=str)
+    @typed_kwargs(
+        'gnome.yelp',
+        KwargInfo(
+            'languages', ContainerTypeInfo(list, str),
+            listify=True, default=[],
+            deprecated='0.43.0',
+            deprecated_message='Use a LINGUAS file in the source directory instead',
+        ),
+        KwargInfo('media', ContainerTypeInfo(list, str), listify=True, default=[]),
+        KwargInfo('sources', ContainerTypeInfo(list, str), listify=True, default=[]),
+        KwargInfo('symlink_media', bool, default=True),
+    )
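+    # A hedged usage sketch (illustrative, names hypothetical):
+    #   gnome.yelp('gnome-help', sources: ['index.page'])
+    # wires up itstool/msgmerge/msgfmt targets for each language listed in the
+    # LINGUAS file in the calling directory.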
+    def yelp(self, state: 'ModuleState', args: T.Tuple[str, T.List[str]], kwargs: 'Yelp') -> ModuleReturnValue:
+        project_id = args[0]
+        sources = kwargs['sources']
+        if args[1]:
+            FeatureDeprecated.single_use('gnome.yelp more than one positional argument', '0.60.0',
+                                         state.subproject, 'use the "sources" keyword argument instead.', state.current_node)
+        if not sources:
+            sources = args[1]
+            if not sources:
+                raise MesonException('Yelp requires a list of sources')
+        elif args[1]:
+            mlog.warning('"gnome.yelp" ignores positional sources arguments when the "sources" keyword argument is set')
+        sources_files = [mesonlib.File.from_source_file(state.environment.source_dir,
+                                                        os.path.join(state.subdir, 'C'),
+                                                        s) for s in sources]
+
+        langs = kwargs['languages']
+        if not langs:
+            langs = read_linguas(os.path.join(state.environment.source_dir, state.subdir))
+
+        media = kwargs['media']
+        symlinks = kwargs['symlink_media']
+        targets: T.List[T.Union['build.Target', build.Data, build.SymlinkData]] = []
+        potargets: T.List[build.RunTarget] = []
+
+        itstool = state.find_program('itstool')
+        msgmerge = state.find_program('msgmerge')
+        msgfmt = state.find_program('msgfmt')
+
+        install_dir = os.path.join(state.environment.get_datadir(), 'help')
+        c_install_dir = os.path.join(install_dir, 'C', project_id)
+        c_data = build.Data(sources_files, c_install_dir, c_install_dir,
+                            mesonlib.FileMode(), state.subproject, install_tag='doc')
+        targets.append(c_data)
+
+        media_files: T.List[mesonlib.File] = []
+        for m in media:
+            f = mesonlib.File.from_source_file(state.environment.source_dir,
+                                               os.path.join(state.subdir, 'C'), m)
+            media_files.append(f)
+            m_install_dir = os.path.join(c_install_dir, os.path.dirname(m))
+            m_data = build.Data([f], m_install_dir, m_install_dir,
+                                mesonlib.FileMode(), state.subproject, install_tag='doc')
+            targets.append(m_data)
+
+        pot_file = os.path.join('@SOURCE_ROOT@', state.subdir, 'C', project_id + '.pot')
+        pot_sources = [os.path.join('@SOURCE_ROOT@', state.subdir, 'C', s) for s in sources]
+        pot_args: T.List[T.Union[ExternalProgram, Executable, OverrideProgram, str]] = [itstool, '-o', pot_file]
+        pot_args.extend(pot_sources)
+        pottarget = build.RunTarget(f'help-{project_id}-pot', pot_args, [],
+                                    os.path.join(state.subdir, 'C'), state.subproject,
+                                    state.environment)
+        targets.append(pottarget)
+
+        for l in langs:
+            l_subdir = os.path.join(state.subdir, l)
+            l_install_dir = os.path.join(install_dir, l, project_id)
+
+            for i, m in enumerate(media):
+                m_dir = os.path.dirname(m)
+                m_install_dir = os.path.join(l_install_dir, m_dir)
+                l_data: T.Union[build.Data, build.SymlinkData]
+                if symlinks:
+                    link_target = os.path.join(os.path.relpath(c_install_dir, start=m_install_dir), m)
+                    l_data = build.SymlinkData(link_target, os.path.basename(m),
+                                               m_install_dir, state.subproject, install_tag='doc')
+                else:
+                    try:
+                        m_file = mesonlib.File.from_source_file(state.environment.source_dir, l_subdir, m)
+                    except MesonException:
+                        m_file = media_files[i]
+                    l_data = build.Data([m_file], m_install_dir, m_install_dir,
+                                        mesonlib.FileMode(), state.subproject, install_tag='doc')
+                targets.append(l_data)
+
+            po_file = l + '.po'
+            po_args: T.List[T.Union[ExternalProgram, Executable, OverrideProgram, str]] = [
+                msgmerge, '-q', '-o',
+                os.path.join('@SOURCE_ROOT@', l_subdir, po_file),
+                os.path.join('@SOURCE_ROOT@', l_subdir, po_file), pot_file]
+            potarget = build.RunTarget(f'help-{project_id}-{l}-update-po',
+                                       po_args, [pottarget], l_subdir, state.subproject,
+                                       state.environment)
+            targets.append(potarget)
+            potargets.append(potarget)
+
+            gmo_file = project_id + '-' + l + '.gmo'
+            gmotarget = build.CustomTarget(
+                f'help-{project_id}-{l}-gmo',
+                l_subdir,
+                state.subproject,
+                state.environment,
+                [msgfmt, '@INPUT@', '-o', '@OUTPUT@'],
+                [po_file],
+                [gmo_file],
+                install_tag=['doc'],
+            )
+            targets.append(gmotarget)
+
+            mergetarget = build.CustomTarget(
+                f'help-{project_id}-{l}',
+                l_subdir,
+                state.subproject,
+                state.environment,
+                [itstool, '-m', os.path.join(l_subdir, gmo_file), '--lang', l, '-o', '@OUTDIR@', '@INPUT@'],
+                sources_files,
+                sources,
+                extra_depends=[gmotarget],
+                install=True,
+                install_dir=[l_install_dir],
+                install_tag=['doc'],
+            )
+            targets.append(mergetarget)
+
+        allpotarget = build.AliasTarget(f'help-{project_id}-update-po', potargets,
+                                        state.subdir, state.subproject, state.environment)
+        targets.append(allpotarget)
+
+        return ModuleReturnValue(None, targets)
+
+    @typed_pos_args('gnome.gtkdoc', str)
+    @typed_kwargs(
+        'gnome.gtkdoc',
+        KwargInfo('c_args', ContainerTypeInfo(list, str), since='0.48.0', default=[], listify=True),
+        KwargInfo('check', bool, default=False, since='0.52.0'),
+        KwargInfo('content_files', ContainerTypeInfo(list, (str, mesonlib.File, build.GeneratedList, build.CustomTarget, build.CustomTargetIndex)), default=[], listify=True),
+        KwargInfo(
+            'dependencies',
+            ContainerTypeInfo(list, (Dependency, build.SharedLibrary, build.StaticLibrary)),
+            listify=True, default=[]),
+        KwargInfo('expand_content_files', ContainerTypeInfo(list, (str, mesonlib.File)), default=[], listify=True),
+        KwargInfo('fixxref_args', ContainerTypeInfo(list, str), default=[], listify=True),
+        KwargInfo('gobject_typesfile', ContainerTypeInfo(list, (str, mesonlib.File)), default=[], listify=True),
+        KwargInfo('html_args', ContainerTypeInfo(list, str), default=[], listify=True),
+        KwargInfo('html_assets', ContainerTypeInfo(list, (str, mesonlib.File)), default=[], listify=True),
+        KwargInfo('ignore_headers', ContainerTypeInfo(list, str), default=[], listify=True),
+        KwargInfo(
+            'include_directories',
+            ContainerTypeInfo(list, (str, build.IncludeDirs)),
+            listify=True, default=[]),
+        KwargInfo('install', bool, default=True),
+        KwargInfo('install_dir', ContainerTypeInfo(list, str), default=[], listify=True),
+        KwargInfo('main_sgml', (str, NoneType)),
+        KwargInfo('main_xml', (str, NoneType)),
+        KwargInfo('mkdb_args', ContainerTypeInfo(list, str), default=[], listify=True),
+        KwargInfo(
+            'mode', str, default='auto', since='0.37.0',
+            validator=in_set_validator({'xml', 'sgml', 'none', 'auto'})),
+        KwargInfo('module_version', str, default='', since='0.48.0'),
+        KwargInfo('namespace', str, default='', since='0.37.0'),
+        KwargInfo('scan_args', ContainerTypeInfo(list, str), default=[], listify=True),
+        KwargInfo('scanobjs_args', ContainerTypeInfo(list, str), default=[], listify=True),
+        KwargInfo('src_dir', ContainerTypeInfo(list, (str, build.IncludeDirs)), listify=True, required=True),
+    )
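+    # A hedged usage sketch (illustrative, names hypothetical):
+    #   gnome.gtkdoc('foo', main_sgml: 'foo-docs.sgml', src_dir: 'include')
+    # drives the gtkdoc-scan/mkdb/mkhtml/fixxref tools through Meson's internal
+    # 'gtkdoc' helper command built below.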
+    def gtkdoc(self, state: 'ModuleState', args: T.Tuple[str], kwargs: 'GtkDoc') -> ModuleReturnValue:
+        modulename = args[0]
+        main_file = kwargs['main_sgml']
+        main_xml = kwargs['main_xml']
+        if main_xml is not None:
+            if main_file is not None:
+                raise InvalidArguments('gnome.gtkdoc: main_xml and main_sgml are exclusive arguments')
+            main_file = main_xml
+        moduleversion = kwargs['module_version']
+        targetname = modulename + ('-' + moduleversion if moduleversion else '') + '-doc'
+        command = state.environment.get_build_command()
+
+        namespace = kwargs['namespace']
+
+        def abs_filenames(files: T.Iterable['FileOrString']) -> T.Iterator[str]:
+            for f in files:
+                if isinstance(f, mesonlib.File):
+                    yield f.absolute_path(state.environment.get_source_dir(), state.environment.get_build_dir())
+                else:
+                    yield os.path.join(state.environment.get_source_dir(), state.subdir, f)
+
+        src_dirs = kwargs['src_dir']
+        header_dirs: T.List[str] = []
+        for src_dir in src_dirs:
+            if isinstance(src_dir, build.IncludeDirs):
+                header_dirs.extend(src_dir.to_string_list(state.environment.get_source_dir(),
+                                                          state.environment.get_build_dir()))
+            else:
+                header_dirs.append(src_dir)
+
+        t_args: T.List[str] = [
+            '--internal', 'gtkdoc',
+            '--sourcedir=' + state.environment.get_source_dir(),
+            '--builddir=' + state.environment.get_build_dir(),
+            '--subdir=' + state.subdir,
+            '--headerdirs=' + '@@'.join(header_dirs),
+            '--mainfile=' + main_file,
+            '--modulename=' + modulename,
+            '--moduleversion=' + moduleversion,
+            '--mode=' + kwargs['mode']]
+        for tool in ['scan', 'scangobj', 'mkdb', 'mkhtml', 'fixxref']:
+            program_name = 'gtkdoc-' + tool
+            program = state.find_program(program_name)
+            path = program.get_path()
+            assert path is not None, "This shouldn't be possible since program should be found"
+            t_args.append(f'--{program_name}={path}')
+        if namespace:
+            t_args.append('--namespace=' + namespace)
+        exe_wrapper = state.environment.get_exe_wrapper()
+        if exe_wrapper:
+            t_args.append('--run=' + ' '.join(exe_wrapper.get_command()))
+        t_args.append(f'--htmlargs={"@@".join(kwargs["html_args"])}')
+        t_args.append(f'--scanargs={"@@".join(kwargs["scan_args"])}')
+        t_args.append(f'--scanobjsargs={"@@".join(kwargs["scanobjs_args"])}')
+        t_args.append(f'--gobjects-types-file={"@@".join(abs_filenames(kwargs["gobject_typesfile"]))}')
+        t_args.append(f'--fixxrefargs={"@@".join(kwargs["fixxref_args"])}')
+        t_args.append(f'--mkdbargs={"@@".join(kwargs["mkdb_args"])}')
+        t_args.append(f'--html-assets={"@@".join(abs_filenames(kwargs["html_assets"]))}')
+
+        depends: T.List['build.GeneratedTypes'] = []
+        content_files = []
+        for s in kwargs['content_files']:
+            if isinstance(s, (build.CustomTarget, build.CustomTargetIndex)):
+                depends.append(s)
+                for o in s.get_outputs():
+                    content_files.append(os.path.join(state.environment.get_build_dir(),
+                                                      state.backend.get_target_dir(s),
+                                                      o))
+            elif isinstance(s, mesonlib.File):
+                content_files.append(s.absolute_path(state.environment.get_source_dir(),
+                                                     state.environment.get_build_dir()))
+            elif isinstance(s, build.GeneratedList):
+                depends.append(s)
+                for gen_src in s.get_outputs():
+                    content_files.append(os.path.join(state.environment.get_source_dir(),
+                                                      state.subdir,
+                                                      gen_src))
+            else:
+                content_files.append(os.path.join(state.environment.get_source_dir(),
+                                                  state.subdir,
+                                                  s))
+        t_args += ['--content-files=' + '@@'.join(content_files)]
+
+        t_args.append(f'--expand-content-files={"@@".join(abs_filenames(kwargs["expand_content_files"]))}')
+        t_args.append(f'--ignore-headers={"@@".join(kwargs["ignore_headers"])}')
+        t_args.append(f'--installdir={"@@".join(kwargs["install_dir"])}')
+        build_args, new_depends = self._get_build_args(kwargs['c_args'], kwargs['include_directories'],
+                                                       kwargs['dependencies'], state, depends)
+        t_args.extend(build_args)
+        new_depends.extend(depends)
+        custom_target = build.CustomTarget(
+            targetname,
+            state.subdir,
+            state.subproject,
+            state.environment,
+            command + t_args,
+            [],
+            [f'{modulename}-decl.txt'],
+            build_always_stale=True,
+            extra_depends=new_depends,
+        )
+        alias_target = build.AliasTarget(targetname, [custom_target], state.subdir, state.subproject, state.environment)
+        if kwargs['check']:
+            check_cmd = state.find_program('gtkdoc-check')
+            check_env = ['DOC_MODULE=' + modulename,
+                         'DOC_MAIN_SGML_FILE=' + main_file]
+            check_args = (targetname + '-check', check_cmd)
+            check_workdir = os.path.join(state.environment.get_build_dir(), state.subdir)
+            state.test(check_args, env=check_env, workdir=check_workdir, depends=[custom_target])
+        res: T.List[T.Union[build.Target, build.ExecutableSerialisation]] = [custom_target, alias_target]
+        if kwargs['install']:
+            res.append(state.backend.get_executable_serialisation(command + t_args, tag='doc'))
+        return ModuleReturnValue(custom_target, res)
+
+    def _get_build_args(self, c_args: T.List[str], inc_dirs: T.List[T.Union[str, build.IncludeDirs]],
+                        deps: T.List[T.Union[Dependency, build.SharedLibrary, build.StaticLibrary]],
+                        state: 'ModuleState',
+                        depends: T.Sequence[T.Union[build.BuildTarget, 'build.GeneratedTypes']]) -> T.Tuple[
+                                T.List[str], T.List[T.Union[build.BuildTarget, 'build.GeneratedTypes', 'FileOrString', build.StructuredSources]]]:
+        args: T.List[str] = []
+        cflags = c_args.copy()
+        deps_cflags, internal_ldflags, external_ldflags, _gi_includes, new_depends = \
+            self._get_dependencies_flags(deps, state, depends, include_rpath=True)
+
+        cflags.extend(deps_cflags)
+        cflags.extend(state.get_include_args(inc_dirs))
+        ldflags: T.List[str] = []
+        ldflags.extend(internal_ldflags)
+        ldflags.extend(external_ldflags)
+
+        cflags.extend(state.environment.coredata.get_external_args(MachineChoice.HOST, 'c'))
+        ldflags.extend(state.environment.coredata.get_external_link_args(MachineChoice.HOST, 'c'))
+        compiler = state.environment.coredata.compilers[MachineChoice.HOST]['c']
+
+        compiler_flags = self._get_langs_compilers_flags(state, [('c', compiler)])
+        cflags.extend(compiler_flags[0])
+        ldflags.extend(compiler_flags[1])
+        ldflags.extend(compiler_flags[2])
+        if compiler:
+            args += ['--cc=%s' % join_args(compiler.get_exelist())]
+            args += ['--ld=%s' % join_args(compiler.get_linker_exelist())]
+        if cflags:
+            args += ['--cflags=%s' % join_args(cflags)]
+        if ldflags:
+            args += ['--ldflags=%s' % join_args(ldflags)]
+
+        return args, new_depends
+
+    @noKwargs
+    @typed_pos_args('gnome.gtkdoc_html_dir', str)
+    def gtkdoc_html_dir(self, state: 'ModuleState', args: T.Tuple[str], kwargs: 'TYPE_kwargs') -> str:
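+        # e.g. gnome.gtkdoc_html_dir('foo') -> 'share/gtk-doc/html/foo'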
+        return os.path.join('share/gtk-doc/html', args[0])
+
+    @typed_pos_args('gnome.gdbus_codegen', str, optargs=[(str, mesonlib.File, build.CustomTarget, build.CustomTargetIndex, build.GeneratedList)])
+    @typed_kwargs(
+        'gnome.gdbus_codegen',
+        _BUILD_BY_DEFAULT.evolve(since='0.40.0'),
+        SOURCES_KW.evolve(since='0.46.0'),
+        KwargInfo('extra_args', ContainerTypeInfo(list, str), since='0.47.0', default=[], listify=True),
+        KwargInfo('interface_prefix', (str, NoneType)),
+        KwargInfo('namespace', (str, NoneType)),
+        KwargInfo('object_manager', bool, default=False),
+        KwargInfo(
+            'annotations', ContainerTypeInfo(list, (list, str)),
+            default=[],
+            validator=annotations_validator,
+            convertor=lambda x: [x] if x and isinstance(x[0], str) else x,
+        ),
+        KwargInfo('install_header', bool, default=False, since='0.46.0'),
+        KwargInfo('docbook', (str, NoneType)),
+        KwargInfo(
+            'autocleanup', str, default='default', since='0.47.0',
+            validator=in_set_validator({'all', 'none', 'objects'})),
+        INSTALL_DIR_KW.evolve(since='0.46.0')
+    )
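+    # A hedged usage sketch (illustrative, names hypothetical):
+    #   gnome.gdbus_codegen('foo-dbus', 'com.example.Foo.xml')
+    # yields foo-dbus.c and foo-dbus.h custom targets (plus optional docbook).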
+    def gdbus_codegen(self, state: 'ModuleState', args: T.Tuple[str, T.Optional[T.Union['FileOrString', build.GeneratedTypes]]],
+                      kwargs: 'GdbusCodegen') -> ModuleReturnValue:
+        namebase = args[0]
+        xml_files: T.List[T.Union['FileOrString', build.GeneratedTypes]] = [args[1]] if args[1] else []
+        cmd: T.List[T.Union[ExternalProgram, Executable, OverrideProgram, str]] = [state.find_program('gdbus-codegen')]
+        cmd.extend(kwargs['extra_args'])
+
+        # Autocleanup supported?
+        glib_version = self._get_native_glib_version(state)
+        if not mesonlib.version_compare(glib_version, '>= 2.49.1'):
+            # Warn if requested, silently disable if not
+            if kwargs['autocleanup'] != 'default':
+                mlog.warning(f'GLib version ({glib_version}) is too old to support the \'autocleanup\' '
+                             'kwarg, need 2.49.1 or newer')
+        else:
+            # GLib is new enough; map the 'default' setting to 'all'
+            ac = kwargs['autocleanup']
+            if ac == 'default':
+                ac = 'all'
+            cmd.extend(['--c-generate-autocleanup', ac])
+
+        if kwargs['interface_prefix'] is not None:
+            cmd.extend(['--interface-prefix', kwargs['interface_prefix']])
+        if kwargs['namespace'] is not None:
+            cmd.extend(['--c-namespace', kwargs['namespace']])
+        if kwargs['object_manager']:
+            cmd.extend(['--c-generate-object-manager'])
+        xml_files.extend(kwargs['sources'])
+        build_by_default = kwargs['build_by_default']
+
+        # Annotations are a bit ugly in that they are a list of lists of strings...
+        for annot in kwargs['annotations']:
+            cmd.append('--annotate')
+            cmd.extend(annot)
+
+        targets = []
+        install_header = kwargs['install_header']
+        install_dir = kwargs['install_dir'] or state.environment.coredata.get_option(mesonlib.OptionKey('includedir'))
+        assert isinstance(install_dir, str), 'for mypy'
+
+        output = namebase + '.c'
+        # Added in https://gitlab.gnome.org/GNOME/glib/commit/e4d68c7b3e8b01ab1a4231bf6da21d045cb5a816 (2.55.2)
+        # Fixed in https://gitlab.gnome.org/GNOME/glib/commit/cd1f82d8fc741a2203582c12cc21b4dacf7e1872 (2.56.2)
+        if mesonlib.version_compare(glib_version, '>= 2.56.2'):
+            c_cmd = cmd + ['--body', '--output', '@OUTPUT@', '@INPUT@']
+        else:
+            if kwargs['docbook'] is not None:
+                docbook = kwargs['docbook']
+
+                cmd += ['--generate-docbook', docbook]
+
+            # https://git.gnome.org/browse/glib/commit/?id=ee09bb704fe9ccb24d92dd86696a0e6bb8f0dc1a
+            if mesonlib.version_compare(glib_version, '>= 2.51.3'):
+                cmd += ['--output-directory', '@OUTDIR@', '--generate-c-code', namebase, '@INPUT@']
+            else:
+                self._print_gdbus_warning()
+                cmd += ['--generate-c-code', '@OUTDIR@/' + namebase, '@INPUT@']
+            c_cmd = cmd
+
+        cfile_custom_target = build.CustomTarget(
+            output,
+            state.subdir,
+            state.subproject,
+            state.environment,
+            c_cmd,
+            xml_files,
+            [output],
+            build_by_default=build_by_default,
+        )
+        targets.append(cfile_custom_target)
+
+        output = namebase + '.h'
+        if mesonlib.version_compare(glib_version, '>= 2.56.2'):
+            hfile_cmd = cmd + ['--header', '--output', '@OUTPUT@', '@INPUT@']
+            depends = []
+        else:
+            hfile_cmd = cmd
+            depends = [cfile_custom_target]
+
+        hfile_custom_target = build.CustomTarget(
+            output,
+            state.subdir,
+            state.subproject,
+            state.environment,
+            hfile_cmd,
+            xml_files,
+            [output],
+            build_by_default=build_by_default,
+            extra_depends=depends,
+            install=install_header,
+            install_dir=[install_dir],
+            install_tag=['devel'],
+        )
+        targets.append(hfile_custom_target)
+
+        if kwargs['docbook'] is not None:
+            docbook = kwargs['docbook']
+            # The docbook output is always ${docbook}-${name_of_xml_file}
+            output = namebase + '-docbook'
+            outputs = []
+            for f in xml_files:
+                outputs.append('{}-{}'.format(docbook, os.path.basename(str(f))))
+
+            if mesonlib.version_compare(glib_version, '>= 2.56.2'):
+                docbook_cmd = cmd + ['--output-directory', '@OUTDIR@', '--generate-docbook', docbook, '@INPUT@']
+                depends = []
+            else:
+                docbook_cmd = cmd
+                depends = [cfile_custom_target]
+
+            docbook_custom_target = build.CustomTarget(
+                output,
+                state.subdir,
+                state.subproject,
+                state.environment,
+                docbook_cmd,
+                xml_files,
+                outputs,
+                build_by_default=build_by_default,
+                extra_depends=depends,
+            )
+            targets.append(docbook_custom_target)
+
+        return ModuleReturnValue(targets, targets)
+
+    @typed_pos_args('gnome.mkenums', str)
+    @typed_kwargs(
+        'gnome.mkenums',
+        *_MK_ENUMS_COMMON_KWS,
+        DEPENDS_KW,
+        KwargInfo('c_template', (str, mesonlib.File, NoneType)),
+        KwargInfo('h_template', (str, mesonlib.File, NoneType)),
+        KwargInfo('comments', (str, NoneType)),
+        KwargInfo('eprod', (str, NoneType)),
+        KwargInfo('fhead', (str, NoneType)),
+        KwargInfo('fprod', (str, NoneType)),
+        KwargInfo('ftail', (str, NoneType)),
+        KwargInfo('vhead', (str, NoneType)),
+        KwargInfo('vprod', (str, NoneType)),
+        KwargInfo('vtail', (str, NoneType)),
+    )
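+    # A hedged usage sketch (illustrative, names hypothetical):
+    #   gnome.mkenums('foo-enums', sources: ['foo.h'],
+    #                 h_template: 'foo-enums.h.in', c_template: 'foo-enums.c.in')
+    # yields one custom target per template, named after the template minus its
+    # last extension.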
+    def mkenums(self, state: 'ModuleState', args: T.Tuple[str], kwargs: 'MkEnums') -> ModuleReturnValue:
+        basename = args[0]
+
+        c_template = kwargs['c_template']
+        if isinstance(c_template, mesonlib.File):
+            c_template = c_template.absolute_path(state.environment.source_dir, state.environment.build_dir)
+        h_template = kwargs['h_template']
+        if isinstance(h_template, mesonlib.File):
+            h_template = h_template.absolute_path(state.environment.source_dir, state.environment.build_dir)
+
+        cmd: T.List[str] = []
+        known_kwargs = ['comments', 'eprod', 'fhead', 'fprod', 'ftail',
+                        'identifier_prefix', 'symbol_prefix',
+                        'vhead', 'vprod', 'vtail']
+        for arg in known_kwargs:
+            # mypy can't figure this out
+            if kwargs[arg]:                                         # type: ignore
+                cmd += ['--' + arg.replace('_', '-'), kwargs[arg]]  # type: ignore
+
+        targets: T.List[CustomTarget] = []
+
+        h_target: T.Optional[CustomTarget] = None
+        if h_template is not None:
+            h_output = os.path.basename(os.path.splitext(h_template)[0])
+            # We always set template as the first element in the source array
+            # so --template consumes it.
+            h_cmd = cmd + ['--template', '@INPUT@']
+            h_sources: T.List[T.Union[FileOrString, 'build.GeneratedTypes']] = [h_template]
+            h_sources.extend(kwargs['sources'])
+            h_target = self._make_mkenum_impl(
+                state, h_sources, h_output, h_cmd, install=kwargs['install_header'],
+                install_dir=kwargs['install_dir'])
+            targets.append(h_target)
+
+        if c_template is not None:
+            c_output = os.path.basename(os.path.splitext(c_template)[0])
+            # We always set template as the first element in the source array
+            # so --template consumes it.
+            c_cmd = cmd + ['--template', '@INPUT@']
+            c_sources: T.List[T.Union[FileOrString, 'build.GeneratedTypes']] = [c_template]
+            c_sources.extend(kwargs['sources'])
+
+            depends = kwargs['depends'].copy()
+            if h_target is not None:
+                depends.append(h_target)
+            c_target = self._make_mkenum_impl(
+                state, c_sources, c_output, c_cmd, depends=depends)
+            targets.insert(0, c_target)
+
+        if c_template is None and h_template is None:
+            generic_cmd = cmd + ['@INPUT@']
+            target = self._make_mkenum_impl(
+                state, kwargs['sources'], basename, generic_cmd,
+                install=kwargs['install_header'],
+                install_dir=kwargs['install_dir'])
+            return ModuleReturnValue(target, [target])
+        else:
+            return ModuleReturnValue(targets, targets)
+
+    @FeatureNew('gnome.mkenums_simple', '0.42.0')
+    @typed_pos_args('gnome.mkenums_simple', str)
+    @typed_kwargs(
+        'gnome.mkenums_simple',
+        *_MK_ENUMS_COMMON_KWS,
+        KwargInfo('header_prefix', str, default=''),
+        KwargInfo('function_prefix', str, default=''),
+        KwargInfo('body_prefix', str, default=''),
+        KwargInfo('decorator', str, default=''),
+    )
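+    # A hedged usage sketch (illustrative, names hypothetical):
+    #   gnome.mkenums_simple('foo-enums', sources: ['foo.h'])
+    # yields foo-enums.c/.h with the GType boilerplate below generated for the
+    # enums declared in foo.h.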
+    def mkenums_simple(self, state: 'ModuleState', args: T.Tuple[str], kwargs: 'MkEnumsSimple') -> ModuleReturnValue:
+        hdr_filename = f'{args[0]}.h'
+        body_filename = f'{args[0]}.c'
+
+        header_prefix = kwargs['header_prefix']
+        decl_decorator = kwargs['decorator']
+        func_prefix = kwargs['function_prefix']
+        body_prefix = kwargs['body_prefix']
+
+        cmd: T.List[str] = []
+        if kwargs['identifier_prefix']:
+            cmd.extend(['--identifier-prefix', kwargs['identifier_prefix']])
+        if kwargs['symbol_prefix']:
+            cmd.extend(['--symbol-prefix', kwargs['symbol_prefix']])
+
+        c_cmd = cmd.copy()
+        # Maybe we should write our own template files into the build dir
+        # instead, but that seems like much more work, nice as it would be.
+        fhead = ''
+        if body_prefix != '':
+            fhead += '%s\n' % body_prefix
+        fhead += '#include "%s"\n' % hdr_filename
+        for hdr in kwargs['sources']:
+            fhead += '#include "{}"\n'.format(os.path.basename(str(hdr)))
+        fhead += textwrap.dedent(
+            '''
+            #define C_ENUM(v) ((gint) v)
+            #define C_FLAGS(v) ((guint) v)
+            ''')
+        c_cmd.extend(['--fhead', fhead])
+
+        c_cmd.append('--fprod')
+        c_cmd.append(textwrap.dedent(
+            '''
+            /* enumerations from "@basename@" */
+            '''))
+
+        c_cmd.append('--vhead')
+        c_cmd.append(textwrap.dedent(
+            f'''
+            GType
+            {func_prefix}@enum_name@_get_type (void)
+            {{
+            static gsize gtype_id = 0;
+            static const G@Type@Value values[] = {{'''))
+
+        c_cmd.extend(['--vprod', '    { C_@TYPE@(@VALUENAME@), "@VALUENAME@", "@valuenick@" },'])
+
+        c_cmd.append('--vtail')
+        c_cmd.append(textwrap.dedent(
+            '''    { 0, NULL, NULL }
+            };
+            if (g_once_init_enter (&gtype_id)) {
+                GType new_type = g_@type@_register_static (g_intern_static_string ("@EnumName@"), values);
+                g_once_init_leave (&gtype_id, new_type);
+            }
+            return (GType) gtype_id;
+            }'''))
+        c_cmd.append('@INPUT@')
+
+        c_file = self._make_mkenum_impl(state, kwargs['sources'], body_filename, c_cmd)
+
+        # .h file generation
+        h_cmd = cmd.copy()
+
+        h_cmd.append('--fhead')
+        h_cmd.append(textwrap.dedent(
+            f'''#pragma once
+
+            #include <glib-object.h>
+            {header_prefix}
+
+            G_BEGIN_DECLS
+            '''))
+
+        h_cmd.append('--fprod')
+        h_cmd.append(textwrap.dedent(
+            '''
+            /* enumerations from "@basename@" */
+            '''))
+
+        h_cmd.append('--vhead')
+        h_cmd.append(textwrap.dedent(
+            f'''
+            {decl_decorator}
+            GType {func_prefix}@enum_name@_get_type (void);
+            #define @ENUMPREFIX@_TYPE_@ENUMSHORT@ ({func_prefix}@enum_name@_get_type())'''))
+
+        h_cmd.append('--ftail')
+        h_cmd.append(textwrap.dedent(
+            '''
+            G_END_DECLS'''))
+        h_cmd.append('@INPUT@')
+
+        h_file = self._make_mkenum_impl(
+            state, kwargs['sources'], hdr_filename, h_cmd,
+            install=kwargs['install_header'],
+            install_dir=kwargs['install_dir'])
+
+        return ModuleReturnValue([c_file, h_file], [c_file, h_file])
+
+    @staticmethod
+    def _make_mkenum_impl(
+            state: 'ModuleState',
+            sources: T.Sequence[T.Union[str, mesonlib.File, build.CustomTarget, build.CustomTargetIndex, build.GeneratedList]],
+            output: str,
+            cmd: T.List[str],
+            *,
+            install: bool = False,
+            install_dir: T.Optional[T.Sequence[T.Union[str, bool]]] = None,
+            depends: T.Optional[T.Sequence[T.Union[CustomTarget, CustomTargetIndex, BuildTarget]]] = None
+            ) -> build.CustomTarget:
+        real_cmd: T.List[T.Union[ExternalProgram, Executable, OverrideProgram, str]] = [state.find_program(['glib-mkenums', 'mkenums'])]
+        real_cmd.extend(cmd)
+        _install_dir = install_dir or state.environment.coredata.get_option(mesonlib.OptionKey('includedir'))
+        assert isinstance(_install_dir, str), 'for mypy'
+
+        return build.CustomTarget(
+            output,
+            state.subdir,
+            state.subproject,
+            state.environment,
+            real_cmd,
+            sources,
+            [output],
+            capture=True,
+            install=install,
+            install_dir=[_install_dir],
+            install_tag=['devel'],
+            extra_depends=depends,
+            # https://github.com/mesonbuild/meson/issues/973
+            absolute_paths=True,
+        )
+
+    @typed_pos_args('gnome.genmarshal', str)
+    @typed_kwargs(
+        'gnome.genmarshal',
+        DEPEND_FILES_KW.evolve(since='0.61.0'),
+        DEPENDS_KW.evolve(since='0.61.0'),
+        INSTALL_KW.evolve(name='install_header'),
+        INSTALL_DIR_KW,
+        KwargInfo('extra_args', ContainerTypeInfo(list, str), listify=True, default=[]),
+        KwargInfo('internal', bool, default=False),
+        KwargInfo('nostdinc', bool, default=False),
+        KwargInfo('prefix', (str, NoneType)),
+        KwargInfo('skip_source', bool, default=False),
+        KwargInfo('sources', ContainerTypeInfo(list, (str, mesonlib.File), allow_empty=False), listify=True, required=True),
+        KwargInfo('stdinc', bool, default=False),
+        KwargInfo('valist_marshallers', bool, default=False),
+    )
+    def genmarshal(self, state: 'ModuleState', args: T.Tuple[str], kwargs: 'GenMarshal') -> ModuleReturnValue:
+        output = args[0]
+        sources = kwargs['sources']
+
+        new_genmarshal = mesonlib.version_compare(self._get_native_glib_version(state), '>= 2.53.3')
+
+        cmd: T.List[T.Union[ExternalProgram, Executable, OverrideProgram, str]] = [state.find_program('glib-genmarshal')]
+        if kwargs['prefix']:
+            cmd.extend(['--prefix', kwargs['prefix']])
+        if kwargs['extra_args']:
+            if new_genmarshal:
+                cmd.extend(kwargs['extra_args'])
+            else:
+                mlog.warning('The current version of GLib does not support extra arguments \n'
+                             'for glib-genmarshal. You need at least GLib 2.53.3. See ',
+                             mlog.bold('https://github.com/mesonbuild/meson/pull/2049'),
+                             once=True, fatal=False)
+        for k in ['internal', 'nostdinc', 'skip_source', 'stdinc', 'valist_marshallers']:
+            # Mypy can't figure out that this is correct
+            if kwargs[k]:                                            # type: ignore
+                cmd.append(f'--{k.replace("_", "-")}')
+
+        install_header = kwargs['install_header']
+        capture = False
+
+        # https://github.com/GNOME/glib/commit/0fbc98097fac4d3e647684f344e508abae109fdf
+        if mesonlib.version_compare(self._get_native_glib_version(state), '>= 2.51.0'):
+            cmd += ['--output', '@OUTPUT@']
+        else:
+            capture = True
+
+        header_file = output + '.h'
+        h_cmd = cmd + ['--header', '@INPUT@']
+        if new_genmarshal:
+            h_cmd += ['--pragma-once']
+        header = build.CustomTarget(
+            output + '_h',
+            state.subdir,
+            state.subproject,
+            state.environment,
+            h_cmd,
+            sources,
+            [header_file],
+            install=install_header,
+            install_dir=[kwargs['install_dir']] if kwargs['install_dir'] else [],
+            install_tag=['devel'],
+            capture=capture,
+            depend_files=kwargs['depend_files'],
+        )
+
+        c_cmd = cmd + ['--body', '@INPUT@']
+        extra_deps: T.List[build.CustomTarget] = []
+        if mesonlib.version_compare(self._get_native_glib_version(state), '>= 2.53.4'):
+            # Silence any warnings about missing prototypes
+            c_cmd += ['--include-header', header_file]
+            extra_deps.append(header)
+        body = build.CustomTarget(
+            output + '_c',
+            state.subdir,
+            state.subproject,
+            state.environment,
+            c_cmd,
+            sources,
+            [f'{output}.c'],
+            capture=capture,
+            depend_files=kwargs['depend_files'],
+            extra_depends=extra_deps,
+        )
+
+        rv = [body, header]
+        return ModuleReturnValue(rv, rv)
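+
+    # Illustrative meson.build usage of the marshaller generator above
+    # (file name and prefix hypothetical):
+    #
+    #   gnome = import('gnome')
+    #   marshallers = gnome.genmarshal('marshal',
+    #     sources: 'marshal.list', prefix: '_foo_marshal', install_header: true)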
+
+    def _extract_vapi_packages(self, state: 'ModuleState', packages: T.List[T.Union[InternalDependency, str]],
+                               ) -> T.Tuple[T.List[str], T.List[VapiTarget], T.List[str], T.List[str], T.List[str]]:
+        '''
+        Packages are special because we need to:
+        - Get a list of packages for the .deps file
+        - Get a list of depends for any VapiTargets
+        - Get package name from VapiTargets
+        - Add include dirs for any VapiTargets
+        '''
+        if not packages:
+            return [], [], [], [], []
+        vapi_depends: T.List[VapiTarget] = []
+        vapi_packages: T.List[str] = []
+        vapi_includes: T.List[str] = []
+        vapi_args: T.List[str] = []
+        remaining_args = []
+        for arg in packages:
+            if isinstance(arg, InternalDependency):
+                targets = [t for t in arg.sources if isinstance(t, VapiTarget)]
+                for target in targets:
+                    srcdir = os.path.join(state.environment.get_source_dir(),
+                                          target.get_subdir())
+                    outdir = os.path.join(state.environment.get_build_dir(),
+                                          target.get_subdir())
+                    outfile = target.get_outputs()[0][:-5] # Strip .vapi
+                    vapi_args.append('--vapidir=' + outdir)
+                    vapi_args.append('--girdir=' + outdir)
+                    vapi_args.append('--pkg=' + outfile)
+                    vapi_depends.append(target)
+                    vapi_packages.append(outfile)
+                    vapi_includes.append(srcdir)
+            else:
+                assert isinstance(arg, str), 'for mypy'
+                vapi_args.append(f'--pkg={arg}')
+                vapi_packages.append(arg)
+                remaining_args.append(arg)
+
+        # TODO: this is supposed to take IncludeDirs, but it never worked
+        return vapi_args, vapi_depends, vapi_packages, vapi_includes, remaining_args
+
+    def _generate_deps(self, state: 'ModuleState', library: str, packages: T.List[str], install_dir: str) -> build.Data:
+        outdir = state.environment.scratch_dir
+        fname = os.path.join(outdir, library + '.deps')
+        with open(fname, 'w', encoding='utf-8') as ofile:
+            for package in packages:
+                ofile.write(package + '\n')
+        return build.Data([mesonlib.File(True, outdir, fname)], install_dir, install_dir, mesonlib.FileMode(), state.subproject)
+
+    def _get_vapi_link_with(self, target: build.CustomTarget) -> T.List[build.LibTypes]:
+        link_with: T.List[build.LibTypes] = []
+        for dep in target.get_target_dependencies():
+            if isinstance(dep, build.SharedLibrary):
+                link_with.append(dep)
+            elif isinstance(dep, GirTarget):
+                link_with += self._get_vapi_link_with(dep)
+        return link_with
+
+    @typed_pos_args('gnome.generate_vapi', str)
+    @typed_kwargs(
+        'gnome.generate_vapi',
+        INSTALL_KW,
+        INSTALL_DIR_KW,
+        KwargInfo(
+            'sources',
+            ContainerTypeInfo(list, (str, GirTarget), allow_empty=False),
+            listify=True,
+            required=True,
+        ),
+        KwargInfo('vapi_dirs', ContainerTypeInfo(list, str), listify=True, default=[]),
+        KwargInfo('metadata_dirs', ContainerTypeInfo(list, str), listify=True, default=[]),
+        KwargInfo('gir_dirs', ContainerTypeInfo(list, str), listify=True, default=[]),
+        KwargInfo('packages', ContainerTypeInfo(list, (str, InternalDependency)), listify=True, default=[]),
+    )
+    def generate_vapi(self, state: 'ModuleState', args: T.Tuple[str], kwargs: 'GenerateVapi') -> ModuleReturnValue:
+        created_values: T.List[T.Union[Dependency, build.Data]] = []
+        library = args[0]
+        build_dir = os.path.join(state.environment.get_build_dir(), state.subdir)
+        source_dir = os.path.join(state.environment.get_source_dir(), state.subdir)
+        pkg_cmd, vapi_depends, vapi_packages, vapi_includes, packages = self._extract_vapi_packages(state, kwargs['packages'])
+        cmd: T.List[T.Union[ExternalProgram, Executable, OverrideProgram, str]]
+        cmd = [state.find_program('vapigen'), '--quiet', f'--library={library}', f'--directory={build_dir}']
+        cmd.extend([f'--vapidir={d}' for d in kwargs['vapi_dirs']])
+        cmd.extend([f'--metadatadir={d}' for d in kwargs['metadata_dirs']])
+        cmd.extend([f'--girdir={d}' for d in kwargs['gir_dirs']])
+        cmd += pkg_cmd
+        cmd += ['--metadatadir=' + source_dir]
+
+        inputs = kwargs['sources']
+
+        link_with: T.List[build.LibTypes] = []
+        for i in inputs:
+            if isinstance(i, str):
+                cmd.append(os.path.join(source_dir, i))
+            elif isinstance(i, GirTarget):
+                link_with += self._get_vapi_link_with(i)
+                subdir = os.path.join(state.environment.get_build_dir(),
+                                      i.get_subdir())
+                gir_file = os.path.join(subdir, i.get_outputs()[0])
+                cmd.append(gir_file)
+
+        vapi_output = library + '.vapi'
+        datadir = state.environment.coredata.get_option(mesonlib.OptionKey('datadir'))
+        assert isinstance(datadir, str), 'for mypy'
+        install_dir = kwargs['install_dir'] or os.path.join(datadir, 'vala', 'vapi')
+
+        if kwargs['install']:
+            # We shouldn't need this locally but we install it
+            deps_target = self._generate_deps(state, library, vapi_packages, install_dir)
+            created_values.append(deps_target)
+        vapi_target = VapiTarget(
+            vapi_output,
+            state.subdir,
+            state.subproject,
+            state.environment,
+            command=cmd,
+            sources=inputs,
+            outputs=[vapi_output],
+            extra_depends=vapi_depends,
+            install=kwargs['install'],
+            install_dir=[install_dir],
+            install_tag=['devel'],
+        )
+
+        # To make this work out of the box, we need to:
+        # - link with the correct library
+        # - include the vapi and dependent vapi files in sources
+        # - add relevant directories to include dirs
+        incs = [build.IncludeDirs(state.subdir, ['.'] + vapi_includes, False)]
+        sources = [vapi_target] + vapi_depends
+        rv = InternalDependency(None, incs, [], [], link_with, [], sources, [], [], {}, [], [], [])
+        created_values.append(rv)
+        return ModuleReturnValue(rv, created_values)
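+
+    # Illustrative meson.build usage (library name and GIR target are
+    # hypothetical):
+    #
+    #   gnome = import('gnome')
+    #   foo_vapi = gnome.generate_vapi('foo-1.0',
+    #     sources: foo_gir, packages: ['gio-2.0'], install: true)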
+
+def initialize(interp: 'Interpreter') -> GnomeModule:
+    mod = GnomeModule(interp)
+    mod.interpreter.append_holder_map(GResourceTarget, interpreter.CustomTargetHolder)
+    mod.interpreter.append_holder_map(GResourceHeaderTarget, interpreter.CustomTargetHolder)
+    mod.interpreter.append_holder_map(GirTarget, interpreter.CustomTargetHolder)
+    mod.interpreter.append_holder_map(TypelibTarget, interpreter.CustomTargetHolder)
+    mod.interpreter.append_holder_map(VapiTarget, interpreter.CustomTargetHolder)
+    return mod
diff --git a/vendored-meson/meson/mesonbuild/modules/hotdoc.py b/vendored-meson/meson/mesonbuild/modules/hotdoc.py
new file mode 100644
index 000000000000..ad5ae3079dba
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/modules/hotdoc.py
@@ -0,0 +1,471 @@
+# Copyright 2018 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+'''This module provides helper functions for generating documentation using hotdoc'''
+
+import os
+import subprocess
+
+from mesonbuild import mesonlib
+from mesonbuild import mlog, build
+from mesonbuild.coredata import MesonException
+from . import ModuleReturnValue, ModuleInfo
+from . import ExtensionModule
+from ..dependencies import Dependency, InternalDependency
+from ..interpreterbase import (
+    InvalidArguments, noPosargs, noKwargs, typed_kwargs, FeatureDeprecated,
+    ContainerTypeInfo, KwargInfo, typed_pos_args
+)
+from ..interpreter import CustomTargetHolder
+from ..interpreter.type_checking import NoneType
+from ..programs import ExternalProgram
+
+
+def ensure_list(value):
+    if not isinstance(value, list):
+        return [value]
+    return value
+
+
+MIN_HOTDOC_VERSION = '0.8.100'
+
+file_types = (str, mesonlib.File, build.CustomTarget, build.CustomTargetIndex)
+
+
+class HotdocTargetBuilder:
+
+    def __init__(self, name, state, hotdoc, interpreter, kwargs):
+        self.hotdoc = hotdoc
+        self.build_by_default = kwargs.pop('build_by_default', False)
+        self.kwargs = kwargs
+        self.name = name
+        self.state = state
+        self.interpreter = interpreter
+        self.include_paths = mesonlib.OrderedSet()
+
+        self.builddir = state.environment.get_build_dir()
+        self.sourcedir = state.environment.get_source_dir()
+        self.subdir = state.subdir
+        self.build_command = state.environment.get_build_command()
+
+        self.cmd = ['conf', '--project-name', name, "--disable-incremental-build",
+                    '--output', os.path.join(self.builddir, self.subdir, self.name + '-doc')]
+
+        self._extra_extension_paths = set()
+        self.extra_assets = set()
+        self.extra_depends = []
+        self._subprojects = []
+
+    def process_known_arg(self, option, argname=None, value_processor=None):
+        if not argname:
+            argname = option.strip("-").replace("-", "_")
+
+        value = self.kwargs.pop(argname)
+        if value is not None and value_processor:
+            value = value_processor(value)
+
+        self.set_arg_value(option, value)
+
+    def set_arg_value(self, option, value):
+        if value is None:
+            return
+
+        if isinstance(value, bool):
+            if value:
+                self.cmd.append(option)
+        elif isinstance(value, list):
+            # Do not do anything on empty lists
+            if value:
+                # https://bugs.python.org/issue9334 (from 2010 :( )
+                # The syntax with nargs=+ is inherently ambiguous
+                # A workaround for this case is to simply prefix with a space
+                # every value starting with a dash
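+                # e.g. a value '-DFOO' is passed on as ' -DFOO'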
+                escaped_value = []
+                for e in value:
+                    if isinstance(e, str) and e.startswith('-'):
+                        escaped_value += [' %s' % e]
+                    else:
+                        escaped_value += [e]
+                if option:
+                    self.cmd.extend([option] + escaped_value)
+                else:
+                    self.cmd.extend(escaped_value)
+        else:
+            # argparse gets confused if value(s) start with a dash.
+            # When an option expects a single value, the unambiguous way
+            # to specify it is with =
+            if isinstance(value, str):
+                self.cmd.extend([f'{option}={value}'])
+            else:
+                self.cmd.extend([option, value])
+
+    def check_extra_arg_type(self, arg, value):
+        if isinstance(value, list):
+            for v in value:
+                self.check_extra_arg_type(arg, v)
+            return
+
+        valid_types = (str, bool, mesonlib.File, build.IncludeDirs, build.CustomTarget, build.CustomTargetIndex, build.BuildTarget)
+        if not isinstance(value, valid_types):
+            raise InvalidArguments('Argument "{}={}" should be of type: {}.'.format(
+                arg, value, [t.__name__ for t in valid_types]))
+
+    def process_extra_args(self):
+        for arg, value in self.kwargs.items():
+            option = "--" + arg.replace("_", "-")
+            self.check_extra_arg_type(arg, value)
+            self.set_arg_value(option, value)
+
+    def get_value(self, types, argname, default=None, value_processor=None,
+                  mandatory=False, force_list=False):
+        if not isinstance(types, list):
+            types = [types]
+        try:
+            uvalue = value = self.kwargs.pop(argname)
+            if value_processor:
+                value = value_processor(value)
+
+            for t in types:
+                if isinstance(value, t):
+                    if force_list and not isinstance(value, list):
+                        return [value], uvalue
+                    return value, uvalue
+            raise MesonException(f"{argname} field value {value} is not valid,"
+                                 f" valid types are {types}")
+        except KeyError:
+            if mandatory:
+                raise MesonException(f"{argname} mandatory field not found")
+
+            if default is not None:
+                return default, default
+
+        return None, None
+
+    def add_extension_paths(self, paths):
+        for path in paths:
+            if path in self._extra_extension_paths:
+                continue
+
+            self._extra_extension_paths.add(path)
+            self.cmd.extend(["--extra-extension-path", path])
+
+    def replace_dirs_in_string(self, string):
+        return string.replace("@SOURCE_ROOT@", self.sourcedir).replace("@BUILD_ROOT@", self.builddir)
+
+    def process_gi_c_source_roots(self):
+        if self.hotdoc.run_hotdoc(['--has-extension=gi-extension']) != 0:
+            return
+
+        value = self.kwargs.pop('gi_c_source_roots')
+        value.extend([
+            os.path.join(self.sourcedir, self.state.root_subdir),
+            os.path.join(self.builddir, self.state.root_subdir)
+        ])
+
+        self.cmd += ['--gi-c-source-roots'] + value
+
+    def process_dependencies(self, deps):
+        cflags = set()
+        for dep in mesonlib.listify(ensure_list(deps)):
+            if isinstance(dep, InternalDependency):
+                inc_args = self.state.get_include_args(dep.include_directories)
+                cflags.update([self.replace_dirs_in_string(x)
+                               for x in inc_args])
+                cflags.update(self.process_dependencies(dep.libraries))
+                cflags.update(self.process_dependencies(dep.sources))
+                cflags.update(self.process_dependencies(dep.ext_deps))
+            elif isinstance(dep, Dependency):
+                cflags.update(dep.get_compile_args())
+            elif isinstance(dep, (build.StaticLibrary, build.SharedLibrary)):
+                self.extra_depends.append(dep)
+                for incd in dep.get_include_dirs():
+                    cflags.update(incd.get_incdirs())
+            elif isinstance(dep, HotdocTarget):
+                # Recurse in hotdoc target dependencies
+                self.process_dependencies(dep.get_target_dependencies())
+                self._subprojects.extend(dep.subprojects)
+                self.process_dependencies(dep.subprojects)
+                self.include_paths.add(os.path.join(self.builddir, dep.hotdoc_conf.subdir))
+                self.cmd += ['--extra-assets=' + p for p in dep.extra_assets]
+                self.add_extension_paths(dep.extra_extension_paths)
+            elif isinstance(dep, (build.CustomTarget, build.BuildTarget)):
+                self.extra_depends.append(dep)
+            elif isinstance(dep, build.CustomTargetIndex):
+                self.extra_depends.append(dep.target)
+
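+        # Note: str.strip('-I') strips any leading/trailing '-' and 'I'
+        # characters rather than a literal '-I' prefix.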
+        return [f.strip('-I') for f in cflags]
+
+    def process_extra_assets(self):
+        self._extra_assets = self.kwargs.pop('extra_assets')
+
+        for assets_path in self._extra_assets:
+            self.cmd.extend(["--extra-assets", assets_path])
+
+    def process_subprojects(self):
+        value = self.kwargs.pop('subprojects')
+
+        self.process_dependencies(value)
+        self._subprojects.extend(value)
+
+    def flatten_config_command(self):
+        cmd = []
+        for arg in mesonlib.listify(self.cmd, flatten=True):
+            if isinstance(arg, mesonlib.File):
+                arg = arg.absolute_path(self.state.environment.get_source_dir(),
+                                        self.state.environment.get_build_dir())
+            elif isinstance(arg, build.IncludeDirs):
+                for inc_dir in arg.get_incdirs():
+                    cmd.append(os.path.join(self.sourcedir, arg.get_curdir(), inc_dir))
+                    cmd.append(os.path.join(self.builddir, arg.get_curdir(), inc_dir))
+
+                continue
+            elif isinstance(arg, (build.BuildTarget, build.CustomTarget)):
+                self.extra_depends.append(arg)
+                arg = self.interpreter.backend.get_target_filename_abs(arg)
+            elif isinstance(arg, build.CustomTargetIndex):
+                self.extra_depends.append(arg.target)
+                arg = self.interpreter.backend.get_target_filename_abs(arg)
+
+            cmd.append(arg)
+
+        return cmd
+
+    def generate_hotdoc_config(self):
+        cwd = os.path.abspath(os.curdir)
+        ncwd = os.path.join(self.sourcedir, self.subdir)
+        mlog.log('Generating Hotdoc configuration for: ', mlog.bold(self.name))
+        os.chdir(ncwd)
+        if self.hotdoc.run_hotdoc(self.flatten_config_command()) != 0:
+            raise MesonException('hotdoc failed to configure')
+        os.chdir(cwd)
+
+    def ensure_file(self, value):
+        if isinstance(value, list):
+            res = []
+            for val in value:
+                res.append(self.ensure_file(val))
+            return res
+
+        if isinstance(value, str):
+            return mesonlib.File.from_source_file(self.sourcedir, self.subdir, value)
+
+        return value
+
+    def ensure_dir(self, value):
+        if os.path.isabs(value):
+            _dir = value
+        else:
+            _dir = os.path.join(self.sourcedir, self.subdir, value)
+
+        if not os.path.isdir(_dir):
+            raise InvalidArguments(f'"{_dir}" is not a directory.')
+
+        return os.path.relpath(_dir, os.path.join(self.builddir, self.subdir))
+
+    def check_forbidden_args(self):
+        for arg in ['conf_file']:
+            if arg in self.kwargs:
+                raise InvalidArguments(f'Argument "{arg}" is forbidden.')
+
+    def make_targets(self):
+        self.check_forbidden_args()
+        self.process_known_arg("--index", value_processor=self.ensure_file)
+        self.process_known_arg("--project-version")
+        self.process_known_arg("--sitemap", value_processor=self.ensure_file)
+        self.process_known_arg("--html-extra-theme", value_processor=self.ensure_dir)
+        self.include_paths.update(self.ensure_dir(v) for v in self.kwargs.pop('include_paths'))
+        self.process_known_arg('--c-include-directories', argname="dependencies", value_processor=self.process_dependencies)
+        self.process_gi_c_source_roots()
+        self.process_extra_assets()
+        self.add_extension_paths(self.kwargs.pop('extra_extension_paths'))
+        self.process_subprojects()
+        self.extra_depends.extend(self.kwargs.pop('depends'))
+
+        install = self.kwargs.pop('install')
+        self.process_extra_args()
+
+        fullname = self.name + '-doc'
+        hotdoc_config_name = fullname + '.json'
+        hotdoc_config_path = os.path.join(
+            self.builddir, self.subdir, hotdoc_config_name)
+        with open(hotdoc_config_path, 'w', encoding='utf-8') as f:
+            f.write('{}')
+
+        self.cmd += ['--conf-file', hotdoc_config_path]
+        self.include_paths.add(os.path.join(self.builddir, self.subdir))
+        self.include_paths.add(os.path.join(self.sourcedir, self.subdir))
+
+        depfile = os.path.join(self.builddir, self.subdir, self.name + '.deps')
+        self.cmd += ['--deps-file-dest', depfile]
+
+        for path in self.include_paths:
+            self.cmd.extend(['--include-path', path])
+
+        if self.state.environment.coredata.get_option(mesonlib.OptionKey('werror', subproject=self.state.subproject)):
+            self.cmd.append('--fatal-warnings')
+        self.generate_hotdoc_config()
+
+        target_cmd = self.build_command + ["--internal", "hotdoc"] + \
+            self.hotdoc.get_command() + ['run', '--conf-file', hotdoc_config_name] + \
+            ['--builddir', os.path.join(self.builddir, self.subdir)]
+
+        target = HotdocTarget(fullname,
+                              subdir=self.subdir,
+                              subproject=self.state.subproject,
+                              environment=self.state.environment,
+                              hotdoc_conf=mesonlib.File.from_built_file(
+                                  self.subdir, hotdoc_config_name),
+                              extra_extension_paths=self._extra_extension_paths,
+                              extra_assets=self._extra_assets,
+                              subprojects=self._subprojects,
+                              command=target_cmd,
+                              extra_depends=self.extra_depends,
+                              outputs=[fullname],
+                              sources=[],
+                              depfile=os.path.basename(depfile),
+                              build_by_default=self.build_by_default)
+
+        install_script = None
+        if install:
+            datadir = os.path.join(self.state.get_option('prefix'), self.state.get_option('datadir'))
+            devhelp = self.kwargs.get('devhelp_activate', False)
+            if not isinstance(devhelp, bool):
+                FeatureDeprecated.single_use('hotdoc.generate_doc() devhelp_activate must be boolean', '1.1.0', self.state.subproject)
+                devhelp = False
+            if devhelp:
+                install_from = os.path.join(fullname, 'devhelp')
+                install_to = os.path.join(datadir, 'devhelp')
+            else:
+                install_from = os.path.join(fullname, 'html')
+                install_to = os.path.join(datadir, 'doc', self.name, 'html')
+
+            install_script = self.state.backend.get_executable_serialisation(self.build_command + [
+                "--internal", "hotdoc",
+                "--install", install_from,
+                "--docdir", install_to,
+                '--name', self.name,
+                '--builddir', os.path.join(self.builddir, self.subdir)] +
+                self.hotdoc.get_command() +
+                ['run', '--conf-file', hotdoc_config_name])
+            install_script.tag = 'doc'
+
+        return (target, install_script)
+
+
+class HotdocTargetHolder(CustomTargetHolder):
+    def __init__(self, target, interp):
+        super().__init__(target, interp)
+        self.methods.update({'config_path': self.config_path_method})
+
+    @noPosargs
+    @noKwargs
+    def config_path_method(self, *args, **kwargs):
+        conf = self.held_object.hotdoc_conf.absolute_path(self.interpreter.environment.source_dir,
+                                                          self.interpreter.environment.build_dir)
+        return conf
+
+
+class HotdocTarget(build.CustomTarget):
+    def __init__(self, name, subdir, subproject, hotdoc_conf, extra_extension_paths, extra_assets,
+                 subprojects, environment, **kwargs):
+        super().__init__(name, subdir, subproject, environment, **kwargs, absolute_paths=True)
+        self.hotdoc_conf = hotdoc_conf
+        self.extra_extension_paths = extra_extension_paths
+        self.extra_assets = extra_assets
+        self.subprojects = subprojects
+
+    def __getstate__(self):
+        # Make sure we do not try to pickle subprojects
+        res = self.__dict__.copy()
+        res['subprojects'] = []
+
+        return res
+
+
+class HotDocModule(ExtensionModule):
+
+    INFO = ModuleInfo('hotdoc', '0.48.0')
+
+    def __init__(self, interpreter):
+        super().__init__(interpreter)
+        self.hotdoc = ExternalProgram('hotdoc')
+        if not self.hotdoc.found():
+            raise MesonException('hotdoc executable not found')
+        version = self.hotdoc.get_version(interpreter)
+        if not mesonlib.version_compare(version, f'>={MIN_HOTDOC_VERSION}'):
+            raise MesonException(f'hotdoc {MIN_HOTDOC_VERSION} or newer required, found {version}.')
+
+        def run_hotdoc(cmd):
+            return subprocess.run(self.hotdoc.get_command() + cmd, stdout=subprocess.DEVNULL).returncode
+
+        self.hotdoc.run_hotdoc = run_hotdoc
+        self.methods.update({
+            'has_extensions': self.has_extensions,
+            'generate_doc': self.generate_doc,
+        })
+
+    @noKwargs
+    @typed_pos_args('hotdoc.has_extensions', varargs=str, min_varargs=1)
+    def has_extensions(self, state, args, kwargs):
+        return self.hotdoc.run_hotdoc([f'--has-extension={extension}' for extension in args[0]]) == 0
+
+    @typed_pos_args('hotdoc.generate_doc', str)
+    @typed_kwargs(
+        'hotdoc.generate_doc',
+        KwargInfo('sitemap', file_types, required=True),
+        KwargInfo('index', file_types, required=True),
+        KwargInfo('project_version', str, required=True),
+        KwargInfo('html_extra_theme', (str, NoneType)),
+        KwargInfo('include_paths', ContainerTypeInfo(list, str), listify=True, default=[]),
+        # --c-include-directories
+        KwargInfo(
+            'dependencies',
+            ContainerTypeInfo(list, (Dependency, build.StaticLibrary, build.SharedLibrary,
+                                     build.CustomTarget, build.CustomTargetIndex)),
+            listify=True,
+            default=[],
+        ),
+        KwargInfo(
+            'depends',
+            ContainerTypeInfo(list, (build.CustomTarget, build.CustomTargetIndex)),
+            listify=True,
+            default=[],
+            since='0.64.1',
+        ),
+        KwargInfo('gi_c_source_roots', ContainerTypeInfo(list, str), listify=True, default=[]),
+        KwargInfo('extra_assets', ContainerTypeInfo(list, str), listify=True, default=[]),
+        KwargInfo('extra_extension_paths', ContainerTypeInfo(list, str), listify=True, default=[]),
+        KwargInfo('subprojects', ContainerTypeInfo(list, HotdocTarget), listify=True, default=[]),
+        KwargInfo('install', bool, default=False),
+        allow_unknown=True
+    )
+    def generate_doc(self, state, args, kwargs):
+        project_name = args[0]
+        if any(isinstance(x, (build.CustomTarget, build.CustomTargetIndex)) for x in kwargs['dependencies']):
+            FeatureDeprecated.single_use('hotdoc.generate_doc dependencies argument with custom_target',
+                                         '0.64.1', state.subproject, 'use `depends`', state.current_node)
+        builder = HotdocTargetBuilder(project_name, state, self.hotdoc, self.interpreter, kwargs)
+        target, install_script = builder.make_targets()
+        targets = [target]
+        if install_script:
+            targets.append(install_script)
+
+        return ModuleReturnValue(targets[0], targets)
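+
+    # A minimal generate_doc() call from meson.build; sitemap, index and
+    # project_version are the required keywords (project and file names
+    # hypothetical):
+    #
+    #   hotdoc = import('hotdoc')
+    #   doc = hotdoc.generate_doc('MyProject',
+    #     sitemap: 'sitemap.txt',
+    #     index: 'index.md',
+    #     project_version: '1.0',
+    #     install: true)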
+
+
+def initialize(interpreter):
+    mod = HotDocModule(interpreter)
+    mod.interpreter.append_holder_map(HotdocTarget, HotdocTargetHolder)
+    return mod
diff --git a/vendored-meson/meson/mesonbuild/modules/i18n.py b/vendored-meson/meson/mesonbuild/modules/i18n.py
new file mode 100644
index 000000000000..11dd9ef2cd65
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/modules/i18n.py
@@ -0,0 +1,399 @@
+# Copyright 2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+from os import path
+import typing as T
+
+from . import ExtensionModule, ModuleReturnValue, ModuleInfo
+from .. import build
+from .. import mesonlib
+from .. import mlog
+from ..interpreter.type_checking import CT_BUILD_BY_DEFAULT, CT_INPUT_KW, INSTALL_TAG_KW, OUTPUT_KW, INSTALL_DIR_KW, INSTALL_KW, NoneType, in_set_validator
+from ..interpreterbase import FeatureNew
+from ..interpreterbase.decorators import ContainerTypeInfo, KwargInfo, noPosargs, typed_kwargs, typed_pos_args
+from ..programs import ExternalProgram
+from ..scripts.gettext import read_linguas
+
+if T.TYPE_CHECKING:
+    from typing_extensions import Literal, TypedDict
+
+    from . import ModuleState
+    from ..build import Target
+    from ..interpreter import Interpreter
+    from ..interpreterbase import TYPE_var
+
+    class MergeFile(TypedDict):
+
+        input: T.List[T.Union[
+            str, build.BuildTarget, build.CustomTarget, build.CustomTargetIndex,
+            build.ExtractedObjects, build.GeneratedList, ExternalProgram,
+            mesonlib.File]]
+        output: str
+        build_by_default: bool
+        install: bool
+        install_dir: T.Optional[str]
+        install_tag: T.Optional[str]
+        args: T.List[str]
+        data_dirs: T.List[str]
+        po_dir: str
+        type: Literal['xml', 'desktop']
+
+    class Gettext(TypedDict):
+
+        args: T.List[str]
+        data_dirs: T.List[str]
+        install: bool
+        install_dir: T.Optional[str]
+        languages: T.List[str]
+        preset: T.Optional[str]
+
+    class ItsJoinFile(TypedDict):
+
+        input: T.List[T.Union[
+            str, build.BuildTarget, build.CustomTarget, build.CustomTargetIndex,
+            build.ExtractedObjects, build.GeneratedList, ExternalProgram,
+            mesonlib.File]]
+        output: str
+        build_by_default: bool
+        install: bool
+        install_dir: T.Optional[str]
+        install_tag: T.Optional[str]
+        its_files: T.List[str]
+        mo_targets: T.List[T.Union[build.BuildTarget, build.CustomTarget, build.CustomTargetIndex]]
+
+
+_ARGS: KwargInfo[T.List[str]] = KwargInfo(
+    'args',
+    ContainerTypeInfo(list, str),
+    default=[],
+    listify=True,
+)
+
+_DATA_DIRS: KwargInfo[T.List[str]] = KwargInfo(
+    'data_dirs',
+    ContainerTypeInfo(list, str),
+    default=[],
+    listify=True
+)
+
+PRESET_ARGS = {
+    'glib': [
+        '--from-code=UTF-8',
+        '--add-comments',
+
+        # https://developer.gnome.org/glib/stable/glib-I18N.html
+        '--keyword=_',
+        '--keyword=N_',
+        '--keyword=C_:1c,2',
+        '--keyword=NC_:1c,2',
+        '--keyword=g_dcgettext:2',
+        '--keyword=g_dngettext:2,3',
+        '--keyword=g_dpgettext2:2c,3',
+
+        '--flag=N_:1:pass-c-format',
+        '--flag=C_:2:pass-c-format',
+        '--flag=NC_:2:pass-c-format',
+        '--flag=g_dngettext:2:pass-c-format',
+        '--flag=g_strdup_printf:1:c-format',
+        '--flag=g_string_printf:2:c-format',
+        '--flag=g_string_append_printf:2:c-format',
+        '--flag=g_error_new:3:c-format',
+        '--flag=g_set_error:4:c-format',
+        '--flag=g_markup_printf_escaped:1:c-format',
+        '--flag=g_log:3:c-format',
+        '--flag=g_print:1:c-format',
+        '--flag=g_printerr:1:c-format',
+        '--flag=g_printf:1:c-format',
+        '--flag=g_fprintf:2:c-format',
+        '--flag=g_sprintf:2:c-format',
+        '--flag=g_snprintf:3:c-format',
+    ]
+}
+
+
+class I18nModule(ExtensionModule):
+
+    INFO = ModuleInfo('i18n')
+
+    def __init__(self, interpreter: 'Interpreter'):
+        super().__init__(interpreter)
+        self.methods.update({
+            'merge_file': self.merge_file,
+            'gettext': self.gettext,
+            'itstool_join': self.itstool_join,
+        })
+        self.tools: T.Dict[str, T.Optional[T.Union[ExternalProgram, build.Executable]]] = {
+            'itstool': None,
+            'msgfmt': None,
+            'msginit': None,
+            'msgmerge': None,
+            'xgettext': None,
+        }
+
+    @staticmethod
+    def _get_data_dirs(state: 'ModuleState', dirs: T.Iterable[str]) -> T.List[str]:
+        """Returns source directories of relative paths"""
+        src_dir = path.join(state.environment.get_source_dir(), state.subdir)
+        return [path.join(src_dir, d) for d in dirs]
+
+    @FeatureNew('i18n.merge_file', '0.37.0')
+    @noPosargs
+    @typed_kwargs(
+        'i18n.merge_file',
+        CT_BUILD_BY_DEFAULT,
+        CT_INPUT_KW,
+        KwargInfo('install_dir', (str, NoneType)),
+        INSTALL_TAG_KW,
+        OUTPUT_KW,
+        INSTALL_KW,
+        _ARGS.evolve(since='0.51.0'),
+        _DATA_DIRS.evolve(since='0.41.0'),
+        KwargInfo('po_dir', str, required=True),
+        KwargInfo('type', str, default='xml', validator=in_set_validator({'xml', 'desktop'})),
+    )
+    def merge_file(self, state: 'ModuleState', args: T.List['TYPE_var'], kwargs: 'MergeFile') -> ModuleReturnValue:
+        if self.tools['msgfmt'] is None or not self.tools['msgfmt'].found():
+            self.tools['msgfmt'] = state.find_program('msgfmt', for_machine=mesonlib.MachineChoice.BUILD)
+        if isinstance(self.tools['msgfmt'], ExternalProgram):
+            try:
+                have_version = self.tools['msgfmt'].get_version()
+            except mesonlib.MesonException as e:
+                raise mesonlib.MesonException('i18n.merge_file requires GNU msgfmt') from e
+            want_version = '>=0.19' if kwargs['type'] == 'desktop' else '>=0.19.7'
+            if not mesonlib.version_compare(have_version, want_version):
+                msg = f'i18n.merge_file requires GNU msgfmt {want_version} to produce files of type: ' + kwargs['type'] + f' (got: {have_version})'
+                raise mesonlib.MesonException(msg)
+        podir = path.join(state.build_to_src, state.subdir, kwargs['po_dir'])
+
+        ddirs = self._get_data_dirs(state, kwargs['data_dirs'])
+        datadirs = '--datadirs=' + ':'.join(ddirs) if ddirs else None
+
+        command: T.List[T.Union[str, build.BuildTarget, build.CustomTarget,
+                                build.CustomTargetIndex, 'ExternalProgram', mesonlib.File]] = []
+        command.extend(state.environment.get_build_command())
+        command.extend([
+            '--internal', 'msgfmthelper',
+            '--msgfmt=' + self.tools['msgfmt'].get_path(),
+        ])
+        if datadirs:
+            command.append(datadirs)
+        command.extend(['@INPUT@', '@OUTPUT@', kwargs['type'], podir])
+        if kwargs['args']:
+            command.append('--')
+            command.extend(kwargs['args'])
+
+        build_by_default = kwargs['build_by_default']
+        if build_by_default is None:
+            build_by_default = kwargs['install']
+
+        install_tag = [kwargs['install_tag']] if kwargs['install_tag'] is not None else None
+
+        ct = build.CustomTarget(
+            '',
+            state.subdir,
+            state.subproject,
+            state.environment,
+            command,
+            kwargs['input'],
+            [kwargs['output']],
+            build_by_default=build_by_default,
+            install=kwargs['install'],
+            install_dir=[kwargs['install_dir']] if kwargs['install_dir'] is not None else None,
+            install_tag=install_tag,
+        )
+
+        return ModuleReturnValue(ct, [ct])
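+
+    # Illustrative usage (file names hypothetical); po_dir is required and
+    # type defaults to 'xml':
+    #
+    #   i18n = import('i18n')
+    #   i18n.merge_file(input: 'foo.desktop.in', output: 'foo.desktop',
+    #     type: 'desktop', po_dir: 'po', install: true,
+    #     install_dir: get_option('datadir') / 'applications')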
+
+    @typed_pos_args('i18n.gettext', str)
+    @typed_kwargs(
+        'i18n.gettext',
+        _ARGS,
+        _DATA_DIRS.evolve(since='0.36.0'),
+        INSTALL_KW.evolve(default=True),
+        INSTALL_DIR_KW.evolve(since='0.50.0'),
+        KwargInfo('languages', ContainerTypeInfo(list, str), default=[], listify=True),
+        KwargInfo(
+            'preset',
+            (str, NoneType),
+            validator=in_set_validator(set(PRESET_ARGS)),
+            since='0.37.0',
+        ),
+    )
+    def gettext(self, state: 'ModuleState', args: T.Tuple[str], kwargs: 'Gettext') -> ModuleReturnValue:
+        for tool, strict in [('msgfmt', True), ('msginit', False), ('msgmerge', False), ('xgettext', False)]:
+            if self.tools[tool] is None:
+                self.tools[tool] = state.find_program(tool, required=False, for_machine=mesonlib.MachineChoice.BUILD)
+            # still not found?
+            if not self.tools[tool].found():
+                if strict:
+                    mlog.warning('Gettext not found, all translation (po) targets will be ignored.',
+                                 once=True, location=state.current_node)
+                    return ModuleReturnValue(None, [])
+                else:
+                    mlog.warning(f'{tool!r} not found, maintainer targets will not work',
+                                 once=True, fatal=False, location=state.current_node)
+        packagename = args[0]
+        pkg_arg = f'--pkgname={packagename}'
+
+        languages = kwargs['languages']
+        lang_arg = '--langs=' + '@@'.join(languages) if languages else None
+
+        _datadirs = ':'.join(self._get_data_dirs(state, kwargs['data_dirs']))
+        datadirs = f'--datadirs={_datadirs}' if _datadirs else None
+
+        extra_args = kwargs['args']
+        targets: T.List['Target'] = []
+        gmotargets: T.List['build.CustomTarget'] = []
+
+        preset = kwargs['preset']
+        if preset:
+            preset_args = PRESET_ARGS[preset]
+            extra_args = list(mesonlib.OrderedSet(preset_args + extra_args))
+
+        extra_arg = '--extra-args=' + '@@'.join(extra_args) if extra_args else None
+
+        source_root = path.join(state.source_root, state.root_subdir)
+        subdir = path.relpath(state.subdir, start=state.root_subdir) if state.subdir else None
+
+        potargs = state.environment.get_build_command() + ['--internal', 'gettext', 'pot', pkg_arg]
+        potargs.append(f'--source-root={source_root}')
+        if subdir:
+            potargs.append(f'--subdir={subdir}')
+        if datadirs:
+            potargs.append(datadirs)
+        if extra_arg:
+            potargs.append(extra_arg)
+        if self.tools['xgettext'].found():
+            potargs.append('--xgettext=' + self.tools['xgettext'].get_path())
+        pottarget = build.RunTarget(packagename + '-pot', potargs, [], state.subdir, state.subproject,
+                                    state.environment, default_env=False)
+        targets.append(pottarget)
+
+        install = kwargs['install']
+        install_dir = kwargs['install_dir'] or state.environment.coredata.get_option(mesonlib.OptionKey('localedir'))
+        assert isinstance(install_dir, str), 'for mypy'
+        if not languages:
+            languages = read_linguas(path.join(state.environment.source_dir, state.subdir))
+        for l in languages:
+            po_file = mesonlib.File.from_source_file(state.environment.source_dir,
+                                                     state.subdir, l+'.po')
+            gmotarget = build.CustomTarget(
+                f'{packagename}-{l}.mo',
+                path.join(state.subdir, l, 'LC_MESSAGES'),
+                state.subproject,
+                state.environment,
+                [self.tools['msgfmt'], '-o', '@OUTPUT@', '@INPUT@'],
+                [po_file],
+                [f'{packagename}.mo'],
+                install=install,
+                # We have multiple files all installed as packagename+'.mo' in different install subdirs.
+                # What we really wanted to do, probably, is have a rename: kwarg, but that's not available
+                # to custom_targets. Crude hack: set the build target's subdir manually.
+                # Bonus: the build tree has something usable as an uninstalled bindtextdomain() target dir.
+                install_dir=[path.join(install_dir, l, 'LC_MESSAGES')],
+                install_tag=['i18n'],
+            )
+            targets.append(gmotarget)
+            gmotargets.append(gmotarget)
+
+        allgmotarget = build.AliasTarget(packagename + '-gmo', gmotargets, state.subdir, state.subproject,
+                                         state.environment)
+        targets.append(allgmotarget)
+
+        updatepoargs = state.environment.get_build_command() + ['--internal', 'gettext', 'update_po', pkg_arg]
+        updatepoargs.append(f'--source-root={source_root}')
+        if subdir:
+            updatepoargs.append(f'--subdir={subdir}')
+        if lang_arg:
+            updatepoargs.append(lang_arg)
+        if datadirs:
+            updatepoargs.append(datadirs)
+        if extra_arg:
+            updatepoargs.append(extra_arg)
+        for tool in ['msginit', 'msgmerge']:
+            if self.tools[tool].found():
+                updatepoargs.append(f'--{tool}=' + self.tools[tool].get_path())
+        updatepotarget = build.RunTarget(packagename + '-update-po', updatepoargs, [], state.subdir, state.subproject,
+                                         state.environment, default_env=False)
+        targets.append(updatepotarget)
+
+        return ModuleReturnValue([gmotargets, pottarget, updatepotarget], targets)
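+
+    # Illustrative usage (package name hypothetical); this creates the -pot,
+    # per-language .mo, -gmo alias and -update-po targets built above:
+    #
+    #   i18n.gettext('mypackage', preset: 'glib', languages: ['de', 'fr'])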
+
+    @FeatureNew('i18n.itstool_join', '0.62.0')
+    @noPosargs
+    @typed_kwargs(
+        'i18n.itstool_join',
+        CT_BUILD_BY_DEFAULT,
+        CT_INPUT_KW,
+        KwargInfo('install_dir', (str, NoneType)),
+        INSTALL_TAG_KW,
+        OUTPUT_KW,
+        INSTALL_KW,
+        _ARGS.evolve(),
+        KwargInfo('its_files', ContainerTypeInfo(list, str)),
+        KwargInfo('mo_targets', ContainerTypeInfo(list, build.CustomTarget), required=True),
+    )
+    def itstool_join(self, state: 'ModuleState', args: T.List['TYPE_var'], kwargs: 'ItsJoinFile') -> ModuleReturnValue:
+        if self.tools['itstool'] is None:
+            self.tools['itstool'] = state.find_program('itstool', for_machine=mesonlib.MachineChoice.BUILD)
+        mo_targets = kwargs['mo_targets']
+        its_files = kwargs.get('its_files', [])
+
+        mo_fnames = []
+        for target in mo_targets:
+            mo_fnames.append(path.join(target.get_subdir(), target.get_outputs()[0]))
+
+        command: T.List[T.Union[str, build.BuildTarget, build.CustomTarget,
+                                build.CustomTargetIndex, 'ExternalProgram', mesonlib.File]] = []
+        command.extend(state.environment.get_build_command())
+        command.extend([
+            '--internal', 'itstool', 'join',
+            '-i', '@INPUT@',
+            '-o', '@OUTPUT@',
+            '--itstool=' + self.tools['itstool'].get_path(),
+        ])
+        if its_files:
+            for fname in its_files:
+                if not path.isabs(fname):
+                    fname = path.join(state.environment.source_dir, state.subdir, fname)
+                command.extend(['--its', fname])
+        command.extend(mo_fnames)
+
+        build_by_default = kwargs['build_by_default']
+        if build_by_default is None:
+            build_by_default = kwargs['install']
+
+        install_tag = [kwargs['install_tag']] if kwargs['install_tag'] is not None else None
+
+        ct = build.CustomTarget(
+            '',
+            state.subdir,
+            state.subproject,
+            state.environment,
+            command,
+            kwargs['input'],
+            [kwargs['output']],
+            build_by_default=build_by_default,
+            extra_depends=mo_targets,
+            install=kwargs['install'],
+            install_dir=[kwargs['install_dir']] if kwargs['install_dir'] is not None else None,
+            install_tag=install_tag,
+        )
+
+        return ModuleReturnValue(ct, [ct])
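+
+    # Illustrative usage (names hypothetical); mo_targets is required:
+    #
+    #   i18n.itstool_join(input: 'foo.xml', output: 'foo.xml',
+    #     mo_targets: foo_mo_targets, its_files: ['foo.its'], install: true)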
+
+
+def initialize(interp: 'Interpreter') -> I18nModule:
+    return I18nModule(interp)
diff --git a/vendored-meson/meson/mesonbuild/modules/icestorm.py b/vendored-meson/meson/mesonbuild/modules/icestorm.py
new file mode 100644
index 000000000000..8c1c6f123d94
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/modules/icestorm.py
@@ -0,0 +1,131 @@
+# Copyright 2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import annotations
+import itertools
+import typing as T
+
+from . import ExtensionModule, ModuleReturnValue, ModuleInfo
+from .. import build
+from .. import mesonlib
+from ..interpreter.type_checking import CT_INPUT_KW
+from ..interpreterbase.decorators import KwargInfo, typed_kwargs, typed_pos_args
+
+if T.TYPE_CHECKING:
+    from typing_extensions import TypedDict
+
+    from . import ModuleState
+    from ..interpreter import Interpreter
+    from ..programs import ExternalProgram
+
+    class ProjectKwargs(TypedDict):
+
+        sources: T.List[T.Union[mesonlib.FileOrString, build.GeneratedTypes]]
+        constraint_file: T.Union[mesonlib.FileOrString, build.GeneratedTypes]
+
+class IceStormModule(ExtensionModule):
+
+    INFO = ModuleInfo('FPGA/Icestorm', '0.45.0', unstable=True)
+
+    def __init__(self, interpreter: Interpreter) -> None:
+        super().__init__(interpreter)
+        self.tools: T.Dict[str, T.Union[ExternalProgram, build.Executable]] = {}
+        self.methods.update({
+            'project': self.project,
+        })
+
+    def detect_tools(self, state: ModuleState) -> None:
+        self.tools['yosys'] = state.find_program('yosys')
+        self.tools['arachne'] = state.find_program('arachne-pnr')
+        self.tools['icepack'] = state.find_program('icepack')
+        self.tools['iceprog'] = state.find_program('iceprog')
+        self.tools['icetime'] = state.find_program('icetime')
+
+    @typed_pos_args('icestorm.project', str,
+                    varargs=(str, mesonlib.File, build.CustomTarget, build.CustomTargetIndex,
+                             build.GeneratedList))
+    @typed_kwargs(
+        'icestorm.project',
+        CT_INPUT_KW.evolve(name='sources'),
+        KwargInfo(
+            'constraint_file',
+            (str, mesonlib.File, build.CustomTarget, build.CustomTargetIndex, build.GeneratedList),
+            required=True,
+        )
+    )
+    def project(self, state: ModuleState,
+                args: T.Tuple[str, T.List[T.Union[mesonlib.FileOrString, build.GeneratedTypes]]],
+                kwargs: ProjectKwargs) -> ModuleReturnValue:
+        if not self.tools:
+            self.detect_tools(state)
+        proj_name, arg_sources = args
+        all_sources = self.interpreter.source_strings_to_files(
+            list(itertools.chain(arg_sources, kwargs['sources'])))
+
+        blif_target = build.CustomTarget(
+            f'{proj_name}_blif',
+            state.subdir,
+            state.subproject,
+            state.environment,
+            [self.tools['yosys'], '-q', '-p', 'synth_ice40 -blif @OUTPUT@', '@INPUT@'],
+            all_sources,
+            [f'{proj_name}.blif'],
+        )
+
+        asc_target = build.CustomTarget(
+            f'{proj_name}_asc',
+            state.subdir,
+            state.subproject,
+            state.environment,
+            [self.tools['arachne'], '-q', '-d', '1k', '-p', '@INPUT@', '-o', '@OUTPUT@'],
+            [kwargs['constraint_file'], blif_target],
+            [f'{proj_name}.asc'],
+        )
+
+        bin_target = build.CustomTarget(
+            f'{proj_name}_bin',
+            state.subdir,
+            state.subproject,
+            state.environment,
+            [self.tools['icepack'], '@INPUT@', '@OUTPUT@'],
+            [asc_target],
+            [f'{proj_name}.bin'],
+            build_by_default=True,
+        )
+
+        upload_target = build.RunTarget(
+            f'{proj_name}-upload',
+            [self.tools['iceprog'], bin_target],
+            [],
+            state.subdir,
+            state.subproject,
+            state.environment,
+        )
+
+        time_target = build.RunTarget(
+            f'{proj_name}-time',
+            [self.tools['icetime'], bin_target],
+            [],
+            state.subdir,
+            state.subproject,
+            state.environment,
+        )
+
+        return ModuleReturnValue(
+            None,
+            [blif_target, asc_target, bin_target, upload_target, time_target])
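+
+    # Illustrative usage (project and file names hypothetical; older Meson
+    # may need import('unstable-icestorm')):
+    #
+    #   ice = import('icestorm')
+    #   ice.project('blinky', files('top.v'), constraint_file: 'blinky.pcf')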
+
+
+def initialize(interp: Interpreter) -> IceStormModule:
+    return IceStormModule(interp)
diff --git a/vendored-meson/meson/mesonbuild/modules/java.py b/vendored-meson/meson/mesonbuild/modules/java.py
new file mode 100644
index 000000000000..f6e448454924
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/modules/java.py
@@ -0,0 +1,117 @@
+# Copyright 2021 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+import pathlib
+import typing as T
+
+from mesonbuild import mesonlib
+from mesonbuild.build import CustomTarget, CustomTargetIndex, GeneratedList, Target
+from mesonbuild.compilers import detect_compiler_for
+from mesonbuild.interpreterbase.decorators import ContainerTypeInfo, FeatureDeprecated, FeatureNew, KwargInfo, typed_pos_args, typed_kwargs
+from mesonbuild.mesonlib import version_compare, MachineChoice
+from . import NewExtensionModule, ModuleReturnValue, ModuleInfo
+from ..interpreter.type_checking import NoneType
+
+if T.TYPE_CHECKING:
+    from . import ModuleState
+    from ..compilers import Compiler
+    from ..interpreter import Interpreter
+
+class JavaModule(NewExtensionModule):
+
+    INFO = ModuleInfo('java', '0.60.0')
+
+    def __init__(self, interpreter: Interpreter):
+        super().__init__()
+        self.methods.update({
+            'generate_native_headers': self.generate_native_headers,
+            'native_headers': self.native_headers,
+        })
+
+    def __get_java_compiler(self, state: ModuleState) -> Compiler:
+        if 'java' not in state.environment.coredata.compilers[MachineChoice.BUILD]:
+            detect_compiler_for(state.environment, 'java', MachineChoice.BUILD, False)
+        return state.environment.coredata.compilers[MachineChoice.BUILD]['java']
+
+    @FeatureNew('java.generate_native_headers', '0.62.0')
+    @FeatureDeprecated('java.generate_native_headers', '1.0.0')
+    @typed_pos_args(
+        'java.generate_native_headers',
+        varargs=(str, mesonlib.File, Target, CustomTargetIndex, GeneratedList))
+    @typed_kwargs(
+        'java.generate_native_headers',
+        KwargInfo('classes', ContainerTypeInfo(list, str), default=[], listify=True, required=True),
+        KwargInfo('package', (str, NoneType), default=None))
+    def generate_native_headers(self, state: ModuleState, args: T.Tuple[T.List[mesonlib.FileOrString]],
+                                kwargs: T.Dict[str, T.Optional[str]]) -> ModuleReturnValue:
+        return self.__native_headers(state, args, kwargs)
+
+    @FeatureNew('java.native_headers', '1.0.0')
+    @typed_pos_args(
+        'java.native_headers',
+        varargs=(str, mesonlib.File, Target, CustomTargetIndex, GeneratedList))
+    @typed_kwargs(
+        'java.native_headers',
+        KwargInfo('classes', ContainerTypeInfo(list, str), default=[], listify=True, required=True),
+        KwargInfo('package', (str, NoneType), default=None))
+    def native_headers(self, state: ModuleState, args: T.Tuple[T.List[mesonlib.FileOrString]],
+                       kwargs: T.Dict[str, T.Optional[str]]) -> ModuleReturnValue:
+        return self.__native_headers(state, args, kwargs)
+
+    def __native_headers(self, state: ModuleState, args: T.Tuple[T.List[mesonlib.FileOrString]],
+                         kwargs: T.Dict[str, T.Optional[str]]) -> ModuleReturnValue:
+        classes = T.cast('T.List[str]', kwargs.get('classes'))
+        package = kwargs.get('package')
+
+        if package:
+            sanitized_package = package.replace("-", "_").replace(".", "_")
+
+        headers: T.List[str] = []
+        for clazz in classes:
+            sanitized_clazz = clazz.replace(".", "_")
+            if package:
+                headers.append(f'{sanitized_package}_{sanitized_clazz}.h')
+            else:
+                headers.append(f'{sanitized_clazz}.h')
+
+        javac = self.__get_java_compiler(state)
+
+        command = mesonlib.listify([
+            javac.exelist,
+            '-d',
+            '@PRIVATE_DIR@',
+            '-h',
+            state.subdir,
+            '@INPUT@',
+        ])
+
+        prefix = classes[0] if not package else package
+
+        target = CustomTarget(f'{prefix}-native-headers',
+                              state.subdir,
+                              state.subproject,
+                              state.environment,
+                              command,
+                              sources=args[0], outputs=headers, backend=state.backend)
+
+        # It is only known that 1.8.0 won't pre-create the directory. 11 and 16
+        # do not exhibit this behavior.
+        if version_compare(javac.version, '1.8.0'):
+            pathlib.Path(state.backend.get_target_private_dir_abs(target)).mkdir(parents=True, exist_ok=True)
+
+        return ModuleReturnValue(target, [target])
+
+def initialize(*args: T.Any, **kwargs: T.Any) -> JavaModule:
+    return JavaModule(*args, **kwargs)
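+
+# A minimal, hypothetical meson.build sketch using this module:
+#
+#   java = import('java')
+#   headers = java.native_headers(sources,
+#                                 classes: ['Configured'],
+#                                 package: 'com.mesonbuild')
+#
+# The returned custom target can then be added to the sources of a JNI
+# library so the generated headers are available at compile time.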
diff --git a/vendored-meson/meson/mesonbuild/modules/keyval.py b/vendored-meson/meson/mesonbuild/modules/keyval.py
new file mode 100644
index 000000000000..48afe814998a
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/modules/keyval.py
@@ -0,0 +1,75 @@
+# Copyright 2017, 2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+import os
+import typing as T
+
+from . import ExtensionModule, ModuleInfo
+from .. import mesonlib
+from ..interpreterbase import noKwargs, typed_pos_args
+
+if T.TYPE_CHECKING:
+    from ..interpreter import Interpreter
+    from . import ModuleState
+
+class KeyvalModule(ExtensionModule):
+
+    INFO = ModuleInfo('keyval', '0.55.0', stabilized='0.56.0')
+
+    def __init__(self, interp: 'Interpreter'):
+        super().__init__(interp)
+        self.methods.update({
+            'load': self.load,
+        })
+
+    @staticmethod
+    def _load_file(path_to_config: str) -> T.Dict[str, str]:
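+        # Parses 'NAME=VALUE' lines; '#' starts a comment and lines without
+        # '=' are skipped. E.g. (hypothetical input) 'CONFIG_FOO=y  # on'
+        # yields {'CONFIG_FOO': 'y'}.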
+        result: T.Dict[str, str] = {}
+        try:
+            with open(path_to_config, encoding='utf-8') as f:
+                for line in f:
+                    if '#' in line:
+                        comment_idx = line.index('#')
+                        line = line[:comment_idx]
+                    line = line.strip()
+                    try:
+                        name, val = line.split('=', 1)
+                    except ValueError:
+                        continue
+                    result[name.strip()] = val.strip()
+        except OSError as e:
+            raise mesonlib.MesonException(f'Failed to load {path_to_config}: {e}')
+
+        return result
+
+    @noKwargs
+    @typed_pos_args('keyval.load', (str, mesonlib.File))
+    def load(self, state: 'ModuleState', args: T.Tuple['mesonlib.FileOrString'], kwargs: T.Dict[str, T.Any]) -> T.Dict[str, str]:
+        s = args[0]
+        is_built = False
+        if isinstance(s, mesonlib.File):
+            is_built = is_built or s.is_built
+            s = s.absolute_path(self.interpreter.environment.source_dir, self.interpreter.environment.build_dir)
+        else:
+            s = os.path.join(self.interpreter.environment.source_dir, s)
+
+        if not is_built:
+            self.interpreter.build_def_files.add(s)
+
+        return self._load_file(s)
+
+
+def initialize(interp: 'Interpreter') -> KeyvalModule:
+    return KeyvalModule(interp)
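+
+# A minimal usage sketch (file name hypothetical): for a kconfig-style
+# '.config' file, a meson.build can do
+#
+#   keyval = import('keyval')
+#   cfg = keyval.load('.config')
+#
+# and then read entries with cfg.get('CONFIG_FOO', 'n').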
diff --git a/vendored-meson/meson/mesonbuild/modules/modtest.py b/vendored-meson/meson/mesonbuild/modules/modtest.py
new file mode 100644
index 000000000000..15f823778618
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/modules/modtest.py
@@ -0,0 +1,44 @@
+# Copyright 2015 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import annotations
+import typing as T
+
+from . import ExtensionModule, ModuleInfo
+from ..interpreterbase import noKwargs, noPosargs
+
+if T.TYPE_CHECKING:
+    from . import ModuleState
+    from ..interpreter.interpreter import Interpreter
+    from ..interpreterbase.baseobjects import TYPE_kwargs, TYPE_var
+
+
+class TestModule(ExtensionModule):
+
+    INFO = ModuleInfo('modtest')
+
+    def __init__(self, interpreter: Interpreter) -> None:
+        super().__init__(interpreter)
+        self.methods.update({
+            'print_hello': self.print_hello,
+        })
+
+    @noKwargs
+    @noPosargs
+    def print_hello(self, state: ModuleState, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> None:
+        print('Hello from a Meson module')
+
+
+def initialize(interp: Interpreter) -> TestModule:
+    return TestModule(interp)
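+
+# Usage sketch: `modtest = import('modtest')` followed by
+# `modtest.print_hello()` prints the greeting at configure time.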
diff --git a/vendored-meson/meson/mesonbuild/modules/pkgconfig.py b/vendored-meson/meson/mesonbuild/modules/pkgconfig.py
new file mode 100644
index 000000000000..921fb666167b
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/modules/pkgconfig.py
@@ -0,0 +1,747 @@
+# Copyright 2015-2022 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import annotations
+from collections import defaultdict
+from dataclasses import dataclass
+from pathlib import PurePath
+import os
+import typing as T
+
+from . import NewExtensionModule, ModuleInfo
+from . import ModuleReturnValue
+from .. import build
+from .. import dependencies
+from .. import mesonlib
+from .. import mlog
+from ..coredata import BUILTIN_DIR_OPTIONS
+from ..dependencies.pkgconfig import PkgConfigDependency
+from ..interpreter.type_checking import D_MODULE_VERSIONS_KW, INSTALL_DIR_KW, VARIABLES_KW, NoneType
+from ..interpreterbase import FeatureNew, FeatureDeprecated
+from ..interpreterbase.decorators import ContainerTypeInfo, KwargInfo, typed_kwargs, typed_pos_args
+
+if T.TYPE_CHECKING:
+    from typing_extensions import TypedDict
+
+    from . import ModuleState
+    from .. import mparser
+    from ..interpreter import Interpreter
+
+    ANY_DEP = T.Union[dependencies.Dependency, build.BuildTargetTypes, str]
+    LIBS = T.Union[build.LibTypes, str]
+
+    class GenerateKw(TypedDict):
+
+        version: T.Optional[str]
+        name: T.Optional[str]
+        filebase: T.Optional[str]
+        description: T.Optional[str]
+        url: str
+        subdirs: T.List[str]
+        conflicts: T.List[str]
+        dataonly: bool
+        libraries: T.List[ANY_DEP]
+        libraries_private: T.List[ANY_DEP]
+        requires: T.List[T.Union[str, build.StaticLibrary, build.SharedLibrary, dependencies.Dependency]]
+        requires_private: T.List[T.Union[str, build.StaticLibrary, build.SharedLibrary, dependencies.Dependency]]
+        install_dir: T.Optional[str]
+        d_module_versions: T.List[T.Union[str, int]]
+        extra_cflags: T.List[str]
+        variables: T.Dict[str, str]
+        uninstalled_variables: T.Dict[str, str]
+        unescaped_variables: T.Dict[str, str]
+        unescaped_uninstalled_variables: T.Dict[str, str]
+
+
+_PKG_LIBRARIES: KwargInfo[T.List[T.Union[str, dependencies.Dependency, build.SharedLibrary, build.StaticLibrary, build.CustomTarget, build.CustomTargetIndex]]] = KwargInfo(
+    'libraries',
+    ContainerTypeInfo(list, (str, dependencies.Dependency,
+                             build.SharedLibrary, build.StaticLibrary,
+                             build.CustomTarget, build.CustomTargetIndex)),
+    default=[],
+    listify=True,
+)
+
+_PKG_REQUIRES: KwargInfo[T.List[T.Union[str, build.SharedLibrary, build.StaticLibrary, dependencies.Dependency]]] = KwargInfo(
+    'requires',
+    ContainerTypeInfo(list, (str, build.SharedLibrary, build.StaticLibrary, dependencies.Dependency)),
+    default=[],
+    listify=True,
+)
+
+
+def _as_str(obj: object) -> str:
+    assert isinstance(obj, str)
+    return obj
+
+
+@dataclass
+class MetaData:
+
+    filebase: str
+    display_name: str
+    location: mparser.BaseNode
+    warned: bool = False
+
+
+class DependenciesHelper:
+    def __init__(self, state: ModuleState, name: str, metadata: T.Dict[str, MetaData]) -> None:
+        self.state = state
+        self.name = name
+        self.metadata = metadata
+        self.pub_libs: T.List[LIBS] = []
+        self.pub_reqs: T.List[str] = []
+        self.priv_libs: T.List[LIBS] = []
+        self.priv_reqs: T.List[str] = []
+        self.cflags: T.List[str] = []
+        self.version_reqs: T.DefaultDict[str, T.Set[str]] = defaultdict(set)
+        self.link_whole_targets: T.List[T.Union[build.CustomTarget, build.CustomTargetIndex, build.StaticLibrary]] = []
+        self.uninstalled_incdirs: mesonlib.OrderedSet[str] = mesonlib.OrderedSet()
+
+    def add_pub_libs(self, libs: T.List[ANY_DEP]) -> None:
+        p_libs, reqs, cflags = self._process_libs(libs, True)
+        self.pub_libs = p_libs + self.pub_libs # prepend to preserve dependencies
+        self.pub_reqs += reqs
+        self.cflags += cflags
+
+    def add_priv_libs(self, libs: T.List[ANY_DEP]) -> None:
+        p_libs, reqs, _ = self._process_libs(libs, False)
+        self.priv_libs = p_libs + self.priv_libs
+        self.priv_reqs += reqs
+
+    def add_pub_reqs(self, reqs: T.List[T.Union[str, build.StaticLibrary, build.SharedLibrary, dependencies.Dependency]]) -> None:
+        self.pub_reqs += self._process_reqs(reqs)
+
+    def add_priv_reqs(self, reqs: T.List[T.Union[str, build.StaticLibrary, build.SharedLibrary, dependencies.Dependency]]) -> None:
+        self.priv_reqs += self._process_reqs(reqs)
+
+    def _check_generated_pc_deprecation(self, obj: T.Union[build.CustomTarget, build.CustomTargetIndex, build.StaticLibrary, build.SharedLibrary]) -> None:
+        if obj.get_id() not in self.metadata:
+            return
+        data = self.metadata[obj.get_id()]
+        if data.warned:
+            return
+        mlog.deprecation('Library', mlog.bold(obj.name), 'was passed to the '
+                         '"libraries" keyword argument of a previous call '
+                         'to generate() method instead of first positional '
+                         'argument.', 'Adding', mlog.bold(data.display_name),
+                         'to "Requires" field, but this is a deprecated '
+                         'behaviour that will change in a future version '
+                         'of Meson. Please report the issue if this '
+                         'warning cannot be avoided in your case.',
+                         location=data.location)
+        data.warned = True
+
+    def _process_reqs(self, reqs: T.Sequence[T.Union[str, build.StaticLibrary, build.SharedLibrary, dependencies.Dependency]]) -> T.List[str]:
+        '''Returns string names of requirements'''
+        processed_reqs: T.List[str] = []
+        for obj in mesonlib.listify(reqs):
+            if not isinstance(obj, str):
+                FeatureNew.single_use('pkgconfig.generate requirement from non-string object', '0.46.0', self.state.subproject)
+            if (isinstance(obj, (build.CustomTarget, build.CustomTargetIndex, build.SharedLibrary, build.StaticLibrary))
+                    and obj.get_id() in self.metadata):
+                self._check_generated_pc_deprecation(obj)
+                processed_reqs.append(self.metadata[obj.get_id()].filebase)
+            elif isinstance(obj, PkgConfigDependency):
+                if obj.found():
+                    processed_reqs.append(obj.name)
+                    self.add_version_reqs(obj.name, obj.version_reqs)
+            elif isinstance(obj, str):
+                name, version_req = self.split_version_req(obj)
+                processed_reqs.append(name)
+                self.add_version_reqs(name, [version_req] if version_req is not None else None)
+            elif isinstance(obj, dependencies.Dependency) and not obj.found():
+                pass
+            elif isinstance(obj, dependencies.ExternalDependency) and obj.name == 'threads':
+                pass
+            else:
+                raise mesonlib.MesonException('requires argument not a string, '
+                                              'library with pkgconfig-generated file '
+                                              f'or pkgconfig-dependency object, got {obj!r}')
+        return processed_reqs
+
+    def add_cflags(self, cflags: T.List[str]) -> None:
+        self.cflags += mesonlib.stringlistify(cflags)
+
+    def _add_uninstalled_incdirs(self, incdirs: T.List[build.IncludeDirs], subdir: T.Optional[str] = None) -> None:
+        for i in incdirs:
+            curdir = i.get_curdir()
+            for d in i.get_incdirs():
+                path = os.path.join(curdir, d)
+                self.uninstalled_incdirs.add(path)
+        if subdir is not None:
+            self.uninstalled_incdirs.add(subdir)
+
+    def _process_libs(
+            self, libs: T.List[ANY_DEP], public: bool
+            ) -> T.Tuple[T.List[T.Union[str, build.SharedLibrary, build.StaticLibrary, build.CustomTarget, build.CustomTargetIndex]], T.List[str], T.List[str]]:
+        libs = mesonlib.listify(libs)
+        processed_libs: T.List[T.Union[str, build.SharedLibrary, build.StaticLibrary, build.CustomTarget, build.CustomTargetIndex]] = []
+        processed_reqs: T.List[str] = []
+        processed_cflags: T.List[str] = []
+        for obj in libs:
+            if (isinstance(obj, (build.CustomTarget, build.CustomTargetIndex, build.SharedLibrary, build.StaticLibrary))
+                    and obj.get_id() in self.metadata):
+                self._check_generated_pc_deprecation(obj)
+                processed_reqs.append(self.metadata[obj.get_id()].filebase)
+            elif isinstance(obj, dependencies.ExternalDependency) and obj.name == 'valgrind':
+                pass
+            elif isinstance(obj, PkgConfigDependency):
+                if obj.found():
+                    processed_reqs.append(obj.name)
+                    self.add_version_reqs(obj.name, obj.version_reqs)
+            elif isinstance(obj, dependencies.InternalDependency):
+                if obj.found():
+                    if obj.objects:
+                        raise mesonlib.MesonException('.pc file cannot refer to individual object files.')
+                    processed_libs += obj.get_link_args()
+                    processed_cflags += obj.get_compile_args()
+                    self._add_lib_dependencies(obj.libraries, obj.whole_libraries, obj.ext_deps, public, private_external_deps=True)
+                    self._add_uninstalled_incdirs(obj.get_include_dirs())
+            elif isinstance(obj, dependencies.Dependency):
+                if obj.found():
+                    processed_libs += obj.get_link_args()
+                    processed_cflags += obj.get_compile_args()
+            elif isinstance(obj, build.SharedLibrary) and obj.shared_library_only:
+                # Do not pull dependencies for shared libraries because they
+                # are only required for static linking. Adding private requires
+                # has the side effect of exposing their cflags, which is the
+                # intended behaviour of pkg-config, but forces Debian to add
+                # more build deps than needed.
+                # See https://bugs.freedesktop.org/show_bug.cgi?id=105572
+                processed_libs.append(obj)
+                self._add_uninstalled_incdirs(obj.get_include_dirs(), obj.get_subdir())
+            elif isinstance(obj, (build.SharedLibrary, build.StaticLibrary)):
+                processed_libs.append(obj)
+                self._add_uninstalled_incdirs(obj.get_include_dirs(), obj.get_subdir())
+                # If there is a static library in `Libs:` all its deps must be
+                # public too, otherwise the generated pc file will never be
+                # usable without --static.
+                self._add_lib_dependencies(obj.link_targets,
+                                           obj.link_whole_targets,
+                                           obj.external_deps,
+                                           isinstance(obj, build.StaticLibrary) and public)
+            elif isinstance(obj, (build.CustomTarget, build.CustomTargetIndex)):
+                if not obj.is_linkable_target():
+                    raise mesonlib.MesonException('library argument contains a non-linkable custom_target.')
+                FeatureNew.single_use('custom_target in pkgconfig.generate libraries', '0.58.0', self.state.subproject)
+                processed_libs.append(obj)
+            elif isinstance(obj, str):
+                processed_libs.append(obj)
+            else:
+                raise mesonlib.MesonException(f'library argument of type {type(obj).__name__} not a string, library or dependency object.')
+
+        return processed_libs, processed_reqs, processed_cflags
+
+    def _add_lib_dependencies(
+            self, link_targets: T.Sequence[build.BuildTargetTypes],
+            link_whole_targets: T.Sequence[T.Union[build.StaticLibrary, build.CustomTarget, build.CustomTargetIndex]],
+            external_deps: T.List[dependencies.Dependency],
+            public: bool,
+            private_external_deps: bool = False) -> None:
+        add_libs = self.add_pub_libs if public else self.add_priv_libs
+        # Recursively add all linked libraries
+        for t in link_targets:
+            # Internal libraries (uninstalled static library) will be promoted
+            # to link_whole, treat them as such here.
+            if t.is_internal():
+                # `is_internal` shouldn't return True for anything but a
+                # StaticLibrary, or a CustomTarget that is a StaticLibrary
+                assert isinstance(t, (build.StaticLibrary, build.CustomTarget, build.CustomTargetIndex)), 'for mypy'
+                self._add_link_whole(t, public)
+            else:
+                add_libs([t])
+        for t in link_whole_targets:
+            self._add_link_whole(t, public)
+        # And finally its external dependencies
+        if private_external_deps:
+            self.add_priv_libs(T.cast('T.List[ANY_DEP]', external_deps))
+        else:
+            add_libs(T.cast('T.List[ANY_DEP]', external_deps))
+
+    def _add_link_whole(self, t: T.Union[build.CustomTarget, build.CustomTargetIndex, build.StaticLibrary], public: bool) -> None:
+        # Don't include static libraries that we link_whole. But we still need to
+        # include their dependencies: a static library we link_whole
+        # could itself link to a shared library or an installed static library.
+        # Keep track of link_whole_targets so we can remove them from our
+        # lists in case a library is link_with and link_whole at the same time.
+        # See remove_dups() below.
+        self.link_whole_targets.append(t)
+        if isinstance(t, build.BuildTarget):
+            self._add_lib_dependencies(t.link_targets, t.link_whole_targets, t.external_deps, public)
+
+    def add_version_reqs(self, name: str, version_reqs: T.Optional[T.List[str]]) -> None:
+        if version_reqs:
+            # Note that pkg-config is picky about whitespace.
+            # 'foo > 1.2' is ok but 'foo>1.2' is not.
+            # 'foo, bar' is ok, but 'foo,bar' is not.
+            self.version_reqs[name].update(version_reqs)
+
+    def split_version_req(self, s: str) -> T.Tuple[str, T.Optional[str]]:
+        for op in ['>=', '<=', '!=', '==', '=', '>', '<']:
+            pos = s.find(op)
+            if pos > 0:
+                return s[0:pos].strip(), s[pos:].strip()
+        return s, None
+
+    def format_vreq(self, vreq: str) -> str:
+        # vreq are '>=1.0' and pkgconfig wants '>= 1.0'
+        for op in ['>=', '<=', '!=', '==', '=', '>', '<']:
+            if vreq.startswith(op):
+                return op + ' ' + vreq[len(op):]
+        return vreq
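+    # For illustration: split_version_req('glib-2.0 >= 2.50') returns
+    # ('glib-2.0', '>= 2.50'), and format_vreq('>=2.50') returns '>= 2.50',
+    # matching the whitespace pkg-config expects.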
+
+    def format_reqs(self, reqs: T.List[str]) -> str:
+        result: T.List[str] = []
+        for name in reqs:
+            vreqs = self.version_reqs.get(name, None)
+            if vreqs:
+                result += [name + ' ' + self.format_vreq(vreq) for vreq in vreqs]
+            else:
+                result += [name]
+        return ', '.join(result)
+
+    def remove_dups(self) -> None:
+        # Set of ids that have already been handled and should not be added any more
+        exclude: T.Set[str] = set()
+
+        # We can't just check if 'x' is excluded because we could have copies of
+        # the same SharedLibrary object for example.
+        def _ids(x: T.Union[str, build.CustomTarget, build.CustomTargetIndex, build.StaticLibrary, build.SharedLibrary]) -> T.Iterable[str]:
+            if isinstance(x, str):
+                yield x
+            else:
+                if x.get_id() in self.metadata:
+                    yield self.metadata[x.get_id()].display_name
+                yield x.get_id()
+
+        # Exclude 'x' in all its forms and return if it was already excluded
+        def _add_exclude(x: T.Union[str, build.CustomTarget, build.CustomTargetIndex, build.StaticLibrary, build.SharedLibrary]) -> bool:
+            was_excluded = False
+            for i in _ids(x):
+                if i in exclude:
+                    was_excluded = True
+                else:
+                    exclude.add(i)
+            return was_excluded
+
+        # link_whole targets are already part of other targets, exclude them all.
+        for t in self.link_whole_targets:
+            _add_exclude(t)
+
+        # Mypy thinks these overload signatures overlap, but since List is
+        # invariant they don't: `List[str]` is not a valid input to
+        # `List[str | BuildTarget]`. pylance/pyright gets this right, but for
+        # mypy we have to ignore the error.
+        @T.overload
+        def _fn(xs: T.List[str], libs: bool = False) -> T.List[str]: ...  # type: ignore
+
+        @T.overload
+        def _fn(xs: T.List[LIBS], libs: bool = False) -> T.List[LIBS]: ...
+
+        def _fn(xs: T.Union[T.List[str], T.List[LIBS]], libs: bool = False) -> T.Union[T.List[str], T.List[LIBS]]:
+            # Remove duplicates whilst preserving original order
+            result = []
+            for x in xs:
+                # Don't de-dup unknown strings to avoid messing up arguments like:
+                # ['-framework', 'CoreAudio', '-framework', 'CoreMedia']
+                known_flags = ['-pthread']
+                cannot_dedup = libs and isinstance(x, str) and \
+                    not x.startswith(('-l', '-L')) and \
+                    x not in known_flags
+                if not cannot_dedup and _add_exclude(x):
+                    continue
+                result.append(x)
+            return result
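+        # For example, _fn(['-lfoo', '-lbar', '-lfoo'], libs=True) yields
+        # ['-lfoo', '-lbar'], while ['-framework', 'CoreAudio', '-framework',
+        # 'CoreMedia'] passes through unchanged because bare strings that are
+        # not -l/-L flags are never de-duplicated.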
+
+        # Handle lists in priority order: public items can be excluded from
+        # private, and Requires can be excluded from Libs.
+        self.pub_reqs = _fn(self.pub_reqs)
+        self.pub_libs = _fn(self.pub_libs, True)
+        self.priv_reqs = _fn(self.priv_reqs)
+        self.priv_libs = _fn(self.priv_libs, True)
+        # Reset exclude list just in case some values can be both cflags and libs.
+        exclude = set()
+        self.cflags = _fn(self.cflags)
+
+class PkgConfigModule(NewExtensionModule):
+
+    INFO = ModuleInfo('pkgconfig')
+
+    # Track already generated pkg-config files. This is stored as a class
+    # variable so that multiple `import()`s share metadata.
+    devenv: T.Optional[build.EnvironmentVariables] = None
+    _metadata: T.ClassVar[T.Dict[str, MetaData]] = {}
+
+    def __init__(self) -> None:
+        super().__init__()
+        self.methods.update({
+            'generate': self.generate,
+        })
+
+    def postconf_hook(self, b: build.Build) -> None:
+        if self.devenv is not None:
+            b.devenv.append(self.devenv)
+
+    def _get_lname(self, l: T.Union[build.SharedLibrary, build.StaticLibrary, build.CustomTarget, build.CustomTargetIndex],
+                   msg: str, pcfile: str) -> str:
+        if isinstance(l, (build.CustomTargetIndex, build.CustomTarget)):
+            basename = os.path.basename(l.get_filename())
+            name = os.path.splitext(basename)[0]
+            if name.startswith('lib'):
+                name = name[3:]
+            return name
+        # Nothing special
+        if not l.name_prefix_set:
+            return l.name
+        # Sometimes people want the library to start with 'lib' everywhere,
+        # which is achieved by setting name_prefix to '' and the target name to
+        # 'libfoo'. In that case, try to get the pkg-config '-lfoo' arg correct.
+        if l.prefix == '' and l.name.startswith('lib'):
+            return l.name[3:]
+        # If the library is imported via an import library which is always
+        # named after the target name, '-lfoo' is correct.
+        if isinstance(l, build.SharedLibrary) and l.import_filename:
+            return l.name
+        # In other cases, we can't guarantee that the compiler will be able to
+        # find the library via '-lfoo', so tell the user that.
+        mlog.warning(msg.format(l.name, 'name_prefix', l.name, pcfile))
+        return l.name
+
+    def _escape(self, value: T.Union[str, PurePath]) -> str:
+        '''
+        We cannot use quote_arg because it quotes with ' and " which does not
+        work with pkg-config and pkgconf at all.
+        '''
+        # We should always write out paths with / because pkg-config requires
+        # spaces to be quoted with \ and that messes up on Windows:
+        # https://bugs.freedesktop.org/show_bug.cgi?id=103203
+        if isinstance(value, PurePath):
+            value = value.as_posix()
+        return value.replace(' ', r'\ ')
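+    # E.g. _escape(PurePath('C:/Program Files')) returns 'C:/Program\ Files'
+    # (path hypothetical), escaping the space as pkg-config expects.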
+
+    def _make_relative(self, prefix: T.Union[PurePath, str], subdir: T.Union[PurePath, str]) -> str:
+        prefix = PurePath(prefix)
+        subdir = PurePath(subdir)
+        try:
+            libdir = subdir.relative_to(prefix)
+        except ValueError:
+            libdir = subdir
+        # pathlib joining makes sure absolute libdir is not appended to '${prefix}'
+        return ('${prefix}' / libdir).as_posix()
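+    # For illustration, _make_relative('/usr', '/usr/lib/x86_64-linux-gnu')
+    # returns '${prefix}/lib/x86_64-linux-gnu', while a subdir outside the
+    # prefix, e.g. '/opt/lib', comes back absolute because joining a PurePath
+    # with an absolute right-hand side discards '${prefix}'.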
+
+    def _generate_pkgconfig_file(self, state: ModuleState, deps: DependenciesHelper,
+                                 subdirs: T.List[str], name: str,
+                                 description: str, url: str, version: str,
+                                 pcfile: str, conflicts: T.List[str],
+                                 variables: T.List[T.Tuple[str, str]],
+                                 unescaped_variables: T.List[T.Tuple[str, str]],
+                                 uninstalled: bool = False, dataonly: bool = False,
+                                 pkgroot: T.Optional[str] = None) -> None:
+        coredata = state.environment.get_coredata()
+        referenced_vars = set()
+        optnames = [x.name for x in BUILTIN_DIR_OPTIONS.keys()]
+
+        if not dataonly:
+            # includedir is always implied, although libdir may not be
+            # needed for header-only libraries
+            referenced_vars |= {'prefix', 'includedir'}
+            if deps.pub_libs or deps.priv_libs:
+                referenced_vars |= {'libdir'}
+        # also automatically infer variables referenced in other variables
+        implicit_vars_warning = False
+        redundant_vars_warning = False
+        varnames = set()
+        varstrings = set()
+        for k, v in variables + unescaped_variables:
+            varnames |= {k}
+            varstrings |= {v}
+        for optname in optnames:
+            optvar = f'${{{optname}}}'
+            if any(x.startswith(optvar) for x in varstrings):
+                if optname in varnames:
+                    redundant_vars_warning = True
+                else:
+                    # these 3 vars were always "implicit"
+                    if dataonly or optname not in {'prefix', 'includedir', 'libdir'}:
+                        implicit_vars_warning = True
+                    referenced_vars |= {'prefix', optname}
+        if redundant_vars_warning:
+            FeatureDeprecated.single_use('pkgconfig.generate variable for builtin directories', '0.62.0',
+                                         state.subproject, 'They will be automatically included when referenced',
+                                         state.current_node)
+        if implicit_vars_warning:
+            FeatureNew.single_use('pkgconfig.generate implicit variable for builtin directories', '0.62.0',
+                                  state.subproject, location=state.current_node)
+
+        if uninstalled:
+            outdir = os.path.join(state.environment.build_dir, 'meson-uninstalled')
+            if not os.path.exists(outdir):
+                os.mkdir(outdir)
+            prefix = PurePath(state.environment.get_build_dir())
+            srcdir = PurePath(state.environment.get_source_dir())
+        else:
+            outdir = state.environment.scratch_dir
+            prefix = PurePath(_as_str(coredata.get_option(mesonlib.OptionKey('prefix'))))
+            if pkgroot:
+                pkgroot_ = PurePath(pkgroot)
+                if not pkgroot_.is_absolute():
+                    pkgroot_ = prefix / pkgroot
+                elif prefix not in pkgroot_.parents:
+                    raise mesonlib.MesonException('Pkgconfig prefix cannot be outside of the prefix '
+                                                  'when pkgconfig.relocatable=true. '
+                                                  f'Pkgconfig prefix is {pkgroot_.as_posix()}.')
+                prefix = PurePath('${pcfiledir}', os.path.relpath(prefix, pkgroot_))
+        fname = os.path.join(outdir, pcfile)
+        with open(fname, 'w', encoding='utf-8') as ofile:
+            for optname in optnames:
+                if optname in referenced_vars - varnames:
+                    if optname == 'prefix':
+                        ofile.write('prefix={}\n'.format(self._escape(prefix)))
+                    else:
+                        dirpath = PurePath(_as_str(coredata.get_option(mesonlib.OptionKey(optname))))
+                        ofile.write('{}={}\n'.format(optname, self._escape('${prefix}' / dirpath)))
+            if uninstalled and not dataonly:
+                ofile.write('srcdir={}\n'.format(self._escape(srcdir)))
+            if variables or unescaped_variables:
+                ofile.write('\n')
+            for k, v in variables:
+                ofile.write('{}={}\n'.format(k, self._escape(v)))
+            for k, v in unescaped_variables:
+                ofile.write(f'{k}={v}\n')
+            ofile.write('\n')
+            ofile.write(f'Name: {name}\n')
+            if len(description) > 0:
+                ofile.write(f'Description: {description}\n')
+            if len(url) > 0:
+                ofile.write(f'URL: {url}\n')
+            ofile.write(f'Version: {version}\n')
+            reqs_str = deps.format_reqs(deps.pub_reqs)
+            if len(reqs_str) > 0:
+                ofile.write(f'Requires: {reqs_str}\n')
+            reqs_str = deps.format_reqs(deps.priv_reqs)
+            if len(reqs_str) > 0:
+                ofile.write(f'Requires.private: {reqs_str}\n')
+            if len(conflicts) > 0:
+                ofile.write('Conflicts: {}\n'.format(' '.join(conflicts)))
+
+            def generate_libs_flags(libs: T.List[LIBS]) -> T.Iterable[str]:
+                msg = 'Library target {0!r} has {1!r} set. Compilers ' \
+                      'may not find it from its \'-l{2}\' linker flag in the ' \
+                      '{3!r} pkg-config file.'
+                Lflags = []
+                for l in libs:
+                    if isinstance(l, str):
+                        yield l
+                    else:
+                        install_dir: T.Union[str, bool]
+                        if uninstalled:
+                            install_dir = os.path.dirname(state.backend.get_target_filename_abs(l))
+                        else:
+                            _i = l.get_custom_install_dir()
+                            install_dir = _i[0] if _i else None
+                        if install_dir is False:
+                            continue
+                        if isinstance(l, build.BuildTarget) and 'cs' in l.compilers:
+                            if isinstance(install_dir, str):
+                                Lflag = '-r{}/{}'.format(self._escape(self._make_relative(prefix, install_dir)), l.filename)
+                            else:  # install_dir is True
+                                Lflag = '-r${libdir}/%s' % l.filename
+                        else:
+                            if isinstance(install_dir, str):
+                                Lflag = '-L{}'.format(self._escape(self._make_relative(prefix, install_dir)))
+                            else:  # install_dir is True
+                                Lflag = '-L${libdir}'
+                        if Lflag not in Lflags:
+                            Lflags.append(Lflag)
+                            yield Lflag
+                        lname = self._get_lname(l, msg, pcfile)
+                        # If using a custom suffix, the compiler may not be able to
+                        # find the library
+                        if isinstance(l, build.BuildTarget) and l.name_suffix_set:
+                            mlog.warning(msg.format(l.name, 'name_suffix', lname, pcfile))
+                        if isinstance(l, (build.CustomTarget, build.CustomTargetIndex)) or 'cs' not in l.compilers:
+                            yield f'-l{lname}'
+
+            if len(deps.pub_libs) > 0:
+                ofile.write('Libs: {}\n'.format(' '.join(generate_libs_flags(deps.pub_libs))))
+            if len(deps.priv_libs) > 0:
+                ofile.write('Libs.private: {}\n'.format(' '.join(generate_libs_flags(deps.priv_libs))))
+
+            cflags: T.List[str] = []
+            if uninstalled:
+                for d in deps.uninstalled_incdirs:
+                    for basedir in ['${prefix}', '${srcdir}']:
+                        path = self._escape(PurePath(basedir, d).as_posix())
+                        cflags.append(f'-I{path}')
+            else:
+                for d in subdirs:
+                    if d == '.':
+                        cflags.append('-I${includedir}')
+                    else:
+                        cflags.append(self._escape(PurePath('-I${includedir}') / d))
+            cflags += [self._escape(f) for f in deps.cflags]
+            if cflags and not dataonly:
+                ofile.write('Cflags: {}\n'.format(' '.join(cflags)))
+
+    @typed_pos_args('pkgconfig.generate', optargs=[(build.SharedLibrary, build.StaticLibrary)])
+    @typed_kwargs(
+        'pkgconfig.generate',
+        D_MODULE_VERSIONS_KW.evolve(since='0.43.0'),
+        INSTALL_DIR_KW,
+        KwargInfo('conflicts', ContainerTypeInfo(list, str), default=[], listify=True),
+        KwargInfo('dataonly', bool, default=False, since='0.54.0'),
+        KwargInfo('description', (str, NoneType)),
+        KwargInfo('extra_cflags', ContainerTypeInfo(list, str), default=[], listify=True, since='0.42.0'),
+        KwargInfo('filebase', (str, NoneType), validator=lambda x: 'must not be an empty string' if x == '' else None),
+        KwargInfo('name', (str, NoneType), validator=lambda x: 'must not be an empty string' if x == '' else None),
+        KwargInfo('subdirs', ContainerTypeInfo(list, str), default=[], listify=True),
+        KwargInfo('url', str, default=''),
+        KwargInfo('version', (str, NoneType)),
+        VARIABLES_KW.evolve(name="unescaped_uninstalled_variables", since='0.59.0'),
+        VARIABLES_KW.evolve(name="unescaped_variables", since='0.59.0'),
+        VARIABLES_KW.evolve(name="uninstalled_variables", since='0.54.0', since_values={dict: '0.56.0'}),
+        VARIABLES_KW.evolve(since='0.41.0', since_values={dict: '0.56.0'}),
+        _PKG_LIBRARIES,
+        _PKG_LIBRARIES.evolve(name='libraries_private'),
+        _PKG_REQUIRES,
+        _PKG_REQUIRES.evolve(name='requires_private'),
+    )
+    def generate(self, state: ModuleState,
+                 args: T.Tuple[T.Optional[T.Union[build.SharedLibrary, build.StaticLibrary]]],
+                 kwargs: GenerateKw) -> ModuleReturnValue:
+        default_version = state.project_version
+        default_install_dir: T.Optional[str] = None
+        default_description: T.Optional[str] = None
+        default_name: T.Optional[str] = None
+        mainlib: T.Optional[T.Union[build.SharedLibrary, build.StaticLibrary]] = None
+        default_subdirs = ['.']
+        if args[0]:
+            FeatureNew.single_use('pkgconfig.generate optional positional argument', '0.46.0', state.subproject)
+            mainlib = args[0]
+            default_name = mainlib.name
+            default_description = state.project_name + ': ' + mainlib.name
+            install_dir = mainlib.get_custom_install_dir()
+            if install_dir and isinstance(install_dir[0], str):
+                default_install_dir = os.path.join(install_dir[0], 'pkgconfig')
+        else:
+            if kwargs['version'] is None:
+                FeatureNew.single_use('pkgconfig.generate implicit version keyword', '0.46.0', state.subproject)
+            msg = ('pkgconfig.generate: if a library is not passed as a '
+                   'positional argument, the {!r} keyword argument is '
+                   'required.')
+            if kwargs['name'] is None:
+                raise build.InvalidArguments(msg.format('name'))
+            if kwargs['description'] is None:
+                raise build.InvalidArguments(msg.format('description'))
+
+        dataonly = kwargs['dataonly']
+        if dataonly:
+            default_subdirs = []
+            blocked_vars = ['libraries', 'libraries_private', 'requires_private', 'extra_cflags', 'subdirs']
+            if any(kwargs[k] for k in blocked_vars):  # type: ignore
+                raise mesonlib.MesonException(f'Cannot combine dataonly with any of {blocked_vars}')
+            default_install_dir = os.path.join(state.environment.get_datadir(), 'pkgconfig')
+
+        subdirs = kwargs['subdirs'] or default_subdirs
+        version = kwargs['version'] if kwargs['version'] is not None else default_version
+        name = kwargs['name'] if kwargs['name'] is not None else default_name
+        assert isinstance(name, str), 'for mypy'
+        filebase = kwargs['filebase'] if kwargs['filebase'] is not None else name
+        description = kwargs['description'] if kwargs['description'] is not None else default_description
+        url = kwargs['url']
+        conflicts = kwargs['conflicts']
+
+        # Prepend the main library to public libraries list. This is required
+        # so dep.add_pub_libs() can handle dependency ordering correctly and put
+        # extra libraries after the main library.
+        libraries = kwargs['libraries'].copy()
+        if mainlib:
+            libraries.insert(0, mainlib)
+
+        deps = DependenciesHelper(state, filebase, self._metadata)
+        deps.add_pub_libs(libraries)
+        deps.add_priv_libs(kwargs['libraries_private'])
+        deps.add_pub_reqs(kwargs['requires'])
+        deps.add_priv_reqs(kwargs['requires_private'])
+        deps.add_cflags(kwargs['extra_cflags'])
+
+        dversions = kwargs['d_module_versions']
+        if dversions:
+            compiler = state.environment.coredata.compilers.host.get('d')
+            if compiler:
+                deps.add_cflags(compiler.get_feature_args({'versions': dversions}, None))
+
+        deps.remove_dups()
+
+        def parse_variable_list(vardict: T.Dict[str, str]) -> T.List[T.Tuple[str, str]]:
+            reserved = ['prefix', 'libdir', 'includedir']
+            variables = []
+            for name, value in vardict.items():
+                if not dataonly and name in reserved:
+                    raise mesonlib.MesonException(f'Variable "{name}" is reserved')
+                variables.append((name, value))
+            return variables
+
+        variables = parse_variable_list(kwargs['variables'])
+        unescaped_variables = parse_variable_list(kwargs['unescaped_variables'])
+
+        pcfile = filebase + '.pc'
+        pkgroot = pkgroot_name = kwargs['install_dir'] or default_install_dir
+        if pkgroot is None:
+            if mesonlib.is_freebsd():
+                pkgroot = os.path.join(_as_str(state.environment.coredata.get_option(mesonlib.OptionKey('prefix'))), 'libdata', 'pkgconfig')
+                pkgroot_name = os.path.join('{prefix}', 'libdata', 'pkgconfig')
+            elif mesonlib.is_haiku():
+                pkgroot = os.path.join(_as_str(state.environment.coredata.get_option(mesonlib.OptionKey('prefix'))), 'develop', 'lib', 'pkgconfig')
+                pkgroot_name = os.path.join('{prefix}', 'develop', 'lib', 'pkgconfig')
+            else:
+                pkgroot = os.path.join(_as_str(state.environment.coredata.get_option(mesonlib.OptionKey('libdir'))), 'pkgconfig')
+                pkgroot_name = os.path.join('{libdir}', 'pkgconfig')
+        relocatable = state.get_option('relocatable', module='pkgconfig')
+        self._generate_pkgconfig_file(state, deps, subdirs, name, description, url,
+                                      version, pcfile, conflicts, variables,
+                                      unescaped_variables, False, dataonly,
+                                      pkgroot=pkgroot if relocatable else None)
+        res = build.Data([mesonlib.File(True, state.environment.get_scratch_dir(), pcfile)], pkgroot, pkgroot_name, None, state.subproject, install_tag='devel')
+        variables = parse_variable_list(kwargs['uninstalled_variables'])
+        unescaped_variables = parse_variable_list(kwargs['unescaped_uninstalled_variables'])
+
+        pcfile = filebase + '-uninstalled.pc'
+        self._generate_pkgconfig_file(state, deps, subdirs, name, description, url,
+                                      version, pcfile, conflicts, variables,
+                                      unescaped_variables, uninstalled=True, dataonly=dataonly)
+        # Associate the main library with this generated pc file. If the library
+        # is used in any subsequent call to generate(), it will be emitted as a
+        # 'Requires:' or 'Requires.private:' entry instead of a '-l' flag.
+        # Backward compatibility: We used to set 'generated_pc' on all public
+        # libraries instead of just the main one. Keep doing that but warn if
+        # anyone is relying on that deprecated behaviour.
+        if mainlib:
+            if mainlib.get_id() not in self._metadata:
+                self._metadata[mainlib.get_id()] = MetaData(
+                    filebase, name, state.current_node)
+            else:
+                mlog.warning('Already generated a pkg-config file for', mlog.bold(mainlib.name))
+        else:
+            for lib in deps.pub_libs:
+                if not isinstance(lib, str) and lib.get_id() not in self._metadata:
+                    self._metadata[lib.get_id()] = MetaData(
+                        filebase, name, state.current_node)
+        if self.devenv is None:
+            self.devenv = PkgConfigDependency.get_env(state.environment, mesonlib.MachineChoice.HOST, uninstalled=True)
+        return ModuleReturnValue(res, [res])
+
+
+def initialize(interp: Interpreter) -> PkgConfigModule:
+    return PkgConfigModule()
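+
+# A minimal, hypothetical meson.build sketch for this module:
+#
+#   pkg = import('pkgconfig')
+#   foo = library('foo', 'foo.c')
+#   pkg.generate(foo, description: 'The foo library')
+#
+# This writes foo.pc into the scratch dir for installation and
+# foo-uninstalled.pc under meson-uninstalled/ in the build dir.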
diff --git a/vendored-meson/meson/mesonbuild/modules/python.py b/vendored-meson/meson/mesonbuild/modules/python.py
new file mode 100644
index 000000000000..ac74e13dc5b7
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/modules/python.py
@@ -0,0 +1,487 @@
+# Copyright 2018 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+import copy, json, os, shutil
+import typing as T
+
+from . import ExtensionModule, ModuleInfo
+from .. import mesonlib
+from .. import mlog
+from ..coredata import UserFeatureOption
+from ..build import known_shmod_kwargs
+from ..dependencies import NotFoundDependency
+from ..dependencies.detect import get_dep_identifier, find_external_dependency
+from ..dependencies.python import BasicPythonExternalProgram, python_factory, _PythonDependencyBase
+from ..interpreter import ExternalProgramHolder, extract_required_kwarg, permitted_dependency_kwargs
+from ..interpreter import primitives as P_OBJ
+from ..interpreter.type_checking import NoneType, PRESERVE_PATH_KW
+from ..interpreterbase import (
+    noPosargs, noKwargs, permittedKwargs, ContainerTypeInfo,
+    InvalidArguments, typed_pos_args, typed_kwargs, KwargInfo,
+    FeatureNew, FeatureNewKwargs, disablerIfNotFound
+)
+from ..mesonlib import MachineChoice
+from ..programs import ExternalProgram, NonExistingExternalProgram
+
+if T.TYPE_CHECKING:
+    from typing_extensions import TypedDict
+
+    from . import ModuleState
+    from ..build import Build, SharedModule, Data
+    from ..dependencies import Dependency
+    from ..interpreter import Interpreter
+    from ..interpreter.kwargs import ExtractRequired
+    from ..interpreterbase.interpreterbase import TYPE_var, TYPE_kwargs
+
+    class PyInstallKw(TypedDict):
+
+        pure: T.Optional[bool]
+        subdir: str
+        install_tag: T.Optional[str]
+
+    class FindInstallationKw(ExtractRequired):
+
+        disabler: bool
+        modules: T.List[str]
+        pure: T.Optional[bool]
+
+
+mod_kwargs = {'subdir'}
+mod_kwargs.update(known_shmod_kwargs)
+mod_kwargs -= {'name_prefix', 'name_suffix'}
+
+
+class PythonExternalProgram(BasicPythonExternalProgram):
+
+    # This is a ClassVar instead of an instance bool, because although an
+    # installation is cached, we actually copy it, modify attributes such as pure,
+    # and return a temporary one rather than the cached object.
+    run_bytecompile: T.ClassVar[T.Dict[str, bool]] = {}
+
+    def sanity(self, state: T.Optional['ModuleState'] = None) -> bool:
+        ret = super().sanity()
+        if ret:
+            self.platlib = self._get_path(state, 'platlib')
+            self.purelib = self._get_path(state, 'purelib')
+        return ret
+
+    def _get_path(self, state: T.Optional['ModuleState'], key: str) -> str:
+        rel_path = self.info['install_paths'][key][1:]
+        if not state:
+            # This happens only from run_project_tests.py
+            return rel_path
+        value = state.get_option(f'{key}dir', module='python')
+        if value:
+            if state.is_user_defined_option('install_env', module='python'):
+                raise mesonlib.MesonException(f'python.{key}dir and python.install_env are mutually exclusive')
+            return value
+
+        install_env = state.get_option('install_env', module='python')
+        if install_env == 'auto':
+            install_env = 'venv' if self.info['is_venv'] else 'system'
+
+        if install_env == 'system':
+            rel_path = os.path.join(self.info['variables']['prefix'], rel_path)
+        elif install_env == 'venv':
+            if not self.info['is_venv']:
+                raise mesonlib.MesonException('python.install_env cannot be set to "venv" unless you are in a venv!')
+            # Inside a venv, deb_system is *never* active, hence info['paths'] may be wrong.
+            rel_path = self.info['sysconfig_paths'][key]
+
+        return rel_path
+
+
+_PURE_KW = KwargInfo('pure', (bool, NoneType))
+_SUBDIR_KW = KwargInfo('subdir', str, default='')
+
+
+class PythonInstallation(ExternalProgramHolder):
+    def __init__(self, python: 'PythonExternalProgram', interpreter: 'Interpreter'):
+        ExternalProgramHolder.__init__(self, python, interpreter)
+        info = python.info
+        prefix = self.interpreter.environment.coredata.get_option(mesonlib.OptionKey('prefix'))
+        assert isinstance(prefix, str), 'for mypy'
+        self.variables = info['variables']
+        self.suffix = info['suffix']
+        self.paths = info['paths']
+        self.pure = python.pure
+        self.platlib_install_path = os.path.join(prefix, python.platlib)
+        self.purelib_install_path = os.path.join(prefix, python.purelib)
+        self.version = info['version']
+        self.platform = info['platform']
+        self.is_pypy = info['is_pypy']
+        self.link_libpython = info['link_libpython']
+        self.methods.update({
+            'extension_module': self.extension_module_method,
+            'dependency': self.dependency_method,
+            'install_sources': self.install_sources_method,
+            'get_install_dir': self.get_install_dir_method,
+            'language_version': self.language_version_method,
+            'found': self.found_method,
+            'has_path': self.has_path_method,
+            'get_path': self.get_path_method,
+            'has_variable': self.has_variable_method,
+            'get_variable': self.get_variable_method,
+            'path': self.path_method,
+        })
+
+    @permittedKwargs(mod_kwargs)
+    def extension_module_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> 'SharedModule':
+        if 'install_dir' in kwargs:
+            if 'subdir' in kwargs:
+                raise InvalidArguments('"subdir" and "install_dir" are mutually exclusive')
+        else:
+            subdir = kwargs.pop('subdir', '')
+            if not isinstance(subdir, str):
+                raise InvalidArguments('"subdir" argument must be a string.')
+
+            kwargs['install_dir'] = self._get_install_dir_impl(False, subdir)
+
+        new_deps = mesonlib.extract_as_list(kwargs, 'dependencies')
+        has_pydep = any(isinstance(dep, _PythonDependencyBase) for dep in new_deps)
+        if not has_pydep:
+            pydep = self._dependency_method_impl({})
+            if not pydep.found():
+                raise mesonlib.MesonException('Python dependency not found')
+            new_deps.append(pydep)
+            FeatureNew.single_use('python_installation.extension_module with implicit dependency on python',
+                                  '0.63.0', self.subproject, 'use python_installation.dependency()',
+                                  self.current_node)
+        kwargs['dependencies'] = new_deps
+
+        # msys2's python3 has a multi-part suffix such as "-cpython-36m.dll".
+        # Meson's name_suffix can only hold the final extension, so fold
+        # everything before the last '.' into the target name and keep the
+        # remainder (e.g. 'dll') as the suffix.
+        split, suffix = self.suffix.rsplit('.', 1)
+        args[0] += split
+
+        kwargs['name_prefix'] = ''
+        kwargs['name_suffix'] = suffix
+
+        if 'gnu_symbol_visibility' not in kwargs and \
+                (self.is_pypy or mesonlib.version_compare(self.version, '>=3.9')):
+            kwargs['gnu_symbol_visibility'] = 'inlineshidden'
+
+        return self.interpreter.func_shared_module(None, args, kwargs)
+
+    def _dependency_method_impl(self, kwargs: TYPE_kwargs) -> Dependency:
+        for_machine = self.interpreter.machine_from_native_kwarg(kwargs)
+        identifier = get_dep_identifier(self._full_path(), kwargs)
+
+        dep = self.interpreter.coredata.deps[for_machine].get(identifier)
+        if dep is not None:
+            return dep
+
+        new_kwargs = kwargs.copy()
+        new_kwargs['required'] = False
+        candidates = python_factory(self.interpreter.environment, for_machine, new_kwargs, self.held_object)
+        dep = find_external_dependency('python', self.interpreter.environment, new_kwargs, candidates)
+
+        self.interpreter.coredata.deps[for_machine].put(identifier, dep)
+        return dep
+
+    @disablerIfNotFound
+    @permittedKwargs(permitted_dependency_kwargs | {'embed'})
+    @FeatureNewKwargs('python_installation.dependency', '0.53.0', ['embed'])
+    @noPosargs
+    def dependency_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> 'Dependency':
+        disabled, required, feature = extract_required_kwarg(kwargs, self.subproject)
+        if disabled:
+            mlog.log('Dependency', mlog.bold('python'), 'skipped: feature', mlog.bold(feature), 'disabled')
+            return NotFoundDependency('python', self.interpreter.environment)
+        else:
+            dep = self._dependency_method_impl(kwargs)
+            if required and not dep.found():
+                raise mesonlib.MesonException('Python dependency not found')
+            return dep
+
+    @typed_pos_args('install_data', varargs=(str, mesonlib.File))
+    @typed_kwargs(
+        'python_installation.install_sources',
+        _PURE_KW,
+        _SUBDIR_KW,
+        PRESERVE_PATH_KW,
+        KwargInfo('install_tag', (str, NoneType), since='0.60.0')
+    )
+    def install_sources_method(self, args: T.Tuple[T.List[T.Union[str, mesonlib.File]]],
+                               kwargs: 'PyInstallKw') -> 'Data':
+        self.held_object.run_bytecompile[self.version] = True
+        tag = kwargs['install_tag'] or 'python-runtime'
+        pure = kwargs['pure'] if kwargs['pure'] is not None else self.pure
+        install_dir = self._get_install_dir_impl(pure, kwargs['subdir'])
+        return self.interpreter.install_data_impl(
+            self.interpreter.source_strings_to_files(args[0]),
+            install_dir,
+            mesonlib.FileMode(), rename=None, tag=tag, install_data_type='python',
+            install_dir_name=install_dir.optname,
+            preserve_path=kwargs['preserve_path'])
+
+    @noPosargs
+    @typed_kwargs('python_installation.install_dir', _PURE_KW, _SUBDIR_KW)
+    def get_install_dir_method(self, args: T.List['TYPE_var'], kwargs: 'PyInstallKw') -> str:
+        self.held_object.run_bytecompile[self.version] = True
+        pure = kwargs['pure'] if kwargs['pure'] is not None else self.pure
+        return self._get_install_dir_impl(pure, kwargs['subdir'])
+
+    def _get_install_dir_impl(self, pure: bool, subdir: str) -> P_OBJ.OptionString:
+        if pure:
+            base = self.purelib_install_path
+            name = '{py_purelib}'
+        else:
+            base = self.platlib_install_path
+            name = '{py_platlib}'
+
+        return P_OBJ.OptionString(os.path.join(base, subdir), os.path.join(name, subdir))
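+    # For illustration, pure=True with subdir 'mypkg' (hypothetical) yields an
+    # OptionString valued '<purelib install path>/mypkg' with the symbolic
+    # name '{py_purelib}/mypkg'.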
+
+    @noPosargs
+    @noKwargs
+    def language_version_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> str:
+        return self.version
+
+    @typed_pos_args('python_installation.has_path', str)
+    @noKwargs
+    def has_path_method(self, args: T.Tuple[str], kwargs: 'TYPE_kwargs') -> bool:
+        return args[0] in self.paths
+
+    @typed_pos_args('python_installation.get_path', str, optargs=[object])
+    @noKwargs
+    def get_path_method(self, args: T.Tuple[str, T.Optional['TYPE_var']], kwargs: 'TYPE_kwargs') -> 'TYPE_var':
+        path_name, fallback = args
+        try:
+            return self.paths[path_name]
+        except KeyError:
+            if fallback is not None:
+                return fallback
+            raise InvalidArguments(f'{path_name} is not a valid path name')
+
+    @typed_pos_args('python_installation.has_variable', str)
+    @noKwargs
+    def has_variable_method(self, args: T.Tuple[str], kwargs: 'TYPE_kwargs') -> bool:
+        return args[0] in self.variables
+
+    @typed_pos_args('python_installation.get_variable', str, optargs=[object])
+    @noKwargs
+    def get_variable_method(self, args: T.Tuple[str, T.Optional['TYPE_var']], kwargs: 'TYPE_kwargs') -> 'TYPE_var':
+        var_name, fallback = args
+        try:
+            return self.variables[var_name]
+        except KeyError:
+            if fallback is not None:
+                return fallback
+            raise InvalidArguments(f'{var_name} is not a valid variable name')
+
+    @noPosargs
+    @noKwargs
+    @FeatureNew('Python module path method', '0.50.0')
+    def path_method(self, args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> str:
+        return super().path_method(args, kwargs)
+
+
+class PythonModule(ExtensionModule):
+
+    INFO = ModuleInfo('python', '0.46.0')
+
+    def __init__(self, interpreter: 'Interpreter') -> None:
+        super().__init__(interpreter)
+        self.installations: T.Dict[str, ExternalProgram] = {}
+        self.methods.update({
+            'find_installation': self.find_installation,
+        })
+
+    def _get_install_scripts(self) -> T.List[mesonlib.ExecutableSerialisation]:
+        backend = self.interpreter.backend
+        ret = []
+        optlevel = self.interpreter.environment.coredata.get_option(mesonlib.OptionKey('bytecompile', module='python'))
+        if optlevel == -1:
+            return ret
+        if not any(PythonExternalProgram.run_bytecompile.values()):
+            return ret
+
+        installdata = backend.create_install_data()
+        py_files = []
+
+        def should_append(f, isdir: bool = False):
+            # This uses the install_plan decorated names to see if the original source was propagated via
+            # install_sources() or get_install_dir().
+            return f.startswith(('{py_platlib}', '{py_purelib}')) and (f.endswith('.py') or isdir)
+
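+        # For example (hypothetical names): an entry decorated as
+        # '{py_purelib}/mypkg/mod.py' passes should_append(), while
+        # '{prefix}/share/doc/readme.txt' does not.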
+        for t in installdata.targets:
+            if should_append(t.out_name):
+                py_files.append((t.out_name, os.path.join(installdata.prefix, t.outdir, os.path.basename(t.fname))))
+        for d in installdata.data:
+            if should_append(d.install_path_name):
+                py_files.append((d.install_path_name, os.path.join(installdata.prefix, d.install_path)))
+        for d in installdata.install_subdirs:
+            if should_append(d.install_path_name, True):
+                py_files.append((d.install_path_name, os.path.join(installdata.prefix, d.install_path)))
+
+        import importlib.resources
+        pycompile = os.path.join(self.interpreter.environment.get_scratch_dir(), 'pycompile.py')
+        with open(pycompile, 'wb') as f:
+            f.write(importlib.resources.read_binary('mesonbuild.scripts', 'pycompile.py'))
+
+        for i in self.installations.values():
+            if isinstance(i, PythonExternalProgram) and i.run_bytecompile[i.info['version']]:
+                i = T.cast(PythonExternalProgram, i)
+                manifest = f'python-{i.info["version"]}-installed.json'
+                manifest_json = []
+                for name, f in py_files:
+                    if f.startswith((os.path.join(installdata.prefix, i.platlib), os.path.join(installdata.prefix, i.purelib))):
+                        manifest_json.append(name)
+                with open(os.path.join(self.interpreter.environment.get_scratch_dir(), manifest), 'w', encoding='utf-8') as f:
+                    json.dump(manifest_json, f)
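+                # Illustrative manifest content (hypothetical names):
+                #   ["{py_purelib}/mypkg/__init__.py", "{py_purelib}/mypkg/core.py"]
+                # pycompile.py resolves these through installdir_map below and
+                # byte-compiles the resolved files.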
+                cmd = i.command + [pycompile, manifest, str(optlevel)]
+
+                script = backend.get_executable_serialisation(cmd, verbose=True, tag='python-runtime',
+                                                              installdir_map={'py_purelib': i.purelib, 'py_platlib': i.platlib})
+                ret.append(script)
+        return ret
+
+    def postconf_hook(self, b: Build) -> None:
+        b.install_scripts.extend(self._get_install_scripts())
+
+    # https://www.python.org/dev/peps/pep-0397/
+    @staticmethod
+    def _get_win_pythonpath(name_or_path: str) -> T.Optional[str]:
+        if not name_or_path.startswith(('python2', 'python3')):
+            return None
+        if not shutil.which('py'):
+            # program not installed, return without an exception
+            return None
+        ver = f'-{name_or_path[6:]}'
+        cmd = ['py', ver, '-c', "import sysconfig; print(sysconfig.get_config_var('BINDIR'))"]
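+        # For example (hypothetical version): name_or_path 'python3.11'
+        # queries the launcher as
+        #   py -3.11 -c "import sysconfig; print(sysconfig.get_config_var('BINDIR'))"
+        # and, if the printed directory exists, the method returns
+        # os.path.join(<BINDIR>, 'python') below.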
+        _, stdout, _ = mesonlib.Popen_safe(cmd)
+        directory = stdout.strip()
+        if os.path.exists(directory):
+            return os.path.join(directory, 'python')
+        else:
+            return None
+
+    def _find_installation_impl(self, state: 'ModuleState', display_name: str, name_or_path: str, required: bool) -> ExternalProgram:
+        if not name_or_path:
+            python = PythonExternalProgram('python3', mesonlib.python_command)
+        else:
+            tmp_python = ExternalProgram.from_entry(display_name, name_or_path)
+            python = PythonExternalProgram(display_name, ext_prog=tmp_python)
+
+            if not python.found() and mesonlib.is_windows():
+                pythonpath = self._get_win_pythonpath(name_or_path)
+                if pythonpath is not None:
+                    name_or_path = pythonpath
+                    python = PythonExternalProgram(name_or_path)
+
+            # Last-ditch effort: python2 or python3 can be named plain
+            # `python` on various platforms, so don't give up just yet. If an
+            # executable named python is available and has a compatible
+            # version, use it.
+            if not python.found() and name_or_path in {'python2', 'python3'}:
+                tmp_python = ExternalProgram.from_entry(display_name, 'python')
+                python = PythonExternalProgram(name_or_path, ext_prog=tmp_python)
+
+        if python.found():
+            if python.sanity(state):
+                return python
+            else:
+                sanitymsg = f'{python} is not a valid python or it is missing distutils'
+                if required:
+                    raise mesonlib.MesonException(sanitymsg)
+                else:
+                    mlog.warning(sanitymsg, location=state.current_node)
+
+        return NonExistingExternalProgram(python.name)
+
+    @disablerIfNotFound
+    @typed_pos_args('python.find_installation', optargs=[str])
+    @typed_kwargs(
+        'python.find_installation',
+        KwargInfo('required', (bool, UserFeatureOption), default=True),
+        KwargInfo('disabler', bool, default=False, since='0.49.0'),
+        KwargInfo('modules', ContainerTypeInfo(list, str), listify=True, default=[], since='0.51.0'),
+        _PURE_KW.evolve(default=True, since='0.64.0'),
+    )
+    def find_installation(self, state: 'ModuleState', args: T.Tuple[T.Optional[str]],
+                          kwargs: 'FindInstallationKw') -> ExternalProgram:
+        feature_check = FeatureNew('Passing "feature" option to find_installation', '0.48.0')
+        disabled, required, feature = extract_required_kwarg(kwargs, state.subproject, feature_check)
+
+        # FIXME: this code is *full* of sharp corners. It assumes that it's
+        # going to get a string value (or now a list of length 1), of `python2`
+        # or `python3` which is completely nonsense.  On windows the value could
+        # easily be `['py', '-3']`, or `['py', '-3.7']` to get a very specific
+        # version of python. On Linux we might want a python that's not in
+        # $PATH, or that uses a wrapper of some kind.
+        np: T.List[str] = state.environment.lookup_binary_entry(MachineChoice.HOST, 'python') or []
+        fallback = args[0]
+        display_name = fallback or 'python'
+        if not np and fallback is not None:
+            np = [fallback]
+        name_or_path = np[0] if np else None
+
+        if disabled:
+            mlog.log('Program', name_or_path or 'python', 'found:', mlog.red('NO'), '(disabled by:', mlog.bold(feature), ')')
+            return NonExistingExternalProgram()
+
+        python = self.installations.get(name_or_path)
+        if not python:
+            python = self._find_installation_impl(state, display_name, name_or_path, required)
+            self.installations[name_or_path] = python
+
+        want_modules = kwargs['modules']
+        found_modules: T.List[str] = []
+        missing_modules: T.List[str] = []
+        if python.found() and want_modules:
+            for mod in want_modules:
+                p, *_ = mesonlib.Popen_safe(
+                    python.command +
+                    ['-c', f'import {mod}'])
+                if p.returncode != 0:
+                    missing_modules.append(mod)
+                else:
+                    found_modules.append(mod)
+
+        msg: T.List['mlog.TV_Loggable'] = ['Program', python.name]
+        if want_modules:
+            msg.append('({})'.format(', '.join(want_modules)))
+        msg.append('found:')
+        if python.found() and not missing_modules:
+            msg.extend([mlog.green('YES'), '({})'.format(' '.join(python.command))])
+        else:
+            msg.append(mlog.red('NO'))
+        if found_modules:
+            msg.append('modules:')
+            msg.append(', '.join(found_modules))
+
+        mlog.log(*msg)
+
+        if not python.found():
+            if required:
+                raise mesonlib.MesonException('{} not found'.format(name_or_path or 'python'))
+            return NonExistingExternalProgram(python.name)
+        elif missing_modules:
+            if required:
+                raise mesonlib.MesonException('{} is missing modules: {}'.format(name_or_path or 'python', ', '.join(missing_modules)))
+            return NonExistingExternalProgram(python.name)
+        else:
+            python = copy.copy(python)
+            python.pure = kwargs['pure']
+            python.run_bytecompile.setdefault(python.info['version'], False)
+            return python
+
+        raise mesonlib.MesonBugException('Unreachable code was reached (PythonModule.find_installation).')
+
+
+def initialize(interpreter: 'Interpreter') -> PythonModule:
+    mod = PythonModule(interpreter)
+    mod.interpreter.append_holder_map(PythonExternalProgram, PythonInstallation)
+    return mod
diff --git a/vendored-meson/meson/mesonbuild/modules/python3.py b/vendored-meson/meson/mesonbuild/modules/python3.py
new file mode 100644
index 000000000000..065e8d7a2318
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/modules/python3.py
@@ -0,0 +1,85 @@
+# Copyright 2016-2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+import sysconfig
+from .. import mesonlib
+
+from . import ExtensionModule, ModuleInfo
+from ..interpreterbase import typed_pos_args, noPosargs, noKwargs, permittedKwargs
+from ..build import known_shmod_kwargs
+from ..programs import ExternalProgram
+
+
+class Python3Module(ExtensionModule):
+
+    INFO = ModuleInfo('python3', '0.38.0', deprecated='0.48.0')
+
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+        self.methods.update({
+            'extension_module': self.extension_module,
+            'find_python': self.find_python,
+            'language_version': self.language_version,
+            'sysconfig_path': self.sysconfig_path,
+        })
+
+    @permittedKwargs(known_shmod_kwargs)
+    def extension_module(self, state, args, kwargs):
+        if 'name_prefix' in kwargs:
+            raise mesonlib.MesonException('Name_prefix is set automatically, specifying it is forbidden.')
+        if 'name_suffix' in kwargs:
+            raise mesonlib.MesonException('Name_suffix is set automatically, specifying it is forbidden.')
+        host_system = state.host_machine.system
+        if host_system == 'darwin':
+            # Default suffix is 'dylib' but Python does not use it for extensions.
+            suffix = 'so'
+        elif host_system == 'windows':
+            # On Windows the extension is pyd for some inexplicable reason.
+            suffix = 'pyd'
+        else:
+            suffix = []
+        kwargs['name_prefix'] = ''
+        kwargs['name_suffix'] = suffix
+        return self.interpreter.func_shared_module(None, args, kwargs)
+
+    @noPosargs
+    @noKwargs
+    def find_python(self, state, args, kwargs):
+        command = state.environment.lookup_binary_entry(mesonlib.MachineChoice.HOST, 'python3')
+        if command is not None:
+            py3 = ExternalProgram.from_entry('python3', command)
+        else:
+            py3 = ExternalProgram('python3', mesonlib.python_command, silent=True)
+        return py3
+
+    @noPosargs
+    @noKwargs
+    def language_version(self, state, args, kwargs):
+        return sysconfig.get_python_version()
+
+    @noKwargs
+    @typed_pos_args('python3.sysconfig_path', str)
+    def sysconfig_path(self, state, args, kwargs):
+        path_name = args[0]
+        valid_names = sysconfig.get_path_names()
+        if path_name not in valid_names:
+            raise mesonlib.MesonException(f'{path_name} is not a valid path name {valid_names}.')
+
+        # Get a relative path without a prefix, e.g. lib/python3.6/site-packages
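+        # (Illustrative, hypothetical values: sysconfig_path('purelib') might
+        # return 'lib/python3.11/site-packages' on a typical POSIX layout.)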
+        return sysconfig.get_path(path_name, vars={'base': '', 'platbase': '', 'installed_base': ''})[1:]
+
+
+def initialize(*args, **kwargs):
+    return Python3Module(*args, **kwargs)
diff --git a/vendored-meson/meson/mesonbuild/modules/qt.py b/vendored-meson/meson/mesonbuild/modules/qt.py
new file mode 100644
index 000000000000..85558885343a
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/modules/qt.py
@@ -0,0 +1,608 @@
+# Copyright 2015 The Meson development team
+# Copyright © 2021 Intel Corporation
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+import os
+import shutil
+import typing as T
+import xml.etree.ElementTree as ET
+
+from . import ModuleReturnValue, ExtensionModule
+from .. import build
+from .. import coredata
+from .. import mlog
+from ..dependencies import find_external_dependency, Dependency, ExternalLibrary
+from ..mesonlib import MesonException, File, version_compare, Popen_safe
+from ..interpreter import extract_required_kwarg
+from ..interpreter.type_checking import INSTALL_DIR_KW, INSTALL_KW, NoneType
+from ..interpreterbase import ContainerTypeInfo, FeatureDeprecated, KwargInfo, noPosargs, FeatureNew, typed_kwargs
+from ..programs import NonExistingExternalProgram
+
+if T.TYPE_CHECKING:
+    from . import ModuleState
+    from ..dependencies.qt import QtPkgConfigDependency, QmakeQtDependency
+    from ..interpreter import Interpreter
+    from ..interpreter import kwargs
+    from ..mesonlib import FileOrString
+    from ..programs import ExternalProgram
+
+    QtDependencyType = T.Union[QtPkgConfigDependency, QmakeQtDependency]
+
+    from typing_extensions import TypedDict
+
+    class ResourceCompilerKwArgs(TypedDict):
+
+        """Keyword arguments for the Resource Compiler method."""
+
+        name: T.Optional[str]
+        sources: T.Sequence[T.Union[FileOrString, build.CustomTarget, build.CustomTargetIndex, build.GeneratedList]]
+        extra_args: T.List[str]
+        method: str
+
+    class UICompilerKwArgs(TypedDict):
+
+        """Keyword arguments for the Ui Compiler method."""
+
+        sources: T.Sequence[T.Union[FileOrString, build.CustomTarget, build.CustomTargetIndex, build.GeneratedList]]
+        extra_args: T.List[str]
+        method: str
+
+    class MocCompilerKwArgs(TypedDict):
+
+        """Keyword arguments for the Moc Compiler method."""
+
+        sources: T.Sequence[T.Union[FileOrString, build.CustomTarget, build.CustomTargetIndex, build.GeneratedList]]
+        headers: T.Sequence[T.Union[FileOrString, build.CustomTarget, build.CustomTargetIndex, build.GeneratedList]]
+        extra_args: T.List[str]
+        method: str
+        include_directories: T.List[T.Union[str, build.IncludeDirs]]
+        dependencies: T.List[T.Union[Dependency, ExternalLibrary]]
+
+    class PreprocessKwArgs(TypedDict):
+
+        sources: T.List[FileOrString]
+        moc_sources: T.List[T.Union[FileOrString, build.CustomTarget]]
+        moc_headers: T.List[T.Union[FileOrString, build.CustomTarget]]
+        qresources: T.List[FileOrString]
+        ui_files: T.List[T.Union[FileOrString, build.CustomTarget]]
+        moc_extra_arguments: T.List[str]
+        rcc_extra_arguments: T.List[str]
+        uic_extra_arguments: T.List[str]
+        include_directories: T.List[T.Union[str, build.IncludeDirs]]
+        dependencies: T.List[T.Union[Dependency, ExternalLibrary]]
+        method: str
+
+    class HasToolKwArgs(kwargs.ExtractRequired):
+
+        method: str
+
+    class CompileTranslationsKwArgs(TypedDict):
+
+        build_by_default: bool
+        install: bool
+        install_dir: T.Optional[str]
+        method: str
+        qresource: T.Optional[str]
+        rcc_extra_arguments: T.List[str]
+        ts_files: T.List[T.Union[str, File, build.CustomTarget, build.CustomTargetIndex, build.GeneratedList]]
+
+class QtBaseModule(ExtensionModule):
+    _tools_detected = False
+    _rcc_supports_depfiles = False
+    _moc_supports_depfiles = False
+
+    def __init__(self, interpreter: 'Interpreter', qt_version: int = 5):
+        ExtensionModule.__init__(self, interpreter)
+        self.qt_version = qt_version
+        # It is important that this list does not change order as the order of
+        # the returned ExternalPrograms will change as well
+        self.tools: T.Dict[str, T.Union[ExternalProgram, build.Executable]] = {
+            'moc': NonExistingExternalProgram('moc'),
+            'uic': NonExistingExternalProgram('uic'),
+            'rcc': NonExistingExternalProgram('rcc'),
+            'lrelease': NonExistingExternalProgram('lrelease'),
+        }
+        self.methods.update({
+            'has_tools': self.has_tools,
+            'preprocess': self.preprocess,
+            'compile_translations': self.compile_translations,
+            'compile_resources': self.compile_resources,
+            'compile_ui': self.compile_ui,
+            'compile_moc': self.compile_moc,
+        })
+
+    def compilers_detect(self, state: 'ModuleState', qt_dep: 'QtDependencyType') -> None:
+        """Detect Qt (4 or 5) moc, uic, rcc in the specified bindir or in PATH"""
+        wanted = f'== {qt_dep.version}'
+
+        def gen_bins() -> T.Generator[T.Tuple[str, str], None, None]:
+            for b in self.tools:
+                if qt_dep.bindir:
+                    yield os.path.join(qt_dep.bindir, b), b
+                if qt_dep.libexecdir:
+                    yield os.path.join(qt_dep.libexecdir, b), b
+                # prefer the (official) <tool><version> or (unofficial) <tool>-qt<version>
+                # names of the tool to the plain one, as we
+                # don't know what the unsuffixed one points to without calling it.
+                yield f'{b}{qt_dep.qtver}', b
+                yield f'{b}-qt{qt_dep.qtver}', b
+                yield b, b
+
+        for b, name in gen_bins():
+            if self.tools[name].found():
+                continue
+
+            if name == 'lrelease':
+                arg = ['-version']
+            elif version_compare(qt_dep.version, '>= 5'):
+                arg = ['--version']
+            else:
+                arg = ['-v']
+
+            # Ensure that the version of qt and each tool are the same
+            def get_version(p: T.Union[ExternalProgram, build.Executable]) -> str:
+                _, out, err = Popen_safe(p.get_command() + arg)
+                if name == 'lrelease' or not qt_dep.version.startswith('4'):
+                    care = out
+                else:
+                    care = err
+                return care.rsplit(' ', maxsplit=1)[-1].replace(')', '').strip()
+
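+            # For example (hypothetical output): `uic --version` prints
+            # something like 'uic 5.15.2'; get_version() keeps the last
+            # whitespace-separated token, yielding '5.15.2'.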
+            p = state.find_program(b, required=False,
+                                   version_func=get_version,
+                                   wanted=wanted)
+            if p.found():
+                self.tools[name] = p
+
+    def _detect_tools(self, state: 'ModuleState', method: str, required: bool = True) -> None:
+        if self._tools_detected:
+            return
+        self._tools_detected = True
+        mlog.log(f'Detecting Qt{self.qt_version} tools')
+        kwargs = {'required': required, 'modules': 'Core', 'method': method}
+        # Just pick one to make mypy happy
+        qt = T.cast('QtPkgConfigDependency', find_external_dependency(f'qt{self.qt_version}', state.environment, kwargs))
+        if qt.found():
+            # Get all tools and then make sure that they are the right version
+            self.compilers_detect(state, qt)
+            if version_compare(qt.version, '>=5.15.0'):
+                self._moc_supports_depfiles = True
+            else:
+                mlog.warning('moc dependencies will not work properly until you move to Qt >= 5.15', fatal=False)
+            if version_compare(qt.version, '>=5.14.0'):
+                self._rcc_supports_depfiles = True
+            else:
+                mlog.warning('rcc dependencies will not work properly until you move to Qt >= 5.14:',
+                             mlog.bold('https://bugreports.qt.io/browse/QTBUG-45460'), fatal=False)
+        else:
+            suffix = f'-qt{self.qt_version}'
+            self.tools['moc'] = NonExistingExternalProgram(name='moc' + suffix)
+            self.tools['uic'] = NonExistingExternalProgram(name='uic' + suffix)
+            self.tools['rcc'] = NonExistingExternalProgram(name='rcc' + suffix)
+            self.tools['lrelease'] = NonExistingExternalProgram(name='lrelease' + suffix)
+
+    @staticmethod
+    def _qrc_nodes(state: 'ModuleState', rcc_file: 'FileOrString') -> T.Tuple[str, T.List[str]]:
+        abspath: str
+        if isinstance(rcc_file, str):
+            abspath = os.path.join(state.environment.source_dir, state.subdir, rcc_file)
+        else:
+            abspath = rcc_file.absolute_path(state.environment.source_dir, state.environment.build_dir)
+        rcc_dirname = os.path.dirname(abspath)
+
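+        # Illustrative (hypothetical file): a minimal qrc document such as
+        #
+        #   <RCC>
+        #     <qresource prefix="/">
+        #       <file>images/icon.png</file>
+        #     </qresource>
+        #   </RCC>
+        #
+        # parses below to (rcc_dirname, ['images/icon.png']).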
+        # FIXME: what error are we actually trying to check here? (probably parse errors?)
+        try:
+            tree = ET.parse(abspath)
+            root = tree.getroot()
+            result: T.List[str] = []
+            for child in root[0]:
+                if child.tag != 'file':
+                    mlog.warning("malformed rcc file: ", os.path.join(state.subdir, str(rcc_file)))
+                    break
+                elif child.text is None:
+                    raise MesonException(f'<file> element without a path in {os.path.join(state.subdir, str(rcc_file))}')
+                else:
+                    result.append(child.text)
+
+            return rcc_dirname, result
+        except MesonException:
+            raise
+        except Exception:
+            raise MesonException(f'Unable to parse resource file {abspath}')
+
+    def _parse_qrc_deps(self, state: 'ModuleState',
+                        rcc_file_: T.Union['FileOrString', build.CustomTarget, build.CustomTargetIndex, build.GeneratedList]) -> T.List[File]:
+        result: T.List[File] = []
+        inputs: T.Sequence['FileOrString'] = []
+        if isinstance(rcc_file_, (str, File)):
+            inputs = [rcc_file_]
+        else:
+            inputs = rcc_file_.get_outputs()
+
+        for rcc_file in inputs:
+            rcc_dirname, nodes = self._qrc_nodes(state, rcc_file)
+            for resource_path in nodes:
+                # We need to guess if the pointed resource is:
+                #   a) in build directory -> implies a generated file
+                #   b) in source directory
+                #   c) somewhere else external dependency file to bundle
+                #
+                # Also, per the qrc documentation: relative paths are always
+                # relative to the qrc file, so they must be resolved against
+                # the qrc file's directory.
+                if os.path.isabs(resource_path):
+                    # a)
+                    if resource_path.startswith(os.path.abspath(state.environment.build_dir)):
+                        resource_relpath = os.path.relpath(resource_path, state.environment.build_dir)
+                        result.append(File(is_built=True, subdir='', fname=resource_relpath))
+                    # either b) or c)
+                    else:
+                        result.append(File(is_built=False, subdir=state.subdir, fname=resource_path))
+                else:
+                    path_from_rcc = os.path.normpath(os.path.join(rcc_dirname, resource_path))
+                    # a)
+                    if path_from_rcc.startswith(state.environment.build_dir):
+                        result.append(File(is_built=True, subdir=state.subdir, fname=resource_path))
+                    # b)
+                    else:
+                        result.append(File(is_built=False, subdir=state.subdir, fname=path_from_rcc))
+        return result
+
+    @FeatureNew('qt.has_tools', '0.54.0')
+    @noPosargs
+    @typed_kwargs(
+        'qt.has_tools',
+        KwargInfo('required', (bool, coredata.UserFeatureOption), default=False),
+        KwargInfo('method', str, default='auto'),
+    )
+    def has_tools(self, state: 'ModuleState', args: T.Tuple, kwargs: 'HasToolKwArgs') -> bool:
+        method = kwargs.get('method', 'auto')
+        # We have to cast here because TypedDicts are invariant: even though
+        # ExtractRequiredKwArgs is a subset of HasToolKwArgs, type checkers
+        # will insist this is wrong.
+        disabled, required, feature = extract_required_kwarg(kwargs, state.subproject, default=False)
+        if disabled:
+            mlog.log('qt.has_tools skipped: feature', mlog.bold(feature), 'disabled')
+            return False
+        self._detect_tools(state, method, required=False)
+        for tool in self.tools.values():
+            if not tool.found():
+                if required:
+                    raise MesonException('Qt tools not found')
+                return False
+        return True
+
+    @FeatureNew('qt.compile_resources', '0.59.0')
+    @noPosargs
+    @typed_kwargs(
+        'qt.compile_resources',
+        KwargInfo('name', (str, NoneType)),
+        KwargInfo(
+            'sources',
+            ContainerTypeInfo(list, (File, str, build.CustomTarget, build.CustomTargetIndex, build.GeneratedList), allow_empty=False),
+            listify=True,
+            required=True,
+        ),
+        KwargInfo('extra_args', ContainerTypeInfo(list, str), listify=True, default=[]),
+        KwargInfo('method', str, default='auto')
+    )
+    def compile_resources(self, state: 'ModuleState', args: T.Tuple, kwargs: 'ResourceCompilerKwArgs') -> ModuleReturnValue:
+        """Compile Qt resources files.
+
+        Uses CustomTargets to generate .cpp files from .qrc files.
+        """
+        if any(isinstance(s, (build.CustomTarget, build.CustomTargetIndex, build.GeneratedList)) for s in kwargs['sources']):
+            FeatureNew.single_use('qt.compile_resources: custom_target or generator for "sources" keyword argument',
+                                  '0.60.0', state.subproject, location=state.current_node)
+        out = self._compile_resources_impl(state, kwargs)
+        return ModuleReturnValue(out, [out])
+
+    def _compile_resources_impl(self, state: 'ModuleState', kwargs: 'ResourceCompilerKwArgs') -> T.List[build.CustomTarget]:
+        # Avoid the FeatureNew when dispatching from preprocess
+        self._detect_tools(state, kwargs['method'])
+        if not self.tools['rcc'].found():
+            err_msg = ("{0} sources specified and couldn't find {1}, "
+                       "please check your qt{2} installation")
+            raise MesonException(err_msg.format('RCC', f'rcc-qt{self.qt_version}', self.qt_version))
+
+        # List of generated CustomTargets
+        targets: T.List[build.CustomTarget] = []
+
+        # depfile arguments
+        DEPFILE_ARGS: T.List[str] = ['--depfile', '@DEPFILE@'] if self._rcc_supports_depfiles else []
+
+        name = kwargs['name']
+        sources: T.List['FileOrString'] = []
+        for s in kwargs['sources']:
+            if isinstance(s, (str, File)):
+                sources.append(s)
+            else:
+                sources.extend(s.get_outputs())
+        extra_args = kwargs['extra_args']
+
+        # If a name was set, generate a single .cpp file from all of the qrc
+        # files; otherwise, generate one .cpp file per qrc file.
+        if name:
+            qrc_deps: T.List[File] = []
+            for s in sources:
+                qrc_deps.extend(self._parse_qrc_deps(state, s))
+
+            res_target = build.CustomTarget(
+                name,
+                state.subdir,
+                state.subproject,
+                state.environment,
+                self.tools['rcc'].get_command() + ['-name', name, '-o', '@OUTPUT@'] + extra_args + ['@INPUT@'] + DEPFILE_ARGS,
+                sources,
+                [f'{name}.cpp'],
+                depend_files=qrc_deps,
+                depfile=f'{name}.d',
+            )
+            targets.append(res_target)
+        else:
+            for rcc_file in sources:
+                qrc_deps = self._parse_qrc_deps(state, rcc_file)
+                if isinstance(rcc_file, str):
+                    basename = os.path.basename(rcc_file)
+                else:
+                    basename = os.path.basename(rcc_file.fname)
+                name = f'qt{self.qt_version}-{basename.replace(".", "_")}'
+                res_target = build.CustomTarget(
+                    name,
+                    state.subdir,
+                    state.subproject,
+                    state.environment,
+                    self.tools['rcc'].get_command() + ['-name', '@BASENAME@', '-o', '@OUTPUT@'] + extra_args + ['@INPUT@'] + DEPFILE_ARGS,
+                    [rcc_file],
+                    [f'{name}.cpp'],
+                    depend_files=qrc_deps,
+                    depfile=f'{name}.d',
+                )
+                targets.append(res_target)
+
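+        # Roughly (hypothetical values), the named-target command above is:
+        #   rcc -name myres -o @OUTPUT@ <extra_args> @INPUT@ --depfile @DEPFILE@
+        # with the '@...@' placeholders substituted by the backend.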
+        return targets
+
+    @FeatureNew('qt.compile_ui', '0.59.0')
+    @noPosargs
+    @typed_kwargs(
+        'qt.compile_ui',
+        KwargInfo(
+            'sources',
+            ContainerTypeInfo(list, (File, str, build.CustomTarget, build.CustomTargetIndex, build.GeneratedList), allow_empty=False),
+            listify=True,
+            required=True,
+        ),
+        KwargInfo('extra_args', ContainerTypeInfo(list, str), listify=True, default=[]),
+        KwargInfo('method', str, default='auto')
+    )
+    def compile_ui(self, state: 'ModuleState', args: T.Tuple, kwargs: 'UICompilerKwArgs') -> ModuleReturnValue:
+        """Compile UI resources into cpp headers."""
+        if any(isinstance(s, (build.CustomTarget, build.CustomTargetIndex, build.GeneratedList)) for s in kwargs['sources']):
+            FeatureNew.single_use('qt.compile_ui: custom_target or generator for "sources" keyword argument',
+                                  '0.60.0', state.subproject, location=state.current_node)
+        out = self._compile_ui_impl(state, kwargs)
+        return ModuleReturnValue(out, [out])
+
+    def _compile_ui_impl(self, state: 'ModuleState', kwargs: 'UICompilerKwArgs') -> build.GeneratedList:
+        # Avoid the FeatureNew when dispatching from preprocess
+        self._detect_tools(state, kwargs['method'])
+        if not self.tools['uic'].found():
+            err_msg = ("{0} sources specified and couldn't find {1}, "
+                       "please check your qt{2} installation")
+            raise MesonException(err_msg.format('UIC', f'uic-qt{self.qt_version}', self.qt_version))
+
+        # TODO: This generator isn't added to the generator list in the Interpreter
+        gen = build.Generator(
+            self.tools['uic'],
+            kwargs['extra_args'] + ['-o', '@OUTPUT@', '@INPUT@'],
+            ['ui_@BASENAME@.h'],
+            name=f'Qt{self.qt_version} ui')
+        return gen.process_files(kwargs['sources'], state)
+
+    @FeatureNew('qt.compile_moc', '0.59.0')
+    @noPosargs
+    @typed_kwargs(
+        'qt.compile_moc',
+        KwargInfo(
+            'sources',
+            ContainerTypeInfo(list, (File, str, build.CustomTarget, build.CustomTargetIndex, build.GeneratedList)),
+            listify=True,
+            default=[],
+        ),
+        KwargInfo(
+            'headers',
+            ContainerTypeInfo(list, (File, str, build.CustomTarget, build.CustomTargetIndex, build.GeneratedList)),
+            listify=True,
+            default=[]
+        ),
+        KwargInfo('extra_args', ContainerTypeInfo(list, str), listify=True, default=[]),
+        KwargInfo('method', str, default='auto'),
+        KwargInfo('include_directories', ContainerTypeInfo(list, (build.IncludeDirs, str)), listify=True, default=[]),
+        KwargInfo('dependencies', ContainerTypeInfo(list, (Dependency, ExternalLibrary)), listify=True, default=[]),
+    )
+    def compile_moc(self, state: 'ModuleState', args: T.Tuple, kwargs: 'MocCompilerKwArgs') -> ModuleReturnValue:
+        if any(isinstance(s, (build.CustomTarget, build.CustomTargetIndex, build.GeneratedList)) for s in kwargs['headers']):
+            FeatureNew.single_use('qt.compile_moc: custom_target or generator for "headers" keyword argument',
+                                  '0.60.0', state.subproject, location=state.current_node)
+        if any(isinstance(s, (build.CustomTarget, build.CustomTargetIndex, build.GeneratedList)) for s in kwargs['sources']):
+            FeatureNew.single_use('qt.compile_moc: custom_target or generator for "sources" keyword argument',
+                                  '0.60.0', state.subproject, location=state.current_node)
+        out = self._compile_moc_impl(state, kwargs)
+        return ModuleReturnValue(out, [out])
+
+    def _compile_moc_impl(self, state: 'ModuleState', kwargs: 'MocCompilerKwArgs') -> T.List[build.GeneratedList]:
+        # Avoid the FeatureNew when dispatching from preprocess
+        self._detect_tools(state, kwargs['method'])
+        if not self.tools['moc'].found():
+            err_msg = ("{0} sources specified and couldn't find {1}, "
+                       "please check your qt{2} installation")
+            raise MesonException(err_msg.format('MOC', f'moc-qt{self.qt_version}', self.qt_version))
+
+        if not (kwargs['headers'] or kwargs['sources']):
+            raise build.InvalidArguments('At least one of the "headers" or "sources" keyword arguments must be provided and not empty')
+
+        inc = state.get_include_args(include_dirs=kwargs['include_directories'])
+        compile_args: T.List[str] = []
+        for dep in kwargs['dependencies']:
+            compile_args.extend([a for a in dep.get_all_compile_args() if a.startswith(('-I', '-D'))])
+
+        output: T.List[build.GeneratedList] = []
+
+        # depfile arguments (defaults to .d)
+        DEPFILE_ARGS: T.List[str] = ['--output-dep-file'] if self._moc_supports_depfiles else []
+
+        arguments = kwargs['extra_args'] + DEPFILE_ARGS + inc + compile_args + ['@INPUT@', '-o', '@OUTPUT@']
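+        # For example (hypothetical values), a header is compiled roughly as:
+        #   moc --output-dep-file -I<incdir> -DFOO @INPUT@ -o @OUTPUT@
+        # producing moc_<basename>.cpp alongside its depfile.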
+        if kwargs['headers']:
+            moc_gen = build.Generator(
+                self.tools['moc'], arguments, ['moc_@BASENAME@.cpp'],
+                depfile='moc_@BASENAME@.cpp.d',
+                name=f'Qt{self.qt_version} moc header')
+            output.append(moc_gen.process_files(kwargs['headers'], state))
+        if kwargs['sources']:
+            moc_gen = build.Generator(
+                self.tools['moc'], arguments, ['@BASENAME@.moc'],
+                depfile='@BASENAME@.moc.d',
+                name=f'Qt{self.qt_version} moc source')
+            output.append(moc_gen.process_files(kwargs['sources'], state))
+
+        return output
+
+    # We can't use typed_pos_args here, the signature is ambiguous
+    @typed_kwargs(
+        'qt.preprocess',
+        KwargInfo('sources', ContainerTypeInfo(list, (File, str)), listify=True, default=[], deprecated='0.59.0'),
+        KwargInfo('qresources', ContainerTypeInfo(list, (File, str)), listify=True, default=[]),
+        KwargInfo('ui_files', ContainerTypeInfo(list, (File, str, build.CustomTarget)), listify=True, default=[]),
+        KwargInfo('moc_sources', ContainerTypeInfo(list, (File, str, build.CustomTarget)), listify=True, default=[]),
+        KwargInfo('moc_headers', ContainerTypeInfo(list, (File, str, build.CustomTarget)), listify=True, default=[]),
+        KwargInfo('moc_extra_arguments', ContainerTypeInfo(list, str), listify=True, default=[], since='0.44.0'),
+        KwargInfo('rcc_extra_arguments', ContainerTypeInfo(list, str), listify=True, default=[], since='0.49.0'),
+        KwargInfo('uic_extra_arguments', ContainerTypeInfo(list, str), listify=True, default=[], since='0.49.0'),
+        KwargInfo('method', str, default='auto'),
+        KwargInfo('include_directories', ContainerTypeInfo(list, (build.IncludeDirs, str)), listify=True, default=[]),
+        KwargInfo('dependencies', ContainerTypeInfo(list, (Dependency, ExternalLibrary)), listify=True, default=[]),
+    )
+    def preprocess(self, state: 'ModuleState', args: T.List[T.Union[str, File]], kwargs: 'PreprocessKwArgs') -> ModuleReturnValue:
+        _sources = args[1:]
+        if _sources:
+            FeatureDeprecated.single_use('qt.preprocess positional sources', '0.59', state.subproject, location=state.current_node)
+        # List is invariant, so we have to cast...
+        sources = T.cast('T.List[T.Union[str, File, build.GeneratedList, build.CustomTarget]]',
+                         _sources + kwargs['sources'])
+        for s in sources:
+            if not isinstance(s, (str, File)):
+                raise build.InvalidArguments('Variadic arguments to qt.preprocess must be Strings or Files')
+        method = kwargs['method']
+
+        if kwargs['qresources']:
+            # custom output name set? -> one output file, multiple otherwise
+            rcc_kwargs: 'ResourceCompilerKwArgs' = {'name': '', 'sources': kwargs['qresources'], 'extra_args': kwargs['rcc_extra_arguments'], 'method': method}
+            if args:
+                name = args[0]
+                if not isinstance(name, str):
+                    raise build.InvalidArguments('First argument to qt.preprocess must be a string')
+                rcc_kwargs['name'] = name
+            sources.extend(self._compile_resources_impl(state, rcc_kwargs))
+
+        if kwargs['ui_files']:
+            ui_kwargs: 'UICompilerKwArgs' = {'sources': kwargs['ui_files'], 'extra_args': kwargs['uic_extra_arguments'], 'method': method}
+            sources.append(self._compile_ui_impl(state, ui_kwargs))
+
+        if kwargs['moc_headers'] or kwargs['moc_sources']:
+            moc_kwargs: 'MocCompilerKwArgs' = {
+                'extra_args': kwargs['moc_extra_arguments'],
+                'sources': kwargs['moc_sources'],
+                'headers': kwargs['moc_headers'],
+                'include_directories': kwargs['include_directories'],
+                'dependencies': kwargs['dependencies'],
+                'method': method,
+            }
+            sources.extend(self._compile_moc_impl(state, moc_kwargs))
+
+        return ModuleReturnValue(sources, [sources])
+
+    @FeatureNew('qt.compile_translations', '0.44.0')
+    @noPosargs
+    @typed_kwargs(
+        'qt.compile_translations',
+        KwargInfo('build_by_default', bool, default=False),
+        INSTALL_KW,
+        INSTALL_DIR_KW,
+        KwargInfo('method', str, default='auto'),
+        KwargInfo('qresource', (str, NoneType), since='0.56.0'),
+        KwargInfo('rcc_extra_arguments', ContainerTypeInfo(list, str), listify=True, default=[], since='0.56.0'),
+        KwargInfo('ts_files', ContainerTypeInfo(list, (str, File, build.CustomTarget, build.CustomTargetIndex, build.GeneratedList)), listify=True, default=[]),
+    )
+    def compile_translations(self, state: 'ModuleState', args: T.Tuple, kwargs: 'CompileTranslationsKwArgs') -> ModuleReturnValue:
+        ts_files = kwargs['ts_files']
+        if any(isinstance(s, (build.CustomTarget, build.CustomTargetIndex, build.GeneratedList)) for s in ts_files):
+            FeatureNew.single_use('qt.compile_translations: custom_target or generator for "ts_files" keyword argument',
+                                  '0.60.0', state.subproject, location=state.current_node)
+        if kwargs['install'] and not kwargs['install_dir']:
+            raise MesonException('qt.compile_translations: "install_dir" keyword argument must be set when "install" is true.')
+        qresource = kwargs['qresource']
+        if qresource:
+            if ts_files:
+                raise MesonException('qt.compile_translations: Cannot specify both ts_files and qresource')
+            if os.path.dirname(qresource) != '':
+                raise MesonException('qt.compile_translations: qresource file name must not contain a subdirectory.')
+            qresource_file = File.from_built_file(state.subdir, qresource)
+            infile_abs = os.path.join(state.environment.source_dir, qresource_file.relative_name())
+            outfile_abs = os.path.join(state.environment.build_dir, qresource_file.relative_name())
+            os.makedirs(os.path.dirname(outfile_abs), exist_ok=True)
+            shutil.copy2(infile_abs, outfile_abs)
+            self.interpreter.add_build_def_file(infile_abs)
+
+            _, nodes = self._qrc_nodes(state, qresource_file)
+            for c in nodes:
+                if c.endswith('.qm'):
+                    # str.rstrip() strips a character set, not a suffix, so
+                    # slice off the '.qm' extension instead.
+                    ts_files.append(c[:-len('.qm')] + '.ts')
+                else:
+                    raise MesonException(f'qt.compile_translations: qresource can only contain qm files, found {c}')
+            results = self.preprocess(state, [], {'qresources': qresource_file, 'rcc_extra_arguments': kwargs['rcc_extra_arguments']})
+        self._detect_tools(state, kwargs['method'])
+        translations: T.List[build.CustomTarget] = []
+        for ts in ts_files:
+            if not self.tools['lrelease'].found():
+                raise MesonException('qt.compile_translations: ' +
+                                     self.tools['lrelease'].name + ' not found')
+            if qresource:
+                # In this case we know that ts_files is always a List[str], as
+                # it's generated above and no ts_files are passed in. However,
+                # mypy can't figure that out so we use assert to assure it that
+                # what we're doing is safe
+                assert isinstance(ts, str), 'for mypy'
+                outdir = os.path.dirname(os.path.normpath(os.path.join(state.subdir, ts)))
+                ts = os.path.basename(ts)
+            else:
+                outdir = state.subdir
+            cmd: T.List[T.Union[ExternalProgram, build.Executable, str]] = [self.tools['lrelease'], '@INPUT@', '-qm', '@OUTPUT@']
+            lrelease_target = build.CustomTarget(
+                f'qt{self.qt_version}-compile-{ts}',
+                outdir,
+                state.subproject,
+                state.environment,
+                cmd,
+                [ts],
+                ['@BASENAME@.qm'],
+                install=kwargs['install'],
+                install_dir=[kwargs['install_dir']],
+                install_tag=['i18n'],
+                build_by_default=kwargs['build_by_default'],
+            )
+            translations.append(lrelease_target)
+        if qresource:
+            return ModuleReturnValue(results.return_value[0], [results.new_objects, translations])
+        else:
+            return ModuleReturnValue(translations, [translations])
diff --git a/vendored-meson/meson/mesonbuild/modules/qt4.py b/vendored-meson/meson/mesonbuild/modules/qt4.py
new file mode 100644
index 000000000000..6bdf1c51db1f
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/modules/qt4.py
@@ -0,0 +1,34 @@
+# Copyright 2015 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import annotations
+import typing as T
+
+from .qt import QtBaseModule
+from . import ModuleInfo
+
+if T.TYPE_CHECKING:
+    from ..interpreter import Interpreter
+
+
+class Qt4Module(QtBaseModule):
+
+    INFO = ModuleInfo('qt4')
+
+    def __init__(self, interpreter: Interpreter):
+        QtBaseModule.__init__(self, interpreter, qt_version=4)
+
+
+def initialize(interp: Interpreter) -> Qt4Module:
+    return Qt4Module(interp)
diff --git a/vendored-meson/meson/mesonbuild/modules/qt5.py b/vendored-meson/meson/mesonbuild/modules/qt5.py
new file mode 100644
index 000000000000..d9f0a5ecd13d
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/modules/qt5.py
@@ -0,0 +1,34 @@
+# Copyright 2015 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import annotations
+import typing as T
+
+from .qt import QtBaseModule
+from . import ModuleInfo
+
+if T.TYPE_CHECKING:
+    from ..interpreter import Interpreter
+
+
+class Qt5Module(QtBaseModule):
+
+    INFO = ModuleInfo('qt5')
+
+    def __init__(self, interpreter: Interpreter):
+        QtBaseModule.__init__(self, interpreter, qt_version=5)
+
+
+def initialize(interp: Interpreter) -> Qt5Module:
+    return Qt5Module(interp)
diff --git a/vendored-meson/meson/mesonbuild/modules/qt6.py b/vendored-meson/meson/mesonbuild/modules/qt6.py
new file mode 100644
index 000000000000..cafc531af894
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/modules/qt6.py
@@ -0,0 +1,33 @@
+# Copyright 2020 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import annotations
+import typing as T
+
+from .qt import QtBaseModule
+from . import ModuleInfo
+
+if T.TYPE_CHECKING:
+    from ..interpreter import Interpreter
+
+class Qt6Module(QtBaseModule):
+
+    INFO = ModuleInfo('qt6', '0.57.0')
+
+    def __init__(self, interpreter: Interpreter):
+        QtBaseModule.__init__(self, interpreter, qt_version=6)
+
+
+def initialize(interp: Interpreter) -> Qt6Module:
+    return Qt6Module(interp)
diff --git a/vendored-meson/meson/mesonbuild/modules/rust.py b/vendored-meson/meson/mesonbuild/modules/rust.py
new file mode 100644
index 000000000000..3514412e65df
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/modules/rust.py
@@ -0,0 +1,272 @@
+# Copyright © 2020-2023 Intel Corporation
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+import os
+import typing as T
+
+from . import ExtensionModule, ModuleReturnValue, ModuleInfo
+from .. import mlog
+from ..build import BothLibraries, BuildTarget, CustomTargetIndex, Executable, ExtractedObjects, GeneratedList, IncludeDirs, CustomTarget, InvalidArguments, Jar, StructuredSources
+from ..compilers.compilers import are_asserts_disabled
+from ..dependencies import Dependency, ExternalLibrary
+from ..interpreter.type_checking import DEPENDENCIES_KW, LINK_WITH_KW, TEST_KWS, OUTPUT_KW, INCLUDE_DIRECTORIES
+from ..interpreterbase import ContainerTypeInfo, InterpreterException, KwargInfo, typed_kwargs, typed_pos_args, noPosargs
+from ..mesonlib import File
+
+if T.TYPE_CHECKING:
+    from . import ModuleState
+    from ..build import LibTypes
+    from ..interpreter import Interpreter
+    from ..interpreter import kwargs as _kwargs
+    from ..interpreter.interpreter import SourceInputs, SourceOutputs
+    from ..programs import ExternalProgram, OverrideProgram
+
+    from typing_extensions import TypedDict
+
+    class FuncTest(_kwargs.BaseTest):
+
+        dependencies: T.List[T.Union[Dependency, ExternalLibrary]]
+        is_parallel: bool
+        link_with: T.List[LibTypes]
+        rust_args: T.List[str]
+
+    class FuncBindgen(TypedDict):
+
+        args: T.List[str]
+        c_args: T.List[str]
+        include_directories: T.List[IncludeDirs]
+        input: T.List[SourceInputs]
+        output: str
+        dependencies: T.List[T.Union[Dependency, ExternalLibrary]]
+
+
+class RustModule(ExtensionModule):
+
+    """A module that holds helper functions for rust."""
+
+    INFO = ModuleInfo('rust', '0.57.0', stabilized='1.0.0')
+
+    def __init__(self, interpreter: Interpreter) -> None:
+        super().__init__(interpreter)
+        self._bindgen_bin: T.Optional[T.Union[ExternalProgram, Executable, OverrideProgram]] = None
+        self.methods.update({
+            'test': self.test,
+            'bindgen': self.bindgen,
+        })
+
+    @typed_pos_args('rust.test', str, BuildTarget)
+    @typed_kwargs(
+        'rust.test',
+        *TEST_KWS,
+        DEPENDENCIES_KW,
+        LINK_WITH_KW.evolve(since='1.2.0'),
+        KwargInfo(
+            'rust_args',
+            ContainerTypeInfo(list, str),
+            listify=True,
+            default=[],
+            since='1.2.0',
+        ),
+        KwargInfo('is_parallel', bool, default=False),
+    )
+    def test(self, state: ModuleState, args: T.Tuple[str, BuildTarget], kwargs: FuncTest) -> ModuleReturnValue:
+        """Generate a rust test target from a given rust target.
+
+        Rust puts its unit tests inside its main source files, unlike most
+        languages, which put them in external files. This means that normally
+        you have to define two separate targets with basically the same
+        arguments to get tests:
+
+        ```meson
+        rust_lib_sources = [...]
+        rust_lib = static_library(
+            'rust_lib',
+            rust_lib_sources,
+        )
+
+        rust_lib_test = executable(
+            'rust_lib_test',
+            rust_lib_sources,
+            rust_args : ['--test'],
+        )
+
+        test(
+            'rust_lib_test',
+            rust_lib_test,
+            protocol : 'rust',
+        )
+        ```
+
+        This is all fine, but not very DRY. This method makes it much easier
+        to define rust tests:
+
+        ```meson
+        rust = import('unstable-rust')
+
+        rust_lib = static_library(
+            'rust_lib',
+            [sources],
+        )
+
+        rust.test('rust_lib_test', rust_lib)
+        ```
+        """
+        if any(isinstance(t, Jar) for t in kwargs.get('link_with', [])):
+            raise InvalidArguments('Rust tests cannot link with Jar targets')
+
+        name = args[0]
+        base_target: BuildTarget = args[1]
+        if not base_target.uses_rust():
+            raise InterpreterException('Second positional argument to rustmod.test() must be a rust based target')
+        extra_args = kwargs['args']
+
+        # Delete any arguments we don't want passed
+        if '--test' in extra_args:
+            mlog.warning('Do not add --test to rustmod.test arguments')
+            extra_args.remove('--test')
+        if '--format' in extra_args:
+            mlog.warning('Do not add --format to rustmod.test arguments')
+            i = extra_args.index('--format')
+            # Also delete the argument to --format
+            del extra_args[i + 1]
+            del extra_args[i]
+        for i, a in enumerate(extra_args):
+            if isinstance(a, str) and a.startswith('--format='):
+                del extra_args[i]
+                break
+
+        # We need to cast here, as currently these don't have protocol in them, but test itself does.
+        tkwargs = T.cast('_kwargs.FuncTest', kwargs.copy())
+
+        tkwargs['args'] = extra_args + ['--test', '--format', 'pretty']
+        tkwargs['protocol'] = 'rust'
+
+        new_target_kwargs = base_target.original_kwargs.copy()
+        # Don't mutate the shallow copied list, instead replace it with a new
+        # one
+        new_target_kwargs['rust_args'] = \
+            new_target_kwargs.get('rust_args', []) + kwargs['rust_args'] + ['--test']
+        new_target_kwargs['install'] = False
+        new_target_kwargs['dependencies'] = new_target_kwargs.get('dependencies', []) + kwargs['dependencies']
+        new_target_kwargs['link_with'] = new_target_kwargs.get('link_with', []) + kwargs['link_with']
+
+        sources = T.cast('T.List[SourceOutputs]', base_target.sources.copy())
+        sources.extend(base_target.generated)
+
+        new_target = Executable(
+            name, base_target.subdir, state.subproject, base_target.for_machine,
+            sources, base_target.structured_sources,
+            base_target.objects, base_target.environment, base_target.compilers,
+            new_target_kwargs
+        )
+
+        test = self.interpreter.make_test(
+            self.interpreter.current_node, (name, new_target), tkwargs)
+
+        return ModuleReturnValue(None, [new_target, test])
+
+    @noPosargs
+    @typed_kwargs(
+        'rust.bindgen',
+        KwargInfo('c_args', ContainerTypeInfo(list, str), default=[], listify=True),
+        KwargInfo('args', ContainerTypeInfo(list, str), default=[], listify=True),
+        KwargInfo(
+            'input',
+            ContainerTypeInfo(list, (File, GeneratedList, BuildTarget, BothLibraries, ExtractedObjects, CustomTargetIndex, CustomTarget, str), allow_empty=False),
+            default=[],
+            listify=True,
+            required=True,
+        ),
+        INCLUDE_DIRECTORIES.evolve(since_values={ContainerTypeInfo(list, str): '1.0.0'}),
+        OUTPUT_KW,
+        DEPENDENCIES_KW.evolve(since='1.0.0'),
+    )
+    def bindgen(self, state: ModuleState, args: T.List, kwargs: FuncBindgen) -> ModuleReturnValue:
+        """Wrapper around bindgen to simplify it's use.
+
+        The main thing this simplifies is the use of `include_directory`
+        objects, instead of having to pass a plethora of `-I` arguments.
+        """
+        header, *_deps = self.interpreter.source_strings_to_files(kwargs['input'])
+
+        # Split File and Target dependencies to pass to CustomTarget
+        depends: T.List[SourceOutputs] = []
+        depend_files: T.List[File] = []
+        for d in _deps:
+            if isinstance(d, File):
+                depend_files.append(d)
+            else:
+                depends.append(d)
+
+        # Copy to avoid subsequent calls mutating the original
+        # TODO: if we want this to be per-machine we'll need a native kwarg
+        clang_args = state.environment.properties.host.get_bindgen_clang_args().copy()
+
+        for i in state.process_include_dirs(kwargs['include_directories']):
+            # bindgen always uses clang, so it's safe to hardcode -I here
+            clang_args.extend([f'-I{x}' for x in i.to_string_list(
+                state.environment.get_source_dir(), state.environment.get_build_dir())])
+        if are_asserts_disabled(state.environment.coredata.options):
+            clang_args.append('-DNDEBUG')
+
+        for de in kwargs['dependencies']:
+            for i in de.get_include_dirs():
+                clang_args.extend([f'-I{x}' for x in i.to_string_list(
+                    state.environment.get_source_dir(), state.environment.get_build_dir())])
+            clang_args.extend(de.get_all_compile_args())
+            for s in de.get_sources():
+                if isinstance(s, File):
+                    depend_files.append(s)
+                elif isinstance(s, CustomTarget):
+                    depends.append(s)
+
+        if self._bindgen_bin is None:
+            self._bindgen_bin = state.find_program('bindgen')
+
+        name: str
+        if isinstance(header, File):
+            name = header.fname
+        elif isinstance(header, (BuildTarget, BothLibraries, ExtractedObjects, StructuredSources)):
+            raise InterpreterException('bindgen source file must be a C header, not an object or build target')
+        else:
+            name = header.get_outputs()[0]
+
+        cmd = self._bindgen_bin.get_command() + \
+            [
+                '@INPUT@', '--output',
+                os.path.join(state.environment.build_dir, '@OUTPUT@')
+            ] + \
+            kwargs['args'] + ['--'] + kwargs['c_args'] + clang_args + \
+            ['-MD', '-MQ', '@INPUT@', '-MF', '@DEPFILE@']
+
+        target = CustomTarget(
+            f'rustmod-bindgen-{name}'.replace('/', '_'),
+            state.subdir,
+            state.subproject,
+            state.environment,
+            cmd,
+            [header],
+            [kwargs['output']],
+            depfile='@PLAINNAME@.d',
+            extra_depends=depends,
+            depend_files=depend_files,
+            backend=state.backend,
+        )
+
+        return ModuleReturnValue([target], [target])
+
+
+def initialize(interp: Interpreter) -> RustModule:
+    return RustModule(interp)
diff --git a/vendored-meson/meson/mesonbuild/modules/simd.py b/vendored-meson/meson/mesonbuild/modules/simd.py
new file mode 100644
index 000000000000..3ee0858b3287
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/modules/simd.py
@@ -0,0 +1,88 @@
+# Copyright 2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+from .. import mesonlib, compilers, mlog
+from .. import build
+
+from . import ExtensionModule, ModuleInfo
+
+class SimdModule(ExtensionModule):
+
+    INFO = ModuleInfo('SIMD', '0.42.0', unstable=True)
+
+    def __init__(self, interpreter):
+        super().__init__(interpreter)
+        # FIXME add Altivec and AVX512.
+        self.isets = ('mmx',
+                      'sse',
+                      'sse2',
+                      'sse3',
+                      'ssse3',
+                      'sse41',
+                      'sse42',
+                      'avx',
+                      'avx2',
+                      'neon',
+                      )
+        self.methods.update({
+            'check': self.check,
+        })
+
+    def check(self, state, args, kwargs):
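+        """Check compiler support for the given SIMD instruction sets.
+
+        Returns a list of two items: the static libraries built from the
+        per-instruction-set sources, and a ConfigurationData object with
+        HAVE_<ISET> entries. A minimal usage sketch from meson.build (file
+        names and the 'mysimds' prefix are illustrative; the module object
+        is assumed to be bound to `simd`):
+
+            rval = simd.check('mysimds',
+              mmx: 'simd_mmx.c',
+              compiler: meson.get_compiler('c'))
+            simd_libs = rval[0]
+            simd_cdata = rval[1]
+        """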
+        result = []
+        if len(args) != 1:
+            raise mesonlib.MesonException('Check requires one argument, a name prefix for checks.')
+        prefix = args[0]
+        if not isinstance(prefix, str):
+            raise mesonlib.MesonException('Argument must be a string.')
+        if 'compiler' not in kwargs:
+            raise mesonlib.MesonException('Must specify compiler keyword')
+        if 'sources' in kwargs:
+            raise mesonlib.MesonException('SIMD module does not support the "sources" keyword')
+        basic_kwargs = {}
+        for key, value in kwargs.items():
+            if key not in self.isets and key != 'compiler':
+                basic_kwargs[key] = value
+        compiler = kwargs['compiler']
+        if not isinstance(compiler, compilers.compilers.Compiler):
+            raise mesonlib.MesonException('Compiler argument must be a compiler object.')
+        conf = build.ConfigurationData()
+        for iset in self.isets:
+            if iset not in kwargs:
+                continue
+            iset_fname = kwargs[iset] # Might also be an array or Files. static_library will validate.
+            args = compiler.get_instruction_set_args(iset)
+            if args is None:
+                mlog.log('Compiler supports %s:' % iset, mlog.red('NO'))
+                continue
+            if args:
+                if not compiler.has_multi_arguments(args, state.environment)[0]:
+                    mlog.log('Compiler supports %s:' % iset, mlog.red('NO'))
+                    continue
+            mlog.log('Compiler supports %s:' % iset, mlog.green('YES'))
+            conf.values['HAVE_' + iset.upper()] = ('1', 'Compiler supports %s.' % iset)
+            libname = prefix + '_' + iset
+            lib_kwargs = {'sources': iset_fname,
+                          }
+            lib_kwargs.update(basic_kwargs)
+            langarg_key = compiler.get_language() + '_args'
+            old_lang_args = mesonlib.extract_as_list(lib_kwargs, langarg_key)
+            all_lang_args = old_lang_args + args
+            lib_kwargs[langarg_key] = all_lang_args
+            result.append(self.interpreter.func_static_lib(None, [libname], lib_kwargs))
+        return [result, conf]
+
+def initialize(*args, **kwargs):
+    return SimdModule(*args, **kwargs)
diff --git a/vendored-meson/meson/mesonbuild/modules/sourceset.py b/vendored-meson/meson/mesonbuild/modules/sourceset.py
new file mode 100644
index 000000000000..c35416e5a800
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/modules/sourceset.py
@@ -0,0 +1,307 @@
+# Copyright 2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import annotations
+import typing as T
+
+from . import ExtensionModule, ModuleObject, MutableModuleObject, ModuleInfo
+from .. import build
+from .. import dependencies
+from .. import mesonlib
+from ..interpreterbase import (
+    noPosargs, noKwargs,
+    InterpreterException, InvalidArguments, InvalidCode, FeatureNew,
+)
+from ..interpreterbase.decorators import ContainerTypeInfo, KwargInfo, typed_kwargs, typed_pos_args
+from ..mesonlib import OrderedSet
+
+if T.TYPE_CHECKING:
+    from typing_extensions import TypedDict
+
+    from . import ModuleState
+    from ..interpreter import Interpreter
+    from ..interpreterbase import TYPE_var, TYPE_kwargs
+
+    class AddKwargs(TypedDict):
+
+        when: T.List[T.Union[str, dependencies.Dependency]]
+        if_true: T.List[T.Union[mesonlib.FileOrString, build.GeneratedTypes, dependencies.Dependency]]
+        if_false: T.List[T.Union[mesonlib.FileOrString, build.GeneratedTypes]]
+
+    class AddAllKw(TypedDict):
+
+        when: T.List[T.Union[str, dependencies.Dependency]]
+        if_true: T.List[SourceSetImpl]
+
+    class ApplyKw(TypedDict):
+
+        strict: bool
+
+
+_WHEN_KW: KwargInfo[T.List[T.Union[str, dependencies.Dependency]]] = KwargInfo(
+    'when',
+    ContainerTypeInfo(list, (str, dependencies.Dependency)),
+    listify=True,
+    default=[],
+)
+
+
+class SourceSetRule(T.NamedTuple):
+    keys: T.List[str]
+    """Configuration keys that enable this rule if true"""
+
+    deps: T.List[dependencies.Dependency]
+    """Dependencies that enable this rule if true"""
+
+    sources: T.List[T.Union[mesonlib.FileOrString, build.GeneratedTypes]]
+    """Source files added when this rule's conditions are true"""
+
+    extra_deps: T.List[dependencies.Dependency]
+    """Dependencies added when this rule's conditions are true, but
+       that do not make the condition false if they're absent."""
+
+    sourcesets: T.List[SourceSetImpl]
+    """Other sourcesets added when this rule's conditions are true"""
+
+    if_false: T.List[T.Union[mesonlib.FileOrString, build.GeneratedTypes]]
+    """Source files added when this rule's conditions are false"""
+
+
+class SourceFiles(T.NamedTuple):
+    sources: OrderedSet[T.Union[mesonlib.FileOrString, build.GeneratedTypes]]
+    deps: OrderedSet[dependencies.Dependency]
+
+
+class SourceSet:
+    """Base class to avoid circular references.
+
+    For the sake of error messages, this class is called SourceSet, while
+    the actual implementation is SourceSetImpl.
+    """
+
+
+class SourceSetImpl(SourceSet, MutableModuleObject):
+    def __init__(self, interpreter: Interpreter):
+        super().__init__()
+        self.rules: T.List[SourceSetRule] = []
+        self.subproject = interpreter.subproject
+        self.environment = interpreter.environment
+        self.subdir = interpreter.subdir
+        self.frozen = False
+        self.methods.update({
+            'add': self.add_method,
+            'add_all': self.add_all_method,
+            'all_sources': self.all_sources_method,
+            'all_dependencies': self.all_dependencies_method,
+            'apply': self.apply_method,
+        })
+
+    def check_source_files(self, args: T.Sequence[T.Union[mesonlib.FileOrString, build.GeneratedTypes, dependencies.Dependency]],
+                           ) -> T.Tuple[T.List[T.Union[mesonlib.FileOrString, build.GeneratedTypes]], T.List[dependencies.Dependency]]:
+        sources: T.List[T.Union[mesonlib.FileOrString, build.GeneratedTypes]] = []
+        deps: T.List[dependencies.Dependency] = []
+        for x in args:
+            if isinstance(x, dependencies.Dependency):
+                deps.append(x)
+            else:
+                sources.append(x)
+        to_check: T.List[str] = []
+
+        # Get the actual output names to check
+        for s in sources:
+            if isinstance(s, str):
+                to_check.append(s)
+            elif isinstance(s, mesonlib.File):
+                to_check.append(s.fname)
+            else:
+                to_check.extend(s.get_outputs())
+        mesonlib.check_direntry_issues(to_check)
+        return sources, deps
+
+    def check_conditions(self, args: T.Sequence[T.Union[str, dependencies.Dependency]]
+                         ) -> T.Tuple[T.List[str], T.List[dependencies.Dependency]]:
+        keys: T.List[str] = []
+        deps: T.List[dependencies.Dependency] = []
+        for x in args:
+            if isinstance(x, str):
+                keys.append(x)
+            else:
+                deps.append(x)
+        return keys, deps
+
+    @typed_pos_args('sourceset.add', varargs=(str, mesonlib.File, build.GeneratedList, build.CustomTarget, build.CustomTargetIndex, dependencies.Dependency))
+    @typed_kwargs(
+        'sourceset.add',
+        _WHEN_KW,
+        KwargInfo(
+            'if_true',
+            ContainerTypeInfo(list, (str, mesonlib.File, build.GeneratedList, build.CustomTarget, build.CustomTargetIndex, dependencies.Dependency)),
+            listify=True,
+            default=[],
+        ),
+        KwargInfo(
+            'if_false',
+            ContainerTypeInfo(list, (str, mesonlib.File, build.GeneratedList, build.CustomTarget, build.CustomTargetIndex)),
+            listify=True,
+            default=[],
+        ),
+    )
+    def add_method(self, state: ModuleState,
+                   args: T.Tuple[T.List[T.Union[mesonlib.FileOrString, build.GeneratedTypes, dependencies.Dependency]]],
+                   kwargs: AddKwargs) -> None:
+        if self.frozen:
+            raise InvalidCode('Tried to use \'add\' after querying the source set')
+        when = kwargs['when']
+        if_true = kwargs['if_true']
+        if_false = kwargs['if_false']
+        if not any([when, if_true, if_false]):
+            if_true = args[0]
+        elif args[0]:
+            raise InterpreterException('add called with both positional and keyword arguments')
+        keys, dependencies = self.check_conditions(when)
+        sources, extra_deps = self.check_source_files(if_true)
+        if_false, _ = self.check_source_files(if_false)
+        self.rules.append(SourceSetRule(keys, dependencies, sources, extra_deps, [], if_false))
+
+    @typed_pos_args('sourceset.add_all', varargs=SourceSet)
+    @typed_kwargs(
+        'sourceset.add_all',
+        _WHEN_KW,
+        KwargInfo(
+            'if_true',
+            ContainerTypeInfo(list, SourceSet),
+            listify=True,
+            default=[],
+        )
+    )
+    def add_all_method(self, state: ModuleState, args: T.Tuple[T.List[SourceSetImpl]],
+                       kwargs: AddAllKw) -> None:
+        if self.frozen:
+            raise InvalidCode('Tried to use \'add_all\' after querying the source set')
+        when = kwargs['when']
+        if_true = kwargs['if_true']
+        if not when and not if_true:
+            if_true = args[0]
+        elif args[0]:
+            raise InterpreterException('add_all called with both positional and keyword arguments')
+        keys, dependencies = self.check_conditions(when)
+        for s in if_true:
+            if not isinstance(s, SourceSetImpl):
+                raise InvalidCode('Arguments to \'add_all\' after the first must be source sets')
+            s.frozen = True
+        self.rules.append(SourceSetRule(keys, dependencies, [], [], if_true, []))
+
+    def collect(self, enabled_fn: T.Callable[[str], bool],
+                all_sources: bool,
+                into: T.Optional['SourceFiles'] = None) -> SourceFiles:
+        if not into:
+            into = SourceFiles(OrderedSet(), OrderedSet())
+        for entry in self.rules:
+            if all(x.found() for x in entry.deps) and \
+               all(enabled_fn(key) for key in entry.keys):
+                into.sources.update(entry.sources)
+                into.deps.update(entry.deps)
+                into.deps.update(entry.extra_deps)
+                for ss in entry.sourcesets:
+                    ss.collect(enabled_fn, all_sources, into)
+                if not all_sources:
+                    continue
+            into.sources.update(entry.if_false)
+        return into
+
+    @noKwargs
+    @noPosargs
+    def all_sources_method(self, state: ModuleState, args: T.List[TYPE_var], kwargs: TYPE_kwargs
+                           ) -> T.List[T.Union[mesonlib.FileOrString, build.GeneratedTypes]]:
+        self.frozen = True
+        files = self.collect(lambda x: True, True)
+        return list(files.sources)
+
+    @noKwargs
+    @noPosargs
+    @FeatureNew('source_set.all_dependencies() method', '0.52.0')
+    def all_dependencies_method(self, state: ModuleState, args: T.List[TYPE_var], kwargs: TYPE_kwargs
+                                ) -> T.List[dependencies.Dependency]:
+        self.frozen = True
+        files = self.collect(lambda x: True, True)
+        return list(files.deps)
+
+    @typed_pos_args('sourceset.apply', (build.ConfigurationData, dict))
+    @typed_kwargs('sourceset.apply', KwargInfo('strict', bool, default=True))
+    def apply_method(self, state: ModuleState, args: T.Tuple[T.Union[build.ConfigurationData, T.Dict[str, TYPE_var]]], kwargs: ApplyKw) -> SourceFilesObject:
+        config_data = args[0]
+        self.frozen = True
+        strict = kwargs['strict']
+        if isinstance(config_data, dict):
+            def _get_from_config_data(key: str) -> bool:
+                assert isinstance(config_data, dict), 'for mypy'
+                if strict and key not in config_data:
+                    raise InterpreterException(f'Entry {key} not in configuration dictionary.')
+                return bool(config_data.get(key, False))
+        else:
+            config_cache: T.Dict[str, bool] = {}
+
+            def _get_from_config_data(key: str) -> bool:
+                assert isinstance(config_data, build.ConfigurationData), 'for mypy'
+                if key not in config_cache:
+                    if key in config_data:
+                        config_cache[key] = bool(config_data.get(key)[0])
+                    elif strict:
+                        raise InvalidArguments(f'sourceset.apply: key "{key}" not in passed configuration, and strict set.')
+                    else:
+                        config_cache[key] = False
+                return config_cache[key]
+
+        files = self.collect(_get_from_config_data, False)
+        res = SourceFilesObject(files)
+        return res
+
+class SourceFilesObject(ModuleObject):
+    def __init__(self, files: SourceFiles):
+        super().__init__()
+        self.files = files
+        self.methods.update({
+            'sources': self.sources_method,
+            'dependencies': self.dependencies_method,
+        })
+
+    @noPosargs
+    @noKwargs
+    def sources_method(self, state: ModuleState, args: T.List[TYPE_var], kwargs: TYPE_kwargs
+                       ) -> T.List[T.Union[mesonlib.FileOrString, build.GeneratedTypes]]:
+        return list(self.files.sources)
+
+    @noPosargs
+    @noKwargs
+    def dependencies_method(self, state: ModuleState, args: T.List[TYPE_var], kwargs: TYPE_kwargs
+                            ) -> T.List[dependencies.Dependency]:
+        return list(self.files.deps)
+
+class SourceSetModule(ExtensionModule):
+
+    INFO = ModuleInfo('sourceset', '0.51.0')
+
+    def __init__(self, interpreter: Interpreter):
+        super().__init__(interpreter)
+        self.methods.update({
+            'source_set': self.source_set,
+        })
+
+    @noKwargs
+    @noPosargs
+    def source_set(self, state: ModuleState, args: T.List[TYPE_var], kwargs: TYPE_kwargs) -> SourceSetImpl:
+        return SourceSetImpl(self.interpreter)
+
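+# A minimal usage sketch from a meson.build file (all names are
+# illustrative; the module object is assumed to be bound to `ss_mod`):
+#
+#     ss = ss_mod.source_set()
+#     ss.add(when: 'CONFIG_FOO', if_true: files('foo.c'))
+#     ss.add(files('main.c'))
+#     ssconf = ss.apply(config_h_data)
+#     executable('prog', ssconf.sources(),
+#                dependencies: ssconf.dependencies())
+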
+def initialize(interp: Interpreter) -> SourceSetModule:
+    return SourceSetModule(interp)
diff --git a/vendored-meson/meson/mesonbuild/modules/wayland.py b/vendored-meson/meson/mesonbuild/modules/wayland.py
new file mode 100644
index 000000000000..99f71d000979
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/modules/wayland.py
@@ -0,0 +1,160 @@
+# Copyright 2022 Mark Bolhuis <mark@bolhuis.dev>
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import annotations
+import os
+import typing as T
+
+from . import ExtensionModule, ModuleReturnValue, ModuleInfo
+from ..build import CustomTarget
+from ..interpreter.type_checking import NoneType, in_set_validator
+from ..interpreterbase import typed_pos_args, typed_kwargs, KwargInfo
+from ..mesonlib import File, MesonException
+
+if T.TYPE_CHECKING:
+    from typing_extensions import Literal, TypedDict
+
+    from . import ModuleState
+    from ..build import Executable
+    from ..dependencies import Dependency
+    from ..interpreter import Interpreter
+    from ..programs import ExternalProgram
+    from ..mesonlib import FileOrString
+
+    class ScanXML(TypedDict):
+
+        public: bool
+        client: bool
+        server: bool
+        include_core_only: bool
+
+    class FindProtocol(TypedDict):
+
+        state: Literal['stable', 'staging', 'unstable']
+        version: T.Optional[int]
+
+class WaylandModule(ExtensionModule):
+
+    INFO = ModuleInfo('wayland', '0.62.0', unstable=True)
+
+    def __init__(self, interpreter: Interpreter) -> None:
+        super().__init__(interpreter)
+
+        self.protocols_dep: T.Optional[Dependency] = None
+        self.pkgdatadir: T.Optional[str] = None
+        self.scanner_bin: T.Optional[T.Union[ExternalProgram, Executable]] = None
+
+        self.methods.update({
+            'scan_xml': self.scan_xml,
+            'find_protocol': self.find_protocol,
+        })
+
+    @typed_pos_args('wayland.scan_xml', varargs=(str, File), min_varargs=1)
+    @typed_kwargs(
+        'wayland.scan_xml',
+        KwargInfo('public', bool, default=False),
+        KwargInfo('client', bool, default=True),
+        KwargInfo('server', bool, default=False),
+        KwargInfo('include_core_only', bool, default=True, since='0.64.0'),
+    )
+    def scan_xml(self, state: ModuleState, args: T.Tuple[T.List[FileOrString]], kwargs: ScanXML) -> ModuleReturnValue:
+        if self.scanner_bin is None:
+            # The wayland-scanner from the BUILD machine must have the same
+            # version as the wayland libraries from the HOST machine.
+            dep = state.dependency('wayland-client')
+            self.scanner_bin = state.find_tool('wayland-scanner', 'wayland-scanner', 'wayland_scanner',
+                                               wanted=dep.version)
+
+        scope = 'public' if kwargs['public'] else 'private'
+        # We have to cast because mypy can't deduce these are literals
+        sides = [i for i in T.cast("T.List[Literal['client', 'server']]", ['client', 'server']) if kwargs[i]]
+        if not sides:
+            raise MesonException('At least one of client or server keyword argument must be set to true.')
+
+        xml_files = self.interpreter.source_strings_to_files(args[0])
+        targets: T.List[CustomTarget] = []
+        for xml_file in xml_files:
+            name = os.path.splitext(os.path.basename(xml_file.fname))[0]
+
+            code = CustomTarget(
+                f'{name}-protocol',
+                state.subdir,
+                state.subproject,
+                state.environment,
+                [self.scanner_bin, f'{scope}-code', '@INPUT@', '@OUTPUT@'],
+                [xml_file],
+                [f'{name}-protocol.c'],
+                backend=state.backend,
+            )
+            targets.append(code)
+
+            for side in sides:
+                command = [self.scanner_bin, f'{side}-header', '@INPUT@', '@OUTPUT@']
+                if kwargs['include_core_only']:
+                    command.append('--include-core-only')
+
+                header = CustomTarget(
+                    f'{name}-{side}-protocol',
+                    state.subdir,
+                    state.subproject,
+                    state.environment,
+                    command,
+                    [xml_file],
+                    [f'{name}-{side}-protocol.h'],
+                    backend=state.backend,
+                )
+                targets.append(header)
+
+        return ModuleReturnValue(targets, targets)
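+
+    # A minimal scan_xml sketch from a meson.build file ('my-protocol.xml'
+    # is illustrative; the module object is assumed to be bound to
+    # `wl_mod`). Each XML file yields a code target plus one header target
+    # per requested side:
+    #
+    #     generated = wl_mod.scan_xml('my-protocol.xml', server: true)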
+
+    @typed_pos_args('wayland.find_protocol', str)
+    @typed_kwargs(
+        'wayland.find_protocol',
+        KwargInfo('state', str, default='stable', validator=in_set_validator({'stable', 'staging', 'unstable'})),
+        KwargInfo('version', (int, NoneType)),
+    )
+    def find_protocol(self, state: ModuleState, args: T.Tuple[str], kwargs: FindProtocol) -> File:
+        base_name = args[0]
+        xml_state = kwargs['state']
+        version = kwargs['version']
+
+        if xml_state != 'stable' and version is None:
+            raise MesonException(f'{xml_state} protocols require a version number.')
+
+        if xml_state == 'stable' and version is not None:
+            raise MesonException('stable protocols do not require a version number.')
+
+        if self.protocols_dep is None:
+            self.protocols_dep = state.dependency('wayland-protocols')
+
+        if self.pkgdatadir is None:
+            self.pkgdatadir = self.protocols_dep.get_variable(pkgconfig='pkgdatadir', internal='pkgdatadir')
+
+        if xml_state == 'stable':
+            xml_name = f'{base_name}.xml'
+        elif xml_state == 'staging':
+            xml_name = f'{base_name}-v{version}.xml'
+        else:
+            xml_name = f'{base_name}-unstable-v{version}.xml'
+
+        path = os.path.join(self.pkgdatadir, xml_state, base_name, xml_name)
+
+        if not os.path.exists(path):
+            raise MesonException(f'The file {path} does not exist.')
+
+        return File.from_absolute_file(path)
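+
+    # A minimal find_protocol sketch (the protocol name and version are
+    # illustrative):
+    #
+    #     xml = wl_mod.find_protocol('xdg-decoration',
+    #       state: 'unstable', version: 1)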
+
+
+def initialize(interpreter: Interpreter) -> WaylandModule:
+    return WaylandModule(interpreter)
diff --git a/vendored-meson/meson/mesonbuild/modules/windows.py b/vendored-meson/meson/mesonbuild/modules/windows.py
new file mode 100644
index 000000000000..f9c7c57d80df
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/modules/windows.py
@@ -0,0 +1,214 @@
+# Copyright 2015 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+import enum
+import os
+import re
+import typing as T
+
+
+from . import ExtensionModule, ModuleInfo
+from . import ModuleReturnValue
+from .. import mesonlib, build
+from .. import mlog
+from ..interpreter.type_checking import DEPEND_FILES_KW, DEPENDS_KW, INCLUDE_DIRECTORIES
+from ..interpreterbase.decorators import ContainerTypeInfo, FeatureNew, KwargInfo, typed_kwargs, typed_pos_args
+from ..mesonlib import MachineChoice, MesonException
+from ..programs import ExternalProgram
+
+if T.TYPE_CHECKING:
+    from . import ModuleState
+    from ..compilers import Compiler
+    from ..interpreter import Interpreter
+
+    from typing_extensions import TypedDict
+
+    class CompileResources(TypedDict):
+
+        depend_files: T.List[mesonlib.FileOrString]
+        depends: T.List[T.Union[build.BuildTarget, build.CustomTarget]]
+        include_directories: T.List[T.Union[str, build.IncludeDirs]]
+        args: T.List[str]
+
+    class RcKwargs(TypedDict):
+        output: str
+        input: T.List[T.Union[mesonlib.FileOrString, build.CustomTargetIndex]]
+        depfile: T.Optional[str]
+        depend_files: T.List[mesonlib.FileOrString]
+        depends: T.List[T.Union[build.BuildTarget, build.CustomTarget]]
+        command: T.List[T.Union[str, ExternalProgram]]
+
+class ResourceCompilerType(enum.Enum):
+    windres = 1
+    rc = 2
+    wrc = 3
+
+class WindowsModule(ExtensionModule):
+
+    INFO = ModuleInfo('windows')
+
+    def __init__(self, interpreter: 'Interpreter'):
+        super().__init__(interpreter)
+        self._rescomp: T.Optional[T.Tuple[ExternalProgram, ResourceCompilerType]] = None
+        self.methods.update({
+            'compile_resources': self.compile_resources,
+        })
+
+    def detect_compiler(self, compilers: T.Dict[str, 'Compiler']) -> 'Compiler':
+        for l in ('c', 'cpp'):
+            if l in compilers:
+                return compilers[l]
+        raise MesonException('Resource compilation requires a C or C++ compiler.')
+
+    def _find_resource_compiler(self, state: 'ModuleState') -> T.Tuple[ExternalProgram, ResourceCompilerType]:
+        # FIXME: Does not handle `native: true` executables; see
+        # https://github.com/mesonbuild/meson/issues/1531
+        # Take a parameter instead of the hardcoded definition below
+        for_machine = MachineChoice.HOST
+
+        if self._rescomp:
+            return self._rescomp
+
+        # Will try cross / native file and then env var
+        rescomp = ExternalProgram.from_bin_list(state.environment, for_machine, 'windres')
+
+        if not rescomp or not rescomp.found():
+            comp = self.detect_compiler(state.environment.coredata.compilers[for_machine])
+            if comp.id in {'msvc', 'clang-cl', 'intel-cl'} or (comp.linker and comp.linker.id in {'link', 'lld-link'}):
+                # Microsoft compilers use rc irrespective of the frontend
+                rescomp = ExternalProgram('rc', silent=True)
+            else:
+                rescomp = ExternalProgram('windres', silent=True)
+
+        if not rescomp.found():
+            raise MesonException('Could not find Windows resource compiler')
+
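+        # Note: the for/else below reaches the else (and raises) only when
+        # none of the probe invocations matched a known compiler signature.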
+        for (arg, match, rc_type) in [
+                ('/?', '^.*Microsoft.*Resource Compiler.*$', ResourceCompilerType.rc),
+                ('/?', 'LLVM Resource Converter.*$', ResourceCompilerType.rc),
+                ('--version', '^.*GNU windres.*$', ResourceCompilerType.windres),
+                ('--version', '^.*Wine Resource Compiler.*$', ResourceCompilerType.wrc),
+        ]:
+            p, o, e = mesonlib.Popen_safe(rescomp.get_command() + [arg])
+            m = re.search(match, o, re.MULTILINE)
+            if m:
+                mlog.log('Windows resource compiler: %s' % m.group())
+                self._rescomp = (rescomp, rc_type)
+                break
+        else:
+            raise MesonException('Could not determine type of Windows resource compiler')
+
+        return self._rescomp
+
+    @typed_pos_args('windows.compile_resources', varargs=(str, mesonlib.File, build.CustomTarget, build.CustomTargetIndex), min_varargs=1)
+    @typed_kwargs(
+        'windows.compile_resources',
+        DEPEND_FILES_KW.evolve(since='0.47.0'),
+        DEPENDS_KW.evolve(since='0.47.0'),
+        INCLUDE_DIRECTORIES,
+        KwargInfo('args', ContainerTypeInfo(list, str), default=[], listify=True),
+    )
+    def compile_resources(self, state: 'ModuleState',
+                          args: T.Tuple[T.List[T.Union[str, mesonlib.File, build.CustomTarget, build.CustomTargetIndex]]],
+                          kwargs: 'CompileResources') -> ModuleReturnValue:
+        extra_args = kwargs['args'].copy()
+        wrc_depend_files = kwargs['depend_files']
+        wrc_depends = kwargs['depends']
+        for d in wrc_depends:
+            if isinstance(d, build.CustomTarget):
+                extra_args += state.get_include_args([
+                    build.IncludeDirs('', [], False, [os.path.join('@BUILD_ROOT@', self.interpreter.backend.get_target_dir(d))])
+                ])
+        extra_args += state.get_include_args(kwargs['include_directories'])
+
+        rescomp, rescomp_type = self._find_resource_compiler(state)
+        if rescomp_type == ResourceCompilerType.rc:
+            # RC is used to generate .res files, a special binary resource
+            # format, which can be passed directly to LINK (apparently LINK uses
+            # CVTRES internally to convert this to a COFF object)
+            suffix = 'res'
+            res_args = extra_args + ['/nologo', '/fo@OUTPUT@', '@INPUT@']
+        elif rescomp_type == ResourceCompilerType.windres:
+            # ld only supports object files, so windres is used to generate a
+            # COFF object
+            suffix = 'o'
+            res_args = extra_args + ['@INPUT@', '@OUTPUT@']
+
+            m = 'Argument {!r} has a space which may not work with windres due to ' \
+                'a MinGW bug: https://sourceware.org/bugzilla/show_bug.cgi?id=4933'
+            for arg in extra_args:
+                if ' ' in arg:
+                    mlog.warning(m.format(arg), fatal=False)
+        else:
+            suffix = 'o'
+            res_args = extra_args + ['@INPUT@', '-o', '@OUTPUT@']
+
+        res_targets: T.List[build.CustomTarget] = []
+
+        def get_names() -> T.Iterable[T.Tuple[str, str, T.Union[str, mesonlib.File, build.CustomTargetIndex]]]:
+            for src in args[0]:
+                if isinstance(src, str):
+                    yield os.path.join(state.subdir, src), src, src
+                elif isinstance(src, mesonlib.File):
+                    yield src.relative_name(), src.fname, src
+                elif isinstance(src, build.CustomTargetIndex):
+                    FeatureNew.single_use('windows.compile_resource CustomTargetIndex in positional arguments', '0.61.0',
+                                          state.subproject, location=state.current_node)
+                    # This dance avoids a case where two indexes of the same
+                    # target are given as separate arguments.
+                    yield (f'{src.get_id()}_{src.target.get_outputs().index(src.output)}',
+                           f'windows_compile_resources_{src.get_filename()}', src)
+                else:
+                    if len(src.get_outputs()) > 1:
+                        FeatureNew.single_use('windows.compile_resource CustomTarget with multiple outputs in positional arguments',
+                                              '0.61.0', state.subproject, location=state.current_node)
+                    for i, out in enumerate(src.get_outputs()):
+                        # Chances are that src.get_filename() is already the name of that
+                        # target; add a prefix to avoid a name clash.
+                        yield f'{src.get_id()}_{i}', f'windows_compile_resources_{i}_{out}', src[i]
+
+        for name, name_formatted, src in get_names():
+            # Path separators are not allowed in target names
+            name = name.replace('/', '_').replace('\\', '_').replace(':', '_')
+            name_formatted = name_formatted.replace('/', '_').replace('\\', '_').replace(':', '_')
+            output = f'{name}_@BASENAME@.{suffix}'
+            command: T.List[T.Union[str, ExternalProgram]] = []
+            command.append(rescomp)
+            command.extend(res_args)
+            depfile: T.Optional[str] = None
+            # instruct binutils windres to generate a preprocessor depfile
+            if rescomp_type == ResourceCompilerType.windres:
+                depfile = f'{output}.d'
+                command.extend(['--preprocessor-arg=-MD',
+                                '--preprocessor-arg=-MQ@OUTPUT@',
+                                '--preprocessor-arg=-MF@DEPFILE@'])
+
+            res_targets.append(build.CustomTarget(
+                name_formatted,
+                state.subdir,
+                state.subproject,
+                state.environment,
+                command,
+                [src],
+                [output],
+                depfile=depfile,
+                depend_files=wrc_depend_files,
+                extra_depends=wrc_depends,
+            ))
+
+        return ModuleReturnValue(res_targets, [res_targets])
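+
+    # A minimal usage sketch from a meson.build file ('prog.rc' is
+    # illustrative; the module object is assumed to be bound to `win_mod`):
+    #
+    #     res = win_mod.compile_resources('prog.rc')
+    #     executable('prog', 'main.c', res)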
+
+def initialize(interp: 'Interpreter') -> WindowsModule:
+    return WindowsModule(interp)
diff --git a/vendored-meson/meson/mesonbuild/mparser.py b/vendored-meson/meson/mesonbuild/mparser.py
new file mode 100644
index 000000000000..85f1ef33afe1
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/mparser.py
@@ -0,0 +1,954 @@
+# Copyright 2014-2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+from dataclasses import dataclass, field
+import re
+import codecs
+import os
+import typing as T
+
+from .mesonlib import MesonException
+from . import mlog
+
+if T.TYPE_CHECKING:
+    from typing_extensions import Literal
+
+    from .ast import AstVisitor
+
+# This is the regex for the supported escape sequences of a regular string
+# literal, like 'abc\x00'
+ESCAPE_SEQUENCE_SINGLE_RE = re.compile(r'''
+    ( \\U[A-Fa-f0-9]{8}   # 8-digit hex escapes
+    | \\u[A-Fa-f0-9]{4}   # 4-digit hex escapes
+    | \\x[A-Fa-f0-9]{2}   # 2-digit hex escapes
+    | \\[0-7]{1,3}        # Octal escapes
+    | \\N\{[^}]+\}        # Unicode characters by name
+    | \\[\\'abfnrtv]      # Single-character escapes
+    )''', re.UNICODE | re.VERBOSE)
+
+def decode_match(match: T.Match[str]) -> str:
+    return codecs.decode(match.group(0).encode(), 'unicode_escape')
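+
+# For example, ESCAPE_SEQUENCE_SINGLE_RE.sub(decode_match, r"tab:\tend")
+# rewrites the two-character sequence '\' 't' into a real tab character,
+# mirroring what Python itself does for '\t' inside a string literal.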
+
+class ParseException(MesonException):
+
+    ast: T.Optional[CodeBlockNode] = None
+
+    def __init__(self, text: str, line: str, lineno: int, colno: int) -> None:
+        # Format as error message, followed by the line with the error, followed by a caret to show the error column.
+        super().__init__(mlog.code_line(text, line, colno))
+        self.lineno = lineno
+        self.colno = colno
+
+class BlockParseException(ParseException):
+    def __init__(
+                self,
+                text: str,
+                line: str,
+                lineno: int,
+                colno: int,
+                start_line: str,
+                start_lineno: int,
+                start_colno: int,
+            ) -> None:
+        # This can be formatted in two ways - one if the block start and end are on the same line, and a different way if they are on different lines.
+
+        if lineno == start_lineno:
+            # If block start and end are on the same line, it is formatted as:
+            # Error message
+            # Followed by the line with the error
+            # Followed by a caret to show the block start
+            # Followed by underscores
+            # Followed by a caret to show the block end.
+            MesonException.__init__(self, "{}\n{}\n{}".format(text, line, '{}^{}^'.format(' ' * start_colno, '_' * (colno - start_colno - 1))))
+        else:
+            # If block start and end are on different lines, it is formatted as:
+            # Error message
+            # Followed by the line with the error
+            # Followed by a caret to show the error column.
+            # Followed by a message saying where the block started.
+            # Followed by the line of the block start.
+            # Followed by a caret for the block start.
+            MesonException.__init__(self, "%s\n%s\n%s\nFor a block that started at %d,%d\n%s\n%s" % (text, line, '%s^' % (' ' * colno), start_lineno, start_colno, start_line, "%s^" % (' ' * start_colno)))
+        self.lineno = lineno
+        self.colno = colno
+
+TV_TokenTypes = T.TypeVar('TV_TokenTypes', int, str, bool)
+
+@dataclass(eq=False)
+class Token(T.Generic[TV_TokenTypes]):
+    tid: str
+    filename: str
+    line_start: int
+    lineno: int
+    colno: int
+    bytespan: T.Tuple[int, int]
+    value: TV_TokenTypes
+
+    def __eq__(self, other: object) -> bool:
+        if isinstance(other, str):
+            return self.tid == other
+        elif isinstance(other, Token):
+            return self.tid == other.tid
+        return NotImplemented
+
+class Lexer:
+    def __init__(self, code: str):
+        self.code = code
+        self.keywords = {'true', 'false', 'if', 'else', 'elif',
+                         'endif', 'and', 'or', 'not', 'foreach', 'endforeach',
+                         'in', 'continue', 'break'}
+        self.future_keywords = {'return'}
+        self.in_unit_test = 'MESON_RUNNING_IN_PROJECT_TESTS' in os.environ
+        if self.in_unit_test:
+            self.keywords.update({'testcase', 'endtestcase'})
+        self.token_specification = [
+            # These must be ordered so that longer matches come before shorter ones (e.g. '==' before '=').
+            ('ignore', re.compile(r'[ \t]')),
+            ('multiline_fstring', re.compile(r"f'''(.|\n)*?'''", re.M)),
+            ('fstring', re.compile(r"f'([^'\\]|(\\.))*'")),
+            ('id', re.compile('[_a-zA-Z][_0-9a-zA-Z]*')),
+            ('number', re.compile(r'0[bB][01]+|0[oO][0-7]+|0[xX][0-9a-fA-F]+|0|[1-9]\d*')),
+            ('eol_cont', re.compile(r'\\\n')),
+            ('eol', re.compile(r'\n')),
+            ('multiline_string', re.compile(r"'''(.|\n)*?'''", re.M)),
+            ('comment', re.compile(r'#.*')),
+            ('lparen', re.compile(r'\(')),
+            ('rparen', re.compile(r'\)')),
+            ('lbracket', re.compile(r'\[')),
+            ('rbracket', re.compile(r'\]')),
+            ('lcurl', re.compile(r'\{')),
+            ('rcurl', re.compile(r'\}')),
+            ('dblquote', re.compile(r'"')),
+            ('string', re.compile(r"'([^'\\]|(\\.))*'")),
+            ('comma', re.compile(r',')),
+            ('plusassign', re.compile(r'\+=')),
+            ('dot', re.compile(r'\.')),
+            ('plus', re.compile(r'\+')),
+            ('dash', re.compile(r'-')),
+            ('star', re.compile(r'\*')),
+            ('percent', re.compile(r'%')),
+            ('fslash', re.compile(r'/')),
+            ('colon', re.compile(r':')),
+            ('equal', re.compile(r'==')),
+            ('nequal', re.compile(r'!=')),
+            ('assign', re.compile(r'=')),
+            ('le', re.compile(r'<=')),
+            ('lt', re.compile(r'<')),
+            ('ge', re.compile(r'>=')),
+            ('gt', re.compile(r'>')),
+            ('questionmark', re.compile(r'\?')),
+        ]
+
+    def getline(self, line_start: int) -> str:
+        return self.code[line_start:self.code.find('\n', line_start)]
+
+    def lex(self, filename: str) -> T.Generator[Token, None, None]:
+        line_start = 0
+        lineno = 1
+        loc = 0
+        par_count = 0
+        bracket_count = 0
+        curl_count = 0
+        col = 0
+        while loc < len(self.code):
+            matched = False
+            value = None  # type: T.Union[str, bool, int]
+            for (tid, reg) in self.token_specification:
+                mo = reg.match(self.code, loc)
+                if mo:
+                    curline = lineno
+                    curline_start = line_start
+                    col = mo.start() - line_start
+                    matched = True
+                    span_start = loc
+                    loc = mo.end()
+                    span_end = loc
+                    bytespan = (span_start, span_end)
+                    match_text = mo.group()
+                    if tid in {'ignore', 'comment'}:
+                        break
+                    elif tid == 'lparen':
+                        par_count += 1
+                    elif tid == 'rparen':
+                        par_count -= 1
+                    elif tid == 'lbracket':
+                        bracket_count += 1
+                    elif tid == 'rbracket':
+                        bracket_count -= 1
+                    elif tid == 'lcurl':
+                        curl_count += 1
+                    elif tid == 'rcurl':
+                        curl_count -= 1
+                    elif tid == 'dblquote':
+                        raise ParseException('Double quotes are not supported. Use single quotes.', self.getline(line_start), lineno, col)
+                    elif tid in {'string', 'fstring'}:
+                        # Handle this here rather than in the regexp to give a better error message.
+                        if match_text.find("\n") != -1:
+                            msg = ("Newline character in a string detected, use ''' (three single quotes) "
+                                   "for multiline strings instead.\n"
+                                   "This will become a hard error in a future Meson release.")
+                            mlog.warning(mlog.code_line(msg, self.getline(line_start), col), location=BaseNode(lineno, col, filename))
+                        value = match_text[2 if tid == 'fstring' else 1:-1]
+                        value = ESCAPE_SEQUENCE_SINGLE_RE.sub(decode_match, value)
+                    elif tid in {'multiline_string', 'multiline_fstring'}:
+                        # For multiline strings, parse out the value and pass
+                        # through the normal string logic.
+                        # For multiline format strings, we have to emit a
+                        # different AST node so we can add a feature check,
+                        # but otherwise, it follows the normal fstring logic.
+                        if tid == 'multiline_string':
+                            value = match_text[3:-3]
+                            tid = 'string'
+                        else:
+                            value = match_text[4:-3]
+                        lines = match_text.split('\n')
+                        if len(lines) > 1:
+                            lineno += len(lines) - 1
+                            line_start = mo.end() - len(lines[-1])
+                    elif tid == 'number':
+                        value = int(match_text, base=0)
+                    elif tid == 'eol_cont':
+                        lineno += 1
+                        line_start = loc
+                        break
+                    elif tid == 'eol':
+                        lineno += 1
+                        line_start = loc
+                        if par_count > 0 or bracket_count > 0 or curl_count > 0:
+                            break
+                    elif tid == 'id':
+                        if match_text in self.keywords:
+                            tid = match_text
+                        else:
+                            if match_text in self.future_keywords:
+                                mlog.warning(f"Identifier '{match_text}' will become a reserved keyword in a future release. Please rename it.",
+                                             location=BaseNode(lineno, col, filename))
+                            value = match_text
+                    yield Token(tid, filename, curline_start, curline, col, bytespan, value)
+                    break
+            if not matched:
+                raise ParseException('lexer', self.getline(line_start), lineno, col)
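+
+# A minimal sketch of driving the lexer directly (the code snippet and the
+# file name are illustrative):
+#
+#     for tok in Lexer("project('demo', 'c')\n").lex('meson.build'):
+#         print(tok.tid, tok.value)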
+
+@dataclass
+class BaseNode:
+    lineno: int
+    colno: int
+    filename: str = field(hash=False)
+    end_lineno: int = field(hash=False)
+    end_colno: int = field(hash=False)
+
+    def __init__(self, lineno: int, colno: int, filename: str, end_lineno: T.Optional[int] = None, end_colno: T.Optional[int] = None) -> None:
+        self.lineno = lineno
+        self.colno = colno
+        self.filename = filename
+        self.end_lineno = end_lineno if end_lineno is not None else lineno
+        self.end_colno = end_colno if end_colno is not None else colno
+
+        # Attributes for the visitors
+        self.level = 0
+        self.ast_id = ''
+        self.condition_level = 0
+
+    def accept(self, visitor: 'AstVisitor') -> None:
+        fname = 'visit_{}'.format(type(self).__name__)
+        if hasattr(visitor, fname):
+            func = getattr(visitor, fname)
+            if callable(func):
+                func(self)
+
+@dataclass(unsafe_hash=True)
+class ElementaryNode(T.Generic[TV_TokenTypes], BaseNode):
+
+    value: TV_TokenTypes
+    bytespan: T.Tuple[int, int] = field(hash=False)
+
+    def __init__(self, token: Token[TV_TokenTypes]):
+        super().__init__(token.lineno, token.colno, token.filename)
+        self.value = token.value
+        self.bytespan = token.bytespan
+
+class BooleanNode(ElementaryNode[bool]):
+    pass
+
+class IdNode(ElementaryNode[str]):
+    def __str__(self) -> str:
+        return "Id node: '%s' (%d, %d)." % (self.value, self.lineno, self.colno)
+
+class NumberNode(ElementaryNode[int]):
+    pass
+
+class StringNode(ElementaryNode[str]):
+    def __str__(self) -> str:
+        return "String node: '%s' (%d, %d)." % (self.value, self.lineno, self.colno)
+
+class FormatStringNode(ElementaryNode[str]):
+    def __str__(self) -> str:
+        return f"Format string node: '{self.value}' ({self.lineno}, {self.colno})."
+
+class MultilineFormatStringNode(FormatStringNode):
+    def __str__(self) -> str:
+        return f"Multiline Format string node: '{self.value}' ({self.lineno}, {self.colno})."
+
+class ContinueNode(ElementaryNode):
+    pass
+
+class BreakNode(ElementaryNode):
+    pass
+
+@dataclass(unsafe_hash=True)
+class ArgumentNode(BaseNode):
+
+    arguments: T.List[BaseNode] = field(hash=False)
+    commas: T.List[Token] = field(hash=False)
+    kwargs: T.Dict[BaseNode, BaseNode] = field(hash=False)
+
+    def __init__(self, token: Token[TV_TokenTypes]):
+        super().__init__(token.lineno, token.colno, token.filename)
+        self.arguments = []
+        self.commas = []
+        self.kwargs = {}
+        self.order_error = False
+
+    def prepend(self, statement: BaseNode) -> None:
+        if self.num_kwargs() > 0:
+            self.order_error = True
+        if not isinstance(statement, EmptyNode):
+            self.arguments = [statement] + self.arguments
+
+    def append(self, statement: BaseNode) -> None:
+        if self.num_kwargs() > 0:
+            self.order_error = True
+        if not isinstance(statement, EmptyNode):
+            self.arguments += [statement]
+
+    def set_kwarg(self, name: IdNode, value: BaseNode) -> None:
+        if any((isinstance(x, IdNode) and name.value == x.value) for x in self.kwargs):
+            mlog.warning(f'Keyword argument "{name.value}" defined multiple times.', location=self)
+            mlog.warning('This will be an error in future Meson releases.')
+        self.kwargs[name] = value
+
+    def set_kwarg_no_check(self, name: BaseNode, value: BaseNode) -> None:
+        self.kwargs[name] = value
+
+    def num_args(self) -> int:
+        return len(self.arguments)
+
+    def num_kwargs(self) -> int:
+        return len(self.kwargs)
+
+    def incorrect_order(self) -> bool:
+        return self.order_error
+
+    def __len__(self) -> int:
+        return self.num_args() # Fixme
+
+@dataclass(unsafe_hash=True)
+class ArrayNode(BaseNode):
+
+    args: ArgumentNode
+
+    def __init__(self, args: ArgumentNode, lineno: int, colno: int, end_lineno: int, end_colno: int):
+        super().__init__(lineno, colno, args.filename, end_lineno=end_lineno, end_colno=end_colno)
+        self.args = args
+
+@dataclass(unsafe_hash=True)
+class DictNode(BaseNode):
+
+    args: ArgumentNode
+
+    def __init__(self, args: ArgumentNode, lineno: int, colno: int, end_lineno: int, end_colno: int):
+        super().__init__(lineno, colno, args.filename, end_lineno=end_lineno, end_colno=end_colno)
+        self.args = args
+
+class EmptyNode(BaseNode):
+    pass
+
+@dataclass(unsafe_hash=True)
+class OrNode(BaseNode):
+
+    left: BaseNode
+    right: BaseNode
+
+    def __init__(self, left: BaseNode, right: BaseNode):
+        super().__init__(left.lineno, left.colno, left.filename)
+        self.left = left
+        self.right = right
+
+@dataclass(unsafe_hash=True)
+class AndNode(BaseNode):
+
+    left: BaseNode
+    right: BaseNode
+
+    def __init__(self, left: BaseNode, right: BaseNode):
+        super().__init__(left.lineno, left.colno, left.filename)
+        self.left = left
+        self.right = right
+
+@dataclass(unsafe_hash=True)
+class ComparisonNode(BaseNode):
+
+    left: BaseNode
+    right: BaseNode
+    ctype: COMPARISONS
+
+    def __init__(self, ctype: COMPARISONS, left: BaseNode, right: BaseNode):
+        super().__init__(left.lineno, left.colno, left.filename)
+        self.left = left
+        self.right = right
+        self.ctype = ctype
+
+@dataclass(unsafe_hash=True)
+class ArithmeticNode(BaseNode):
+
+    left: BaseNode
+    right: BaseNode
+    # TODO: use a Literal for operation
+    operation: str
+
+    def __init__(self, operation: str, left: BaseNode, right: BaseNode):
+        super().__init__(left.lineno, left.colno, left.filename)
+        self.left = left
+        self.right = right
+        self.operation = operation
+
+
+@dataclass(unsafe_hash=True)
+class NotNode(BaseNode):
+
+    value: BaseNode
+
+    def __init__(self, token: Token[TV_TokenTypes], value: BaseNode):
+        super().__init__(token.lineno, token.colno, token.filename)
+        self.value = value
+
+@dataclass(unsafe_hash=True)
+class CodeBlockNode(BaseNode):
+
+    lines: T.List[BaseNode] = field(hash=False)
+
+    def __init__(self, token: Token[TV_TokenTypes]):
+        super().__init__(token.lineno, token.colno, token.filename)
+        self.lines = []
+
+@dataclass(unsafe_hash=True)
+class IndexNode(BaseNode):
+
+    iobject: BaseNode
+    index: BaseNode
+
+    def __init__(self, iobject: BaseNode, index: BaseNode):
+        super().__init__(iobject.lineno, iobject.colno, iobject.filename)
+        self.iobject = iobject
+        self.index = index
+
+@dataclass(unsafe_hash=True)
+class MethodNode(BaseNode):
+
+    source_object: BaseNode
+    name: str
+    args: ArgumentNode
+
+    def __init__(self, filename: str, lineno: int, colno: int, source_object: BaseNode, name: str, args: ArgumentNode):
+        super().__init__(lineno, colno, filename)
+        self.source_object = source_object
+        self.name = name
+        assert isinstance(self.name, str)
+        self.args = args
+
+@dataclass(unsafe_hash=True)
+class FunctionNode(BaseNode):
+
+    func_name: str
+    args: ArgumentNode
+
+    def __init__(self, filename: str, lineno: int, colno: int, end_lineno: int, end_colno: int, func_name: str, args: ArgumentNode):
+        super().__init__(lineno, colno, filename, end_lineno=end_lineno, end_colno=end_colno)
+        self.func_name = func_name
+        assert isinstance(func_name, str)
+        self.args = args
+
+
+@dataclass(unsafe_hash=True)
+class AssignmentNode(BaseNode):
+
+    var_name: str
+    value: BaseNode
+
+    def __init__(self, filename: str, lineno: int, colno: int, var_name: str, value: BaseNode):
+        super().__init__(lineno, colno, filename)
+        self.var_name = var_name
+        assert isinstance(var_name, str)
+        self.value = value
+
+
+@dataclass(unsafe_hash=True)
+class PlusAssignmentNode(BaseNode):
+
+    var_name: str
+    value: BaseNode
+
+    def __init__(self, filename: str, lineno: int, colno: int, var_name: str, value: BaseNode):
+        super().__init__(lineno, colno, filename)
+        self.var_name = var_name
+        assert isinstance(var_name, str)
+        self.value = value
+
+
+@dataclass(unsafe_hash=True)
+class ForeachClauseNode(BaseNode):
+
+    varnames: T.List[str] = field(hash=False)
+    items: BaseNode
+    block: CodeBlockNode
+
+    def __init__(self, token: Token, varnames: T.List[str], items: BaseNode, block: CodeBlockNode):
+        super().__init__(token.lineno, token.colno, token.filename)
+        self.varnames = varnames
+        self.items = items
+        self.block = block
+
+
+@dataclass(unsafe_hash=True)
+class IfNode(BaseNode):
+
+    condition: BaseNode
+    block: CodeBlockNode
+
+    def __init__(self, linenode: BaseNode, condition: BaseNode, block: CodeBlockNode):
+        super().__init__(linenode.lineno, linenode.colno, linenode.filename)
+        self.condition = condition
+        self.block = block
+
+
+@dataclass(unsafe_hash=True)
+class IfClauseNode(BaseNode):
+
+    ifs: T.List[IfNode] = field(hash=False)
+    elseblock: T.Union[EmptyNode, CodeBlockNode]
+
+    def __init__(self, linenode: BaseNode):
+        super().__init__(linenode.lineno, linenode.colno, linenode.filename)
+        self.ifs = []
+        self.elseblock = None
+
+@dataclass(unsafe_hash=True)
+class TestCaseClauseNode(BaseNode):
+
+    condition: BaseNode
+    block: CodeBlockNode
+
+    def __init__(self, condition: BaseNode, block: CodeBlockNode):
+        super().__init__(condition.lineno, condition.colno, condition.filename)
+        self.condition = condition
+        self.block = block
+
+@dataclass(unsafe_hash=True)
+class UMinusNode(BaseNode):
+
+    value: BaseNode
+
+    def __init__(self, current_location: Token, value: BaseNode):
+        super().__init__(current_location.lineno, current_location.colno, current_location.filename)
+        self.value = value
+
+
+@dataclass(unsafe_hash=True)
+class TernaryNode(BaseNode):
+
+    condition: BaseNode
+    trueblock: BaseNode
+    falseblock: BaseNode
+
+    def __init__(self, condition: BaseNode, trueblock: BaseNode, falseblock: BaseNode):
+        super().__init__(condition.lineno, condition.colno, condition.filename)
+        self.condition = condition
+        self.trueblock = trueblock
+        self.falseblock = falseblock
+
+if T.TYPE_CHECKING:
+    COMPARISONS = Literal['==', '!=', '<', '<=', '>=', '>', 'in', 'notin']
+
+comparison_map: T.Mapping[str, COMPARISONS] = {
+    'equal': '==',
+    'nequal': '!=',
+    'lt': '<',
+    'le': '<=',
+    'gt': '>',
+    'ge': '>=',
+    'in': 'in',
+    'not in': 'notin',
+}
+
+# Recursive descent parser for Meson's definition language.
+# Very basic, apart from the fact that there are so many precedence
+# levels that there are not enough words to describe them all;
+# hence the numbering:
+#
+# 1 assignment
+# 2 or
+# 3 and
+# 4 comparison
+# 5 arithmetic
+# 6 negation
+# 7 funcall, method call
+# 8 parentheses
+# 9 plain token
+
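+# As an illustrative sketch (not part of the grammar), the statement
+#     x = 1 + 2 * 3
+# enters at e1 (assignment); the right-hand side descends through e5addsub
+# and e5muldiv, so `*` binds tighter than `+`, yielding (abbreviated)
+#     AssignmentNode('x', ArithmeticNode('add', 1, ArithmeticNode('mul', 2, 3)))
+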
+class Parser:
+    def __init__(self, code: str, filename: str):
+        self.lexer = Lexer(code)
+        self.stream = self.lexer.lex(filename)
+        self.current = Token('eof', '', 0, 0, 0, (0, 0), None)  # type: Token
+        self.getsym()
+        self.in_ternary = False
+
+    def getsym(self) -> None:
+        try:
+            self.current = next(self.stream)
+        except StopIteration:
+            self.current = Token('eof', '', self.current.line_start, self.current.lineno, self.current.colno + self.current.bytespan[1] - self.current.bytespan[0], (0, 0), None)
+
+    def getline(self) -> str:
+        return self.lexer.getline(self.current.line_start)
+
+    def accept(self, s: str) -> bool:
+        if self.current.tid == s:
+            self.getsym()
+            return True
+        return False
+
+    def accept_any(self, tids: T.Tuple[str, ...]) -> str:
+        tid = self.current.tid
+        if tid in tids:
+            self.getsym()
+            return tid
+        return ''
+
+    def expect(self, s: str) -> bool:
+        if self.accept(s):
+            return True
+        raise ParseException(f'Expecting {s} got {self.current.tid}.', self.getline(), self.current.lineno, self.current.colno)
+
+    def block_expect(self, s: str, block_start: Token) -> bool:
+        if self.accept(s):
+            return True
+        raise BlockParseException(f'Expecting {s} got {self.current.tid}.', self.getline(), self.current.lineno, self.current.colno, self.lexer.getline(block_start.line_start), block_start.lineno, block_start.colno)
+
+    def parse(self) -> CodeBlockNode:
+        block = self.codeblock()
+        try:
+            self.expect('eof')
+        except ParseException as e:
+            e.ast = block
+            raise
+        return block
+
+    def statement(self) -> BaseNode:
+        return self.e1()
+
+    def e1(self) -> BaseNode:
+        left = self.e2()
+        if self.accept('plusassign'):
+            value = self.e1()
+            if not isinstance(left, IdNode):
+                raise ParseException('Plusassignment target must be an id.', self.getline(), left.lineno, left.colno)
+            assert isinstance(left.value, str)
+            return PlusAssignmentNode(left.filename, left.lineno, left.colno, left.value, value)
+        elif self.accept('assign'):
+            value = self.e1()
+            if not isinstance(left, IdNode):
+                raise ParseException('Assignment target must be an id.',
+                                     self.getline(), left.lineno, left.colno)
+            assert isinstance(left.value, str)
+            return AssignmentNode(left.filename, left.lineno, left.colno, left.value, value)
+        elif self.accept('questionmark'):
+            if self.in_ternary:
+                raise ParseException('Nested ternary operators are not allowed.',
+                                     self.getline(), left.lineno, left.colno)
+            self.in_ternary = True
+            trueblock = self.e1()
+            self.expect('colon')
+            falseblock = self.e1()
+            self.in_ternary = False
+            return TernaryNode(left, trueblock, falseblock)
+        return left
+
+    def e2(self) -> BaseNode:
+        left = self.e3()
+        while self.accept('or'):
+            if isinstance(left, EmptyNode):
+                raise ParseException('Invalid or clause.',
+                                     self.getline(), left.lineno, left.colno)
+            left = OrNode(left, self.e3())
+        return left
+
+    def e3(self) -> BaseNode:
+        left = self.e4()
+        while self.accept('and'):
+            if isinstance(left, EmptyNode):
+                raise ParseException('Invalid and clause.',
+                                     self.getline(), left.lineno, left.colno)
+            left = AndNode(left, self.e4())
+        return left
+
+    def e4(self) -> BaseNode:
+        left = self.e5()
+        for nodename, operator_type in comparison_map.items():
+            if self.accept(nodename):
+                return ComparisonNode(operator_type, left, self.e5())
+        if self.accept('not') and self.accept('in'):
+            return ComparisonNode('notin', left, self.e5())
+        return left
+
+    def e5(self) -> BaseNode:
+        return self.e5addsub()
+
+    def e5addsub(self) -> BaseNode:
+        op_map = {
+            'plus': 'add',
+            'dash': 'sub',
+        }
+        left = self.e5muldiv()
+        while True:
+            op = self.accept_any(tuple(op_map.keys()))
+            if op:
+                left = ArithmeticNode(op_map[op], left, self.e5muldiv())
+            else:
+                break
+        return left
+
+    def e5muldiv(self) -> BaseNode:
+        op_map = {
+            'percent': 'mod',
+            'star': 'mul',
+            'fslash': 'div',
+        }
+        left = self.e6()
+        while True:
+            op = self.accept_any(tuple(op_map.keys()))
+            if op:
+                left = ArithmeticNode(op_map[op], left, self.e6())
+            else:
+                break
+        return left
+
+    def e6(self) -> BaseNode:
+        if self.accept('not'):
+            return NotNode(self.current, self.e7())
+        if self.accept('dash'):
+            return UMinusNode(self.current, self.e7())
+        return self.e7()
+
+    def e7(self) -> BaseNode:
+        left = self.e8()
+        block_start = self.current
+        if self.accept('lparen'):
+            args = self.args()
+            self.block_expect('rparen', block_start)
+            if not isinstance(left, IdNode):
+                raise ParseException('Function call must be applied to plain id',
+                                     self.getline(), left.lineno, left.colno)
+            assert isinstance(left.value, str)
+            left = FunctionNode(left.filename, left.lineno, left.colno, self.current.lineno, self.current.colno, left.value, args)
+        go_again = True
+        while go_again:
+            go_again = False
+            if self.accept('dot'):
+                go_again = True
+                left = self.method_call(left)
+            if self.accept('lbracket'):
+                go_again = True
+                left = self.index_call(left)
+        return left
+
+    def e8(self) -> BaseNode:
+        block_start = self.current
+        if self.accept('lparen'):
+            e = self.statement()
+            self.block_expect('rparen', block_start)
+            return e
+        elif self.accept('lbracket'):
+            args = self.args()
+            self.block_expect('rbracket', block_start)
+            return ArrayNode(args, block_start.lineno, block_start.colno, self.current.lineno, self.current.colno)
+        elif self.accept('lcurl'):
+            key_values = self.key_values()
+            self.block_expect('rcurl', block_start)
+            return DictNode(key_values, block_start.lineno, block_start.colno, self.current.lineno, self.current.colno)
+        else:
+            return self.e9()
+
+    def e9(self) -> BaseNode:
+        t = self.current
+        if self.accept('true'):
+            t.value = True
+            return BooleanNode(t)
+        if self.accept('false'):
+            t.value = False
+            return BooleanNode(t)
+        if self.accept('id'):
+            return IdNode(t)
+        if self.accept('number'):
+            return NumberNode(t)
+        if self.accept('string'):
+            return StringNode(t)
+        if self.accept('fstring'):
+            return FormatStringNode(t)
+        if self.accept('multiline_fstring'):
+            return MultilineFormatStringNode(t)
+        return EmptyNode(self.current.lineno, self.current.colno, self.current.filename)
+
+    def key_values(self) -> ArgumentNode:
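+        # Parses the interior of a dict literal such as {'key': 'value'};
+        # anything other than key:value pairs raises a ParseException below.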
+        s = self.statement()  # type: BaseNode
+        a = ArgumentNode(self.current)
+
+        while not isinstance(s, EmptyNode):
+            if self.accept('colon'):
+                a.set_kwarg_no_check(s, self.statement())
+                potential = self.current
+                if not self.accept('comma'):
+                    return a
+                a.commas.append(potential)
+            else:
+                raise ParseException('Only key:value pairs are valid in dict construction.',
+                                     self.getline(), s.lineno, s.colno)
+            s = self.statement()
+        return a
+
+    def args(self) -> ArgumentNode:
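+        # Accepts comma-separated positional arguments and `key : value` kwargs;
+        # e.g. for the (hypothetical) call `foo(a, b, key: 1)` this collects the
+        # positionals [a, b] plus the kwarg {key: 1}.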
+        s = self.statement()  # type: BaseNode
+        a = ArgumentNode(self.current)
+
+        while not isinstance(s, EmptyNode):
+            potential = self.current
+            if self.accept('comma'):
+                a.commas.append(potential)
+                a.append(s)
+            elif self.accept('colon'):
+                if not isinstance(s, IdNode):
+                    raise ParseException('Dictionary key must be a plain identifier.',
+                                         self.getline(), s.lineno, s.colno)
+                a.set_kwarg(s, self.statement())
+                potential = self.current
+                if not self.accept('comma'):
+                    return a
+                a.commas.append(potential)
+            else:
+                a.append(s)
+                return a
+            s = self.statement()
+        return a
+
+    def method_call(self, source_object: BaseNode) -> MethodNode:
+        methodname = self.e9()
+        if not isinstance(methodname, IdNode):
+            raise ParseException('Method name must be plain id',
+                                 self.getline(), self.current.lineno, self.current.colno)
+        assert isinstance(methodname.value, str)
+        self.expect('lparen')
+        args = self.args()
+        self.expect('rparen')
+        method = MethodNode(methodname.filename, methodname.lineno, methodname.colno, source_object, methodname.value, args)
+        if self.accept('dot'):
+            return self.method_call(method)
+        return method
+
+    def index_call(self, source_object: BaseNode) -> IndexNode:
+        index_statement = self.statement()
+        self.expect('rbracket')
+        return IndexNode(source_object, index_statement)
+
+    def foreachblock(self) -> ForeachClauseNode:
+        t = self.current
+        self.expect('id')
+        assert isinstance(t.value, str)
+        varname = t
+        varnames = [t.value]  # type: T.List[str]
+
+        if self.accept('comma'):
+            t = self.current
+            self.expect('id')
+            assert isinstance(t.value, str)
+            varnames.append(t.value)
+
+        self.expect('colon')
+        items = self.statement()
+        block = self.codeblock()
+        return ForeachClauseNode(varname, varnames, items, block)
+
+    def ifblock(self) -> IfClauseNode:
+        condition = self.statement()
+        clause = IfClauseNode(condition)
+        self.expect('eol')
+        block = self.codeblock()
+        clause.ifs.append(IfNode(clause, condition, block))
+        self.elseifblock(clause)
+        clause.elseblock = self.elseblock()
+        return clause
+
+    def elseifblock(self, clause: IfClauseNode) -> None:
+        while self.accept('elif'):
+            s = self.statement()
+            self.expect('eol')
+            b = self.codeblock()
+            clause.ifs.append(IfNode(s, s, b))
+
+    def elseblock(self) -> T.Union[CodeBlockNode, EmptyNode]:
+        if self.accept('else'):
+            self.expect('eol')
+            return self.codeblock()
+        return EmptyNode(self.current.lineno, self.current.colno, self.current.filename)
+
+    def testcaseblock(self) -> TestCaseClauseNode:
+        condition = self.statement()
+        self.expect('eol')
+        block = self.codeblock()
+        return TestCaseClauseNode(condition, block)
+
+    def line(self) -> BaseNode:
+        block_start = self.current
+        if self.current == 'eol':
+            return EmptyNode(self.current.lineno, self.current.colno, self.current.filename)
+        if self.accept('if'):
+            ifblock = self.ifblock()
+            self.block_expect('endif', block_start)
+            return ifblock
+        if self.accept('foreach'):
+            forblock = self.foreachblock()
+            self.block_expect('endforeach', block_start)
+            return forblock
+        if self.accept('continue'):
+            return ContinueNode(self.current)
+        if self.accept('break'):
+            return BreakNode(self.current)
+        if self.lexer.in_unit_test and self.accept('testcase'):
+            block = self.testcaseblock()
+            self.block_expect('endtestcase', block_start)
+            return block
+        return self.statement()
+
+    def codeblock(self) -> CodeBlockNode:
+        block = CodeBlockNode(self.current)
+        cond = True
+        try:
+            while cond:
+                curline = self.line()
+                if not isinstance(curline, EmptyNode):
+                    block.lines.append(curline)
+                cond = self.accept('eol')
+        except ParseException as e:
+            e.ast = block
+            raise
+        return block
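+
+# Minimal usage sketch (hypothetical input):
+#     ast = Parser("x = 1 + 2\n", "meson.build").parse()
+# returns a CodeBlockNode whose single entry in `lines` is an AssignmentNode.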
diff --git a/vendored-meson/meson/mesonbuild/msetup.py b/vendored-meson/meson/mesonbuild/msetup.py
new file mode 100644
index 000000000000..61d66f2492e6
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/msetup.py
@@ -0,0 +1,360 @@
+# Copyright 2016-2018 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+import argparse, datetime, glob, json, os, platform, shutil, sys, tempfile, time
+import cProfile as profile
+from pathlib import Path
+import typing as T
+
+from . import build, coredata, environment, interpreter, mesonlib, mintro, mlog
+from .mesonlib import MesonException
+
+git_ignore_file = '''# This file is autogenerated by Meson. If you change or delete it, it won't be recreated.
+*
+'''
+
+hg_ignore_file = '''# This file is autogenerated by Meson. If you change or delete it, it won't be recreated.
+syntax: glob
+**/*
+'''
+
+
+def add_arguments(parser: argparse.ArgumentParser) -> None:
+    coredata.register_builtin_arguments(parser)
+    parser.add_argument('--native-file',
+                        default=[],
+                        action='append',
+                        help='File containing overrides for native compilation environment.')
+    parser.add_argument('--cross-file',
+                        default=[],
+                        action='append',
+                        help='File describing cross compilation environment.')
+    parser.add_argument('-v', '--version', action='version',
+                        version=coredata.version)
+    parser.add_argument('--profile-self', action='store_true', dest='profile',
+                        help=argparse.SUPPRESS)
+    parser.add_argument('--fatal-meson-warnings', action='store_true', dest='fatal_warnings',
+                        help='Make all Meson warnings fatal')
+    parser.add_argument('--reconfigure', action='store_true',
+                        help='Set options and reconfigure the project. Useful when new ' +
+                             'options have been added to the project and the default value ' +
+                             'is not working.')
+    parser.add_argument('--wipe', action='store_true',
+                        help='Wipe build directory and reconfigure using previous command line options. ' +
+                             'Useful when the build directory got corrupted, or when rebuilding with a ' +
+                             'newer version of meson.')
+    parser.add_argument('builddir', nargs='?', default=None)
+    parser.add_argument('sourcedir', nargs='?', default=None)
+
+class MesonApp:
+    def __init__(self, options: argparse.Namespace) -> None:
+        (self.source_dir, self.build_dir) = self.validate_dirs(options.builddir,
+                                                               options.sourcedir,
+                                                               options.reconfigure,
+                                                               options.wipe)
+        if options.wipe:
+            # Make a copy of the cmd line file to make sure we can always
+            # restore that file if anything bad happens. For example, if
+            # configuration fails, we need to be able to wipe again.
+            restore = []
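+            # Each entry pairs the backup copy made in the tempdir with the
+            # original path it must be restored to.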
+            with tempfile.TemporaryDirectory() as d:
+                for filename in [coredata.get_cmd_line_file(self.build_dir)] + glob.glob(os.path.join(self.build_dir, environment.Environment.private_dir, '*.ini')):
+                    try:
+                        restore.append((shutil.copy(filename, d), filename))
+                    except FileNotFoundError:
+                        # validate_dirs() already verified that build_dir has
+                        # a partial build or is empty.
+                        pass
+
+                coredata.read_cmd_line_file(self.build_dir, options)
+
+                try:
+                    # Don't delete the whole tree, just all of the files and
+                    # folders in the tree. Otherwise, calling wipe from the
+                    # builddir will cause a crash.
+                    for l in os.listdir(self.build_dir):
+                        l = os.path.join(self.build_dir, l)
+                        if os.path.isdir(l) and not os.path.islink(l):
+                            mesonlib.windows_proof_rmtree(l)
+                        else:
+                            mesonlib.windows_proof_rm(l)
+                finally:
+                    self.add_vcs_ignore_files(self.build_dir)
+                    for b, f in restore:
+                        os.makedirs(os.path.dirname(f), exist_ok=True)
+                        shutil.move(b, f)
+
+        self.options = options
+
+    def has_build_file(self, dirname: str) -> bool:
+        fname = os.path.join(dirname, environment.build_filename)
+        return os.path.exists(fname)
+
+    def validate_core_dirs(self, dir1: str, dir2: str) -> T.Tuple[str, str]:
+        invalid_msg_prefix = f'Neither source directory {dir1!r} nor build directory {dir2!r}'
+        if dir1 is None:
+            if dir2 is None:
+                if not self.has_build_file('.') and self.has_build_file('..'):
+                    dir2 = '..'
+                else:
+                    raise MesonException('Must specify at least one directory name.')
+            dir1 = os.getcwd()
+        if dir2 is None:
+            dir2 = os.getcwd()
+        ndir1 = os.path.abspath(os.path.realpath(dir1))
+        ndir2 = os.path.abspath(os.path.realpath(dir2))
+        if not os.path.exists(ndir1) and not os.path.exists(ndir2):
+            raise MesonException(f'{invalid_msg_prefix} exist.')
+        try:
+            os.makedirs(ndir1, exist_ok=True)
+        except FileExistsError as e:
+            raise MesonException(f'{dir1} is not a directory') from e
+        try:
+            os.makedirs(ndir2, exist_ok=True)
+        except FileExistsError as e:
+            raise MesonException(f'{dir2} is not a directory') from e
+        if os.path.samefile(ndir1, ndir2):
+            # Fall back to a textual comparison if undefined stat entries are found
+            has_undefined = any((s.st_ino == 0 and s.st_dev == 0) for s in (os.stat(ndir1), os.stat(ndir2)))
+            if not has_undefined or ndir1 == ndir2:
+                raise MesonException('Source and build directories must not be the same. Create a pristine build directory.')
+        if self.has_build_file(ndir1):
+            if self.has_build_file(ndir2):
+                raise MesonException(f'Both directories contain a build file {environment.build_filename}.')
+            return ndir1, ndir2
+        if self.has_build_file(ndir2):
+            return ndir2, ndir1
+        raise MesonException(f'{invalid_msg_prefix} contain a build file {environment.build_filename}.')
+
+    def add_vcs_ignore_files(self, build_dir: str) -> None:
+        with open(os.path.join(build_dir, '.gitignore'), 'w', encoding='utf-8') as ofile:
+            ofile.write(git_ignore_file)
+        with open(os.path.join(build_dir, '.hgignore'), 'w', encoding='utf-8') as ofile:
+            ofile.write(hg_ignore_file)
+
+    def validate_dirs(self, dir1: str, dir2: str, reconfigure: bool, wipe: bool) -> T.Tuple[str, str]:
+        (src_dir, build_dir) = self.validate_core_dirs(dir1, dir2)
+        if Path(build_dir) in Path(src_dir).parents:
+            raise MesonException(f'Build directory {build_dir} cannot be a parent of source directory {src_dir}')
+        if not os.listdir(build_dir):
+            self.add_vcs_ignore_files(build_dir)
+            return src_dir, build_dir
+        priv_dir = os.path.join(build_dir, 'meson-private')
+        has_valid_build = os.path.exists(os.path.join(priv_dir, 'coredata.dat'))
+        has_partial_build = os.path.isdir(priv_dir)
+        if has_valid_build:
+            if not reconfigure and not wipe:
+                print('Directory already configured.\n\n'
+                      'Just run your build command (e.g. ninja) and Meson will regenerate as necessary.\n'
+                      'If ninja fails, run "ninja reconfigure" or "meson setup --reconfigure"\n'
+                      'to force Meson to regenerate.\n\n'
+                      'If build failures persist, run "meson setup --wipe" to rebuild from scratch\n'
+                      'using the same options as passed when configuring the build.\n'
+                      'To change option values, run "meson configure" instead.')
+                # FIXME: This returns success and ignores new option values from CLI.
+                # We should either make this a hard error, or update options and
+                # return success.
+                # Note that making this an error would not be backward compatible (and also isn't
+                # universally agreed on): https://github.com/mesonbuild/meson/pull/4249.
+                raise SystemExit(0)
+        elif not has_partial_build and wipe:
+            raise MesonException(f'Directory is not empty and does not contain a previous build:\n{build_dir}')
+        return src_dir, build_dir
+
+    # See class Backend's 'generate' for comments on capture args and returned dictionary.
+    def generate(self, capture: bool = False, vslite_ctx: T.Optional[dict] = None) -> T.Optional[dict]:
+        env = environment.Environment(self.source_dir, self.build_dir, self.options)
+        mlog.initialize(env.get_log_dir(), self.options.fatal_warnings)
+        if self.options.profile:
+            mlog.set_timestamp_start(time.monotonic())
+        with mesonlib.BuildDirLock(self.build_dir):
+            return self._generate(env, capture, vslite_ctx)
+
+    def _generate(self, env: environment.Environment, capture: bool, vslite_ctx: T.Optional[dict]) -> T.Optional[dict]:
+        # Get all user defined options, including options that have been defined
+        # during a previous invocation or using meson configure.
+        user_defined_options = argparse.Namespace(**vars(self.options))
+        coredata.read_cmd_line_file(self.build_dir, user_defined_options)
+
+        mlog.debug('Build started at', datetime.datetime.now().isoformat())
+        mlog.debug('Main binary:', sys.executable)
+        mlog.debug('Build Options:', coredata.format_cmd_line_options(user_defined_options))
+        mlog.debug('Python system:', platform.system())
+        mlog.log(mlog.bold('The Meson build system'))
+        mlog.log('Version:', coredata.version)
+        mlog.log('Source dir:', mlog.bold(self.source_dir))
+        mlog.log('Build dir:', mlog.bold(self.build_dir))
+        if env.is_cross_build():
+            mlog.log('Build type:', mlog.bold('cross build'))
+        else:
+            mlog.log('Build type:', mlog.bold('native build'))
+        b = build.Build(env)
+
+        intr = interpreter.Interpreter(b, user_defined_options=user_defined_options)
+        # Super hack because mlog.log and mlog.debug have different signatures,
+        # and there is currently no way to annotate them correctly, unionize them, or
+        # even to write `T.Callable[[*mlog.TV_Loggable], None]`
+        logger_fun = T.cast('T.Callable[[mlog.TV_Loggable, mlog.TV_Loggable], None]',
+                            (mlog.log if env.is_cross_build() else mlog.debug))
+        build_machine = intr.builtin['build_machine']
+        host_machine = intr.builtin['host_machine']
+        target_machine = intr.builtin['target_machine']
+        assert isinstance(build_machine, interpreter.MachineHolder)
+        assert isinstance(host_machine, interpreter.MachineHolder)
+        assert isinstance(target_machine, interpreter.MachineHolder)
+        logger_fun('Build machine cpu family:', mlog.bold(build_machine.cpu_family_method([], {})))
+        logger_fun('Build machine cpu:', mlog.bold(build_machine.cpu_method([], {})))
+        mlog.log('Host machine cpu family:', mlog.bold(host_machine.cpu_family_method([], {})))
+        mlog.log('Host machine cpu:', mlog.bold(host_machine.cpu_method([], {})))
+        logger_fun('Target machine cpu family:', mlog.bold(target_machine.cpu_family_method([], {})))
+        logger_fun('Target machine cpu:', mlog.bold(target_machine.cpu_method([], {})))
+        try:
+            if self.options.profile:
+                fname = os.path.join(self.build_dir, 'meson-logs', 'profile-interpreter.log')
+                profile.runctx('intr.run()', globals(), locals(), filename=fname)
+            else:
+                intr.run()
+        except Exception as e:
+            mintro.write_meson_info_file(b, [e])
+            raise
+
+        cdf: T.Optional[str] = None
+        captured_compile_args: T.Optional[dict] = None
+        try:
+            dumpfile = os.path.join(env.get_scratch_dir(), 'build.dat')
+            # We would like to write coredata as late as possible since we use the existence of
+            # this file to check if we generated the build file successfully. Since coredata
+            # includes settings, the build files must depend on it and appear newer. However, due
+            # to various kernel caches, we cannot guarantee that any time in Python is exactly in
+            # sync with the time that gets applied to any files. Thus, we dump this file as late as
+            # possible, but before build files, and if any error occurs, delete it.
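+            # Resulting order (sketch): dump coredata -> finalize post-conf
+            # hooks -> generate backend build files -> save build.dat; the
+            # except clause below rolls back the freshly dumped coredata if
+            # anything fails.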
+            cdf = env.dump_coredata()
+
+            self.finalize_postconf_hooks(b, intr)
+            if self.options.profile:
+                fname = f'profile-{intr.backend.name}-backend.log'
+                fname = os.path.join(self.build_dir, 'meson-logs', fname)
+                profile.runctx('gen_result = intr.backend.generate(capture, vslite_ctx)', globals(), locals(), filename=fname)
+                captured_compile_args = locals()['gen_result']
+                assert captured_compile_args is None or isinstance(captured_compile_args, dict)
+            else:
+                captured_compile_args = intr.backend.generate(capture, vslite_ctx)
+
+            build.save(b, dumpfile)
+            if env.first_invocation:
+                # Use paths resolved by coredata because they could have been
+                # read from a pipe and written into a private file.
+                self.options.cross_file = env.coredata.cross_files
+                self.options.native_file = env.coredata.config_files
+                coredata.write_cmd_line_file(self.build_dir, self.options)
+            else:
+                coredata.update_cmd_line_file(self.build_dir, self.options)
+
+            # Generate an IDE introspection file with the same syntax as the already existing API
+            if self.options.profile:
+                fname = os.path.join(self.build_dir, 'meson-logs', 'profile-introspector.log')
+                profile.runctx('mintro.generate_introspection_file(b, intr.backend)', globals(), locals(), filename=fname)
+            else:
+                mintro.generate_introspection_file(b, intr.backend)
+            mintro.write_meson_info_file(b, [], True)
+
+            # Post-conf scripts must be run after writing coredata or else introspection fails.
+            intr.backend.run_postconf_scripts()
+
+            # Collect warnings about unsupported build configurations; this must be done after
+            # full argument processing by the Interpreter() init, but is most visible here at the end.
+            if env.coredata.options[mesonlib.OptionKey('backend')].value == 'xcode':
+                mlog.warning('xcode backend is currently unmaintained, patches welcome')
+            if env.coredata.options[mesonlib.OptionKey('layout')].value == 'flat':
+                mlog.warning('-Dlayout=flat is unsupported and probably broken. It was a failed experiment at '
+                             'making Windows build artifacts runnable while uninstalled, due to PATH considerations, '
+                             'but was untested by CI and anyway breaks reasonable use of conflicting targets in different subdirs. '
+                             'Please consider using `meson devenv` instead. See https://github.com/mesonbuild/meson/pull/9243 '
+                             'for details.')
+
+            if self.options.profile:
+                fname = os.path.join(self.build_dir, 'meson-logs', 'profile-startup-modules.json')
+                mods = set(sys.modules.keys())
+                mesonmods = {mod for mod in mods if (mod+'.').startswith('mesonbuild.')}
+                stdmods = sorted(mods - mesonmods)
+                data = {'stdlib': {'modules': stdmods, 'count': len(stdmods)}, 'meson': {'modules': sorted(mesonmods), 'count': len(mesonmods)}}
+                with open(fname, 'w', encoding='utf-8') as f:
+                    json.dump(data, f)
+
+        except Exception as e:
+            mintro.write_meson_info_file(b, [e])
+            if cdf is not None:
+                old_cdf = cdf + '.prev'
+                if os.path.exists(old_cdf):
+                    os.replace(old_cdf, cdf)
+                else:
+                    os.unlink(cdf)
+            raise
+
+        return captured_compile_args
+
+    def finalize_postconf_hooks(self, b: build.Build, intr: interpreter.Interpreter) -> None:
+        b.devenv.append(intr.backend.get_devenv())
+        for mod in intr.modules.values():
+            mod.postconf_hook(b)
+
+def run_genvslite_setup(options: argparse.Namespace) -> None:
+    # With --genvslite, we essentially want to invoke multiple 'setup' iterations, i.e.:
+    #    meson setup ... builddirprefix_debug
+    #    meson setup ... builddirprefix_debugoptimized
+    #    meson setup ... builddirprefix_release
+    # along with setting up a new, thin/lite Visual Studio solution and projects with the multiple
+    # debug/opt/release configurations that invoke the appropriate 'meson compile ...' build commands
+    # upon the normal Visual Studio build/rebuild/clean actions, instead of using the native VS/msbuild system.
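+    # For example, a (hypothetical) invocation `meson setup --genvslite vs2022 somedir`
+    # would set up somedir_debug, somedir_debugoptimized and somedir_release build
+    # directories, plus a somedir_vs directory holding the generated lite solution.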
+    builddir_prefix = options.builddir
+    genvsliteval = options.cmd_line_options.pop(mesonlib.OptionKey('genvslite'))
+    # The command line may specify a '--backend' option, which doesn't make sense in conjunction with
+    # '--genvslite', where we always want to use the ninja backend:
+    k_backend = mesonlib.OptionKey('backend')
+    if k_backend in options.cmd_line_options.keys():
+        if options.cmd_line_options[k_backend] != 'ninja':
+            raise MesonException('Explicitly specifying a backend option with \'genvslite\' is not necessary '
+                                 '(the ninja backend is always used) but specifying a non-ninja backend '
+                                 'conflicts with a \'genvslite\' setup')
+    else:
+        options.cmd_line_options[k_backend] = 'ninja'
+    buildtypes_list = coredata.get_genvs_default_buildtype_list()
+    vslite_ctx = {}
+
+    for buildtypestr in buildtypes_list:
+        options.builddir = f'{builddir_prefix}_{buildtypestr}' # E.g. builddir_release
+        options.cmd_line_options[mesonlib.OptionKey('buildtype')] = buildtypestr
+        app = MesonApp(options)
+        vslite_ctx[buildtypestr] = app.generate(capture=True)
+    # Now generate the 'lite' solution and project files, which will use the builds we've just set up above.
+    options.builddir = f'{builddir_prefix}_vs'
+    options.cmd_line_options[mesonlib.OptionKey('genvslite')] = genvsliteval
+    app = MesonApp(options)
+    app.generate(capture=False, vslite_ctx=vslite_ctx)
+
+def run(options: T.Union[argparse.Namespace, T.List[str]]) -> int:
+    if not isinstance(options, argparse.Namespace):
+        parser = argparse.ArgumentParser()
+        add_arguments(parser)
+        options = parser.parse_args(options)
+    coredata.parse_cmd_line_options(options)
+
+    if mesonlib.OptionKey('genvslite') in options.cmd_line_options.keys():
+        run_genvslite_setup(options)
+    else:
+        app = MesonApp(options)
+        app.generate()
+
+    return 0
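+
+# Usage sketch: run() accepts either a parsed argparse.Namespace or a raw
+# argument list, e.g. run(['path/to/builddir']), which is parsed with the
+# arguments registered by add_arguments() above before setup proceeds.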
diff --git a/vendored-meson/meson/mesonbuild/msubprojects.py b/vendored-meson/meson/mesonbuild/msubprojects.py
new file mode 100755
index 000000000000..3ecfba18fe6b
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/msubprojects.py
@@ -0,0 +1,738 @@
+from __future__ import annotations
+
+from dataclasses import dataclass, InitVar
+import os, subprocess
+import argparse
+import asyncio
+import threading
+import copy
+import shutil
+from concurrent.futures.thread import ThreadPoolExecutor
+from pathlib import Path
+import typing as T
+import tarfile
+import zipfile
+
+from . import mlog
+from .ast import IntrospectionInterpreter, AstIDGenerator
+from .mesonlib import quiet_git, GitException, Popen_safe, MesonException, windows_proof_rmtree
+from .wrap.wrap import (Resolver, WrapException, ALL_TYPES, PackageDefinition,
+                        parse_patch_url, update_wrap_file, get_releases)
+
+if T.TYPE_CHECKING:
+    from typing_extensions import Protocol
+
+    SubParsers = argparse._SubParsersAction[argparse.ArgumentParser]
+
+    class Arguments(Protocol):
+        sourcedir: str
+        num_processes: int
+        subprojects: T.List[str]
+        types: str
+        subprojects_func: T.Callable[[], bool]
+        allow_insecure: bool
+
+    class UpdateArguments(Arguments):
+        rebase: bool
+        reset: bool
+
+    class UpdateWrapDBArguments(Arguments):
+        force: bool
+        releases: T.Dict[str, T.Any]
+
+    class CheckoutArguments(Arguments):
+        b: bool
+        branch_name: str
+
+    class ForeachArguments(Arguments):
+        command: str
+        args: T.List[str]
+
+    class PurgeArguments(Arguments):
+        confirm: bool
+        include_cache: bool
+
+    class PackagefilesArguments(Arguments):
+        apply: bool
+        save: bool
+
+ALL_TYPES_STRING = ', '.join(ALL_TYPES)
+
+def read_archive_files(path: Path, base_path: Path) -> T.Set[Path]:
+    if path.suffix == '.zip':
+        with zipfile.ZipFile(path, 'r') as zip_archive:
+            archive_files = {base_path / i.filename for i in zip_archive.infolist()}
+    else:
+        with tarfile.open(path) as tar_archive: # [ignore encoding]
+            archive_files = {base_path / i.name for i in tar_archive}
+    return archive_files
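+
+# Illustrative (hypothetical) call: read_archive_files(Path('foo-1.0.zip'),
+# Path('subprojects/foo-1.0')) returns the destination path of every archive
+# member, i.e. the set of files a wrap extraction would own under that base.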
+
+class Logger:
+    def __init__(self, total_tasks: int) -> None:
+        self.lock = threading.Lock()
+        self.total_tasks = total_tasks
+        self.completed_tasks = 0
+        self.running_tasks: T.Set[str] = set()
+        self.should_erase_line = ''
+
+    def flush(self) -> None:
+        if self.should_erase_line:
+            print(self.should_erase_line, end='\r')
+            self.should_erase_line = ''
+
+    def print_progress(self) -> None:
+        line = f'Progress: {self.completed_tasks} / {self.total_tasks}'
+        max_len = shutil.get_terminal_size().columns - len(line)
+        running = ', '.join(self.running_tasks)
+        if len(running) + 3 > max_len:
+            running = running[:max_len - 6] + '...'
+        line = line + f' ({running})'
+        print(self.should_erase_line, line, sep='', end='\r')
+        self.should_erase_line = '\x1b[K'
+
+    def start(self, wrap_name: str) -> None:
+        with self.lock:
+            self.running_tasks.add(wrap_name)
+            self.print_progress()
+
+    def done(self, wrap_name: str, log_queue: T.List[T.Tuple[mlog.TV_LoggableList, T.Any]]) -> None:
+        with self.lock:
+            self.flush()
+            for args, kwargs in log_queue:
+                mlog.log(*args, **kwargs)
+            self.running_tasks.remove(wrap_name)
+            self.completed_tasks += 1
+            self.print_progress()
+
+
+@dataclass(eq=False)
+class Runner:
+    logger: Logger
+    r: InitVar[Resolver]
+    wrap: PackageDefinition
+    repo_dir: str
+    options: 'Arguments'
+
+    def __post_init__(self, r: Resolver) -> None:
+        # FIXME: Do a copy because Resolver.resolve() is a stateful method
+        # that cannot be called from multiple threads.
+        self.wrap_resolver = copy.copy(r)
+        self.wrap_resolver.dirname = os.path.join(r.subdir_root, self.wrap.directory)
+        self.wrap_resolver.wrap = self.wrap
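+        # options.subprojects_func holds a plain (unbound) Runner method chosen
+        # by the subcommand parser (e.g. Runner.update, presumably set via
+        # set_defaults); __get__(self) binds it to this Runner instance.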
+        self.run_method: T.Callable[[], bool] = self.options.subprojects_func.__get__(self)
+        self.log_queue: T.List[T.Tuple[mlog.TV_LoggableList, T.Any]] = []
+
+    def log(self, *args: mlog.TV_Loggable, **kwargs: T.Any) -> None:
+        self.log_queue.append((list(args), kwargs))
+
+    def run(self) -> bool:
+        self.logger.start(self.wrap.name)
+        try:
+            result = self.run_method()
+        except MesonException as e:
+            self.log(mlog.red('Error:'), str(e))
+            result = False
+        self.logger.done(self.wrap.name, self.log_queue)
+        return result
+
+    @staticmethod
+    def pre_update_wrapdb(options: 'UpdateWrapDBArguments') -> None:
+        options.releases = get_releases(options.allow_insecure)
+
+    def update_wrapdb(self) -> bool:
+        self.log(f'Checking latest WrapDB version for {self.wrap.name}...')
+        options = T.cast('UpdateWrapDBArguments', self.options)
+
+        # Check if this wrap is in WrapDB
+        info = options.releases.get(self.wrap.name)
+        if not info:
+            self.log('  -> Wrap not found in wrapdb')
+            return True
+
+        # Determine current version
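+        # (WrapDB version strings have the form '<upstream-version>-<revision>',
+        # e.g. '1.2.3-4', so the split below yields branch='1.2.3', revision='4'.)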
+        try:
+            wrapdb_version = self.wrap.get('wrapdb_version')
+            branch, revision = wrapdb_version.split('-', 1)
+        except WrapException:
+            # Fall back to parsing the patch URL to determine the current version.
+            # This won't work for projects that have upstream Meson support.
+            try:
+                patch_url = self.wrap.get('patch_url')
+                branch, revision = parse_patch_url(patch_url)
+            except WrapException:
+                if not options.force:
+                    self.log('  ->', mlog.red('Could not determine current version, use --force to update anyway'))
+                    return False
+                branch = revision = None
+
+        # Download latest wrap if version differs
+        latest_version = info['versions'][0]
+        new_branch, new_revision = latest_version.rsplit('-', 1)
+        if new_branch != branch or new_revision != revision:
+            filename = self.wrap.filename if self.wrap.has_wrap else f'{self.wrap.filename}.wrap'
+            update_wrap_file(filename, self.wrap.name,
+                             new_branch, new_revision,
+                             options.allow_insecure)
+            self.log('  -> New version downloaded:', mlog.blue(latest_version))
+        else:
+            self.log('  -> Already at latest version:', mlog.blue(latest_version))
+
+        return True
+
+    def update_file(self) -> bool:
+        options = T.cast('UpdateArguments', self.options)
+        if options.reset:
+            # Delete the existing directory and redownload. It is possible that
+            # nothing changed, but we have no way to know. Hopefully the
+            # tarballs are still cached.
+            windows_proof_rmtree(self.repo_dir)
+            try:
+                self.wrap_resolver.resolve(self.wrap.name, 'meson')
+                self.log('  -> New version extracted')
+                return True
+            except WrapException as e:
+                self.log('  ->', mlog.red(str(e)))
+                return False
+        else:
+            # The subproject has not changed, or the new source and/or patch
+            # tarballs should be extracted in the same directory as the
+            # previous version.
+            self.log('  -> Subproject has not changed, or the new source/patch needs to be extracted in the same location.')
+            self.log('     Pass --reset option to delete directory and redownload.')
+            return False
+
+    def git_output(self, cmd: T.List[str]) -> str:
+        return quiet_git(cmd, self.repo_dir, check=True)[1]
+
+    def git_verbose(self, cmd: T.List[str]) -> None:
+        self.log(self.git_output(cmd))
+
+    def git_stash(self) -> None:
+        # That git command returns some output when there is something to stash.
+        # We don't want to stash when there is nothing to stash, because that
+        # would print a spurious "No local changes to save".
+        if quiet_git(['status', '--porcelain', ':!/.meson-subproject-wrap-hash.txt'], self.repo_dir)[1].strip():
+            # Don't pipe stdout here because we want the user to see their changes have
+            # been saved.
+            # Note: `--all` is used, and not `--include-untracked`, to prevent
+            # a potential error if `.meson-subproject-wrap-hash.txt` matches a
+            # gitignore pattern.
+            # We must add the dot in addition to the negation, because older versions of git have a bug.
+            self.git_verbose(['stash', 'push', '--all', ':!/.meson-subproject-wrap-hash.txt', '.'])
+
+    def git_show(self) -> None:
+        commit_message = self.git_output(['show', '--quiet', '--pretty=format:%h%n%d%n%s%n[%an]'])
+        parts = [s.strip() for s in commit_message.split('\n')]
+        self.log('  ->', mlog.yellow(parts[0]), mlog.red(parts[1]), parts[2], mlog.blue(parts[3]))
+
+    def git_rebase(self, revision: str) -> bool:
+        try:
+            self.git_output(['-c', 'rebase.autoStash=true', 'rebase', 'FETCH_HEAD'])
+        except GitException as e:
+            self.log('  -> Could not rebase', mlog.bold(self.repo_dir), 'onto', mlog.bold(revision))
+            self.log(mlog.red(e.output))
+            self.log(mlog.red(str(e)))
+            return False
+        return True
+
+    def git_reset(self, revision: str) -> bool:
+        try:
+            # Stash local changes; commits can always be found in the reflog,
+            # so no data is lost by mistake.
+            self.git_stash()
+            self.git_output(['reset', '--hard', 'FETCH_HEAD'])
+            self.wrap_resolver.apply_patch()
+            self.wrap_resolver.apply_diff_files()
+        except GitException as e:
+            self.log('  -> Could not reset', mlog.bold(self.repo_dir), 'to', mlog.bold(revision))
+            self.log(mlog.red(e.output))
+            self.log(mlog.red(str(e)))
+            return False
+        return True
+
+    def git_checkout(self, revision: str, create: bool = False) -> bool:
+        cmd = ['checkout', '--ignore-other-worktrees']
+        if create:
+            cmd.append('-b')
+        cmd += [revision, '--']
+        try:
+            # Stash local changes; commits can always be found in the reflog,
+            # so no data is lost by mistake.
+            self.git_stash()
+            self.git_output(cmd)
+        except GitException as e:
+            self.log('  -> Could not checkout', mlog.bold(revision), 'in', mlog.bold(self.repo_dir))
+            self.log(mlog.red(e.output))
+            self.log(mlog.red(str(e)))
+            return False
+        return True
+
+    def git_checkout_and_reset(self, revision: str) -> bool:
+        # revision could be a branch that already exists but is outdated, so we still
+        # have to reset after the checkout.
+        success = self.git_checkout(revision)
+        if success:
+            success = self.git_reset(revision)
+        return success
+
+    def git_checkout_and_rebase(self, revision: str) -> bool:
+        # revision could be a branch that already exists but is outdated, so we still
+        # have to rebase after the checkout.
+        success = self.git_checkout(revision)
+        if success:
+            success = self.git_rebase(revision)
+        return success
+
+    def update_git(self) -> bool:
+        options = T.cast('UpdateArguments', self.options)
+        if not os.path.exists(os.path.join(self.repo_dir, '.git')):
+            if options.reset:
+                # Delete existing directory and redownload
+                windows_proof_rmtree(self.repo_dir)
+                try:
+                    self.wrap_resolver.resolve(self.wrap.name, 'meson')
+                    self.update_git_done()
+                    return True
+                except WrapException as e:
+                    self.log('  ->', mlog.red(str(e)))
+                    return False
+            else:
+                self.log('  -> Not a git repository.')
+                self.log('Pass --reset option to delete directory and redownload.')
+                return False
+        revision = self.wrap.values.get('revision')
+        url = self.wrap.values.get('url')
+        push_url = self.wrap.values.get('push-url')
+        if not revision or not url:
+            # It could be a detached git submodule for example.
+            self.log('  -> No revision or URL specified.')
+            return True
+        try:
+            origin_url = self.git_output(['remote', 'get-url', 'origin']).strip()
+        except GitException as e:
+            self.log('  -> Failed to determine current origin URL in', mlog.bold(self.repo_dir))
+            self.log(mlog.red(e.output))
+            self.log(mlog.red(str(e)))
+            return False
+        if options.reset:
+            try:
+                self.git_output(['remote', 'set-url', 'origin', url])
+                if push_url:
+                    self.git_output(['remote', 'set-url', '--push', 'origin', push_url])
+            except GitException as e:
+                self.log('  -> Failed to reset origin URL in', mlog.bold(self.repo_dir))
+                self.log(mlog.red(e.output))
+                self.log(mlog.red(str(e)))
+                return False
+        elif url != origin_url:
+            self.log(f'  -> URL changed from {origin_url!r} to {url!r}')
+            return False
+        try:
+            # Same as `git branch --show-current` but compatible with older git versions
+            branch = self.git_output(['rev-parse', '--abbrev-ref', 'HEAD']).strip()
+            branch = branch if branch != 'HEAD' else ''
+        except GitException as e:
+            self.log('  -> Failed to determine current branch in', mlog.bold(self.repo_dir))
+            self.log(mlog.red(e.output))
+            self.log(mlog.red(str(e)))
+            return False
+        if self.wrap_resolver.is_git_full_commit_id(revision) and \
+                quiet_git(['rev-parse', '--verify', revision + '^{commit}'], self.repo_dir)[0]:
+            # The revision we need is both a commit and available, so we do not
+            # need to fetch it because it cannot be updated. Instead, trick
+            # git into setting FETCH_HEAD from the local commit, just in case.
+            self.git_output(['fetch', '.', revision])
+        else:
+            try:
+                # Fetch only the revision we need; this avoids fetching useless branches.
+                # revision can be either a branch, tag or commit id. In all cases we want
+                # FETCH_HEAD to be set to the desired commit and "git checkout <revision>"
+                # to either switch to an existing/new branch, or detach to a tag/commit.
+                # It is more complicated than it first appears; see the discussion here:
+                # https://github.com/mesonbuild/meson/pull/7723#discussion_r488816189.
+                heads_refmap = '+refs/heads/*:refs/remotes/origin/*'
+                tags_refmap = '+refs/tags/*:refs/tags/*'
+                self.git_output(['fetch', '--refmap', heads_refmap, '--refmap', tags_refmap, 'origin', revision])
+            except GitException as e:
+                self.log('  -> Could not fetch revision', mlog.bold(revision), 'in', mlog.bold(self.repo_dir))
+                self.log(mlog.red(e.output))
+                self.log(mlog.red(str(e)))
+                return False
+
+        if branch == '':
+            # We are currently in detached mode
+            if options.reset:
+                success = self.git_checkout_and_reset(revision)
+            else:
+                success = self.git_checkout_and_rebase(revision)
+        elif branch == revision:
+            # We are on the same branch. A reset could still be needed in case
+            # a force push happened on the remote repository.
+            if options.reset:
+                success = self.git_reset(revision)
+            else:
+                success = self.git_rebase(revision)
+        else:
+            # We are on another branch: either the user created their own branch and
+            # we should rebase it, or the revision changed in the wrap file and we
+            # need to check out the new branch.
+            if options.reset:
+                success = self.git_checkout_and_reset(revision)
+            else:
+                success = self.git_rebase(revision)
+        if success:
+            self.update_git_done()
+        return success
+
+    def update_git_done(self) -> None:
+        self.git_output(['submodule', 'update', '--checkout', '--recursive'])
+        self.git_show()
+
+    def update_hg(self) -> bool:
+        revno = self.wrap.get('revision')
+        if revno.lower() == 'tip':
+            # Failure to pull is not a fatal error, because otherwise
+            # you couldn't develop without a working network connection.
+            subprocess.call(['hg', 'pull'], cwd=self.repo_dir)
+        else:
+            if subprocess.call(['hg', 'checkout', revno], cwd=self.repo_dir) != 0:
+                subprocess.check_call(['hg', 'pull'], cwd=self.repo_dir)
+                subprocess.check_call(['hg', 'checkout', revno], cwd=self.repo_dir)
+        return True
+
+    def update_svn(self) -> bool:
+        revno = self.wrap.get('revision')
+        _, out, _ = Popen_safe(['svn', 'info', '--show-item', 'revision', self.repo_dir])
+        current_revno = out
+        if current_revno == revno:
+            return True
+        if revno.lower() == 'head':
+            # Failure to pull is not a fatal error, because otherwise
+            # you couldn't develop without a working network connection.
+            subprocess.call(['svn', 'update'], cwd=self.repo_dir)
+        else:
+            subprocess.check_call(['svn', 'update', '-r', revno], cwd=self.repo_dir)
+        return True
+
+    def update(self) -> bool:
+        self.log(f'Updating {self.wrap.name}...')
+        success = False
+        if not os.path.isdir(self.repo_dir):
+            self.log('  -> Not used.')
+            # It is not an error if we are updating all subprojects.
+            success = not self.options.subprojects
+        elif self.wrap.type == 'file':
+            success = self.update_file()
+        elif self.wrap.type == 'git':
+            success = self.update_git()
+        elif self.wrap.type == 'hg':
+            success = self.update_hg()
+        elif self.wrap.type == 'svn':
+            success = self.update_svn()
+        elif self.wrap.type is None:
+            self.log('  -> Cannot update subproject with no wrap file')
+            # It is not an error if we are updating all subprojects.
+            success = not self.options.subprojects
+        else:
+            self.log('  -> Cannot update', self.wrap.type, 'subproject')
+        if success and os.path.isdir(self.repo_dir):
+            self.wrap.update_hash_cache(self.repo_dir)
+        return success
+
+    def checkout(self) -> bool:
+        options = T.cast('CheckoutArguments', self.options)
+
+        if self.wrap.type != 'git' or not os.path.isdir(self.repo_dir):
+            return True
+        branch_name = options.branch_name if options.branch_name else self.wrap.get('revision')
+        if not branch_name:
+            # It could be a detached git submodule for example.
+            return True
+        self.log(f'Checkout {branch_name} in {self.wrap.name}...')
+        if self.git_checkout(branch_name, create=options.b):
+            self.git_show()
+            return True
+        return False
+
+    def download(self) -> bool:
+        self.log(f'Download {self.wrap.name}...')
+        if os.path.isdir(self.repo_dir):
+            self.log('  -> Already downloaded')
+            return True
+        try:
+            self.wrap_resolver.resolve(self.wrap.name, 'meson')
+            self.log('  -> done')
+        except WrapException as e:
+            self.log('  ->', mlog.red(str(e)))
+            return False
+        return True
+
+    def foreach(self) -> bool:
+        options = T.cast('ForeachArguments', self.options)
+
+        self.log(f'Executing command in {self.repo_dir}')
+        if not os.path.isdir(self.repo_dir):
+            self.log('  -> Not downloaded yet')
+            return True
+        cmd = [options.command] + options.args
+        p, out, _ = Popen_safe(cmd, stderr=subprocess.STDOUT, cwd=self.repo_dir)
+        if p.returncode != 0:
+            err_message = "Command '{}' returned non-zero exit status {}.".format(" ".join(cmd), p.returncode)
+            self.log('  -> ', mlog.red(err_message))
+            self.log(out, end='')
+            return False
+
+        self.log(out, end='')
+        return True
+
+    def purge(self) -> bool:
+        options = T.cast('PurgeArguments', self.options)
+
+        # if subproject is not wrap-based, then don't remove it
+        if not self.wrap.type:
+            return True
+
+        if self.wrap.redirected:
+            redirect_file = Path(self.wrap.original_filename).resolve()
+            if options.confirm:
+                redirect_file.unlink()
+            mlog.log(f'Deleting {redirect_file}')
+
+        if self.wrap.type == 'redirect':
+            redirect_file = Path(self.wrap.filename).resolve()
+            if options.confirm:
+                redirect_file.unlink()
+            self.log(f'Deleting {redirect_file}')
+
+        if options.include_cache:
+            packagecache = Path(self.wrap_resolver.cachedir).resolve()
+            try:
+                subproject_cache_file = packagecache / self.wrap.get("source_filename")
+                if subproject_cache_file.is_file():
+                    if options.confirm:
+                        subproject_cache_file.unlink()
+                    self.log(f'Deleting {subproject_cache_file}')
+            except WrapException:
+                pass
+
+            try:
+                subproject_patch_file = packagecache / self.wrap.get("patch_filename")
+                if subproject_patch_file.is_file():
+                    if options.confirm:
+                        subproject_patch_file.unlink()
+                    self.log(f'Deleting {subproject_patch_file}')
+            except WrapException:
+                pass
+
+            # Don't log that we will remove an empty directory. Since purge is
+            # parallelized, another thread could have deleted it already.
+            try:
+                if not any(packagecache.iterdir()):
+                    windows_proof_rmtree(str(packagecache))
+            except FileNotFoundError:
+                pass
+
+        # NOTE: Do not use .resolve() here; the subproject directory may be a symlink
+        subproject_source_dir = Path(self.repo_dir)
+        # Resolve only the parent, so that the full path gets printed
+        subproject_source_dir = subproject_source_dir.parent.resolve() / subproject_source_dir.name
+
+        # Don't follow symlink. This is covered by the next if statement, but why
+        # not be doubly sure.
+        if subproject_source_dir.is_symlink():
+            if options.confirm:
+                subproject_source_dir.unlink()
+            self.log(f'Deleting {subproject_source_dir}')
+            return True
+        if not subproject_source_dir.is_dir():
+            return True
+
+        try:
+            if options.confirm:
+                windows_proof_rmtree(str(subproject_source_dir))
+            self.log(f'Deleting {subproject_source_dir}')
+        except OSError as e:
+            mlog.error(f'Unable to remove: {subproject_source_dir}: {e}')
+            return False
+
+        return True
+
+    @staticmethod
+    def post_purge(options: 'PurgeArguments') -> None:
+        if not options.confirm:
+            mlog.log('')
+            mlog.log('Nothing has been deleted, run again with --confirm to apply.')
+
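+    # An illustrative note (not upstream documentation): purge is a dry run by
+    # default. `meson subprojects purge` only logs what would be deleted;
+    # adding `--confirm` performs the deletion, and `--include-cache` also
+    # removes the downloaded source and patch archives from the package cache.
+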
+    def packagefiles(self) -> bool:
+        options = T.cast('PackagefilesArguments', self.options)
+
+        if options.apply and options.save:
+            # not quite so nice as argparse failure
+            print('error: --apply and --save are mutually exclusive')
+            return False
+        if options.apply:
+            self.log(f'Re-applying patchfiles overlay for {self.wrap.name}...')
+            if not os.path.isdir(self.repo_dir):
+                self.log('  -> Not downloaded yet')
+                return True
+            self.wrap_resolver.apply_patch()
+            return True
+        if options.save:
+            if 'patch_directory' not in self.wrap.values:
+                mlog.error('can only save packagefiles to patch_directory')
+                return False
+            if 'source_filename' not in self.wrap.values:
+                mlog.error('can only save packagefiles from a [wrap-file]')
+                return False
+            archive_path = Path(self.wrap_resolver.cachedir, self.wrap.values['source_filename'])
+            lead_directory_missing = bool(self.wrap.values.get('lead_directory_missing', False))
+            directory = Path(self.repo_dir)
+            packagefiles = Path(self.wrap.filesdir, self.wrap.values['patch_directory'])
+
+            base_path = directory if lead_directory_missing else directory.parent
+            archive_files = read_archive_files(archive_path, base_path)
+            directory_files = set(directory.glob('**/*'))
+
+            self.log(f'Saving {self.wrap.name} to {packagefiles}...')
+            shutil.rmtree(packagefiles)
+            for src_path in directory_files - archive_files:
+                if not src_path.is_file():
+                    continue
+                rel_path = src_path.relative_to(directory)
+                dst_path = packagefiles / rel_path
+                dst_path.parent.mkdir(parents=True, exist_ok=True)
+                shutil.copyfile(src_path, dst_path)
+        return True
+
+
+def add_common_arguments(p: argparse.ArgumentParser) -> None:
+    p.add_argument('--sourcedir', default='.',
+                   help='Path to source directory')
+    p.add_argument('--types', default='',
+                   help=f'Comma-separated list of subproject types. Supported types are: {ALL_TYPES_STRING} (default: all)')
+    p.add_argument('--num-processes', default=None, type=int,
+                   help='How many parallel processes to use (Since 0.59.0).')
+    p.add_argument('--allow-insecure', default=False, action='store_true',
+                   help='Allow insecure server connections.')
+
+def add_subprojects_argument(p: argparse.ArgumentParser) -> None:
+    p.add_argument('subprojects', nargs='*',
+                   help='List of subprojects (default: all)')
+
+def add_wrap_update_parser(subparsers: 'SubParsers') -> argparse.ArgumentParser:
+    p = subparsers.add_parser('update', help='Update wrap files from WrapDB (Since 0.63.0)')
+    p.add_argument('--force', default=False, action='store_true',
+                   help='Update wraps that do not seem to come from WrapDB')
+    add_common_arguments(p)
+    add_subprojects_argument(p)
+    p.set_defaults(subprojects_func=Runner.update_wrapdb)
+    p.set_defaults(pre_func=Runner.pre_update_wrapdb)
+    return p
+
+def add_arguments(parser: argparse.ArgumentParser) -> None:
+    subparsers = parser.add_subparsers(title='Commands', dest='command')
+    subparsers.required = True
+
+    p = subparsers.add_parser('update', help='Update all subprojects from wrap files')
+    p.add_argument('--rebase', default=True, action='store_true',
+                   help='Rebase your branch on top of wrap\'s revision. ' +
+                        'Deprecated, it is now the default behaviour. (git only)')
+    p.add_argument('--reset', default=False, action='store_true',
+                   help='Checkout wrap\'s revision and hard reset to that commit. (git only)')
+    add_common_arguments(p)
+    add_subprojects_argument(p)
+    p.set_defaults(subprojects_func=Runner.update)
+
+    p = subparsers.add_parser('checkout', help='Checkout a branch (git only)')
+    p.add_argument('-b', default=False, action='store_true',
+                   help='Create a new branch')
+    p.add_argument('branch_name', nargs='?',
+                   help='Name of the branch to checkout or create (default: revision set in wrap file)')
+    add_common_arguments(p)
+    add_subprojects_argument(p)
+    p.set_defaults(subprojects_func=Runner.checkout)
+
+    p = subparsers.add_parser('download', help='Ensure subprojects are fetched, even if not in use. ' +
+                                               'Already downloaded subprojects are not modified. ' +
+                                               'This can be used to pre-fetch all subprojects and avoid downloads during configure.')
+    add_common_arguments(p)
+    add_subprojects_argument(p)
+    p.set_defaults(subprojects_func=Runner.download)
+
+    p = subparsers.add_parser('foreach', help='Execute a command in each subproject directory.')
+    p.add_argument('command', metavar='command ...',
+                   help='Command to execute in each subproject directory')
+    p.add_argument('args', nargs=argparse.REMAINDER,
+                   help=argparse.SUPPRESS)
+    add_common_arguments(p)
+    p.set_defaults(subprojects=[])
+    p.set_defaults(subprojects_func=Runner.foreach)
+
+    p = subparsers.add_parser('purge', help='Remove all wrap-based subproject artifacts')
+    add_common_arguments(p)
+    add_subprojects_argument(p)
+    p.add_argument('--include-cache', action='store_true', default=False, help='Remove the package cache as well')
+    p.add_argument('--confirm', action='store_true', default=False, help='Confirm the removal of subproject artifacts')
+    p.set_defaults(subprojects_func=Runner.purge)
+    p.set_defaults(post_func=Runner.post_purge)
+
+    p = subparsers.add_parser('packagefiles', help='Manage the packagefiles overlay')
+    add_common_arguments(p)
+    add_subprojects_argument(p)
+    p.add_argument('--apply', action='store_true', default=False, help='Apply packagefiles to the subproject')
+    p.add_argument('--save', action='store_true', default=False, help='Save packagefiles from the subproject')
+    p.set_defaults(subprojects_func=Runner.packagefiles)
+
+def run(options: 'Arguments') -> int:
+    source_dir = os.path.relpath(os.path.realpath(options.sourcedir))
+    if not os.path.isfile(os.path.join(source_dir, 'meson.build')):
+        mlog.error('Directory', mlog.bold(source_dir), 'does not seem to be a Meson source directory.')
+        return 1
+    with mlog.no_logging():
+        intr = IntrospectionInterpreter(source_dir, '', 'none', visitors = [AstIDGenerator()])
+        intr.load_root_meson_file()
+        intr.sanity_check_ast()
+        intr.parse_project()
+    subproject_dir = intr.subproject_dir
+    if not os.path.isdir(os.path.join(source_dir, subproject_dir)):
+        mlog.log('Directory', mlog.bold(source_dir), 'does not seem to have subprojects.')
+        return 0
+    r = Resolver(source_dir, subproject_dir, wrap_frontend=True, allow_insecure=options.allow_insecure, silent=True)
+    if options.subprojects:
+        wraps = [wrap for name, wrap in r.wraps.items() if name in options.subprojects]
+    else:
+        wraps = list(r.wraps.values())
+    types = [t.strip() for t in options.types.split(',')] if options.types else []
+    for t in types:
+        if t not in ALL_TYPES:
+            raise MesonException(f'Unknown subproject type {t!r}, supported types are: {ALL_TYPES_STRING}')
+    tasks: T.List[T.Awaitable[bool]] = []
+    task_names: T.List[str] = []
+    loop = asyncio.new_event_loop()
+    asyncio.set_event_loop(loop)
+    executor = ThreadPoolExecutor(options.num_processes)
+    if types:
+        wraps = [wrap for wrap in wraps if wrap.type in types]
+    pre_func = getattr(options, 'pre_func', None)
+    if pre_func:
+        pre_func(options)
+    logger = Logger(len(wraps))
+    for wrap in wraps:
+        dirname = Path(subproject_dir, wrap.directory).as_posix()
+        runner = Runner(logger, r, wrap, dirname, options)
+        task = loop.run_in_executor(executor, runner.run)
+        tasks.append(task)
+        task_names.append(wrap.name)
+    results = loop.run_until_complete(asyncio.gather(*tasks))
+    logger.flush()
+    post_func = getattr(options, 'post_func', None)
+    if post_func:
+        post_func(options)
+    failures = [name for name, success in zip(task_names, results) if not success]
+    if failures:
+        m = 'Please check the logs above, as the command failed in some subprojects, which may have been left in a conflicted state: '
+        m += ', '.join(failures)
+        mlog.warning(m)
+    return len(failures)
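+
+# Illustrative invocations of this tool (hypothetical examples, assuming it is
+# exposed as `meson subprojects`):
+#
+#   meson subprojects download               # pre-fetch every wrap-based subproject
+#   meson subprojects update --reset         # hard-reset git subprojects to their wrap revisions
+#   meson subprojects foreach git status     # run a command in each subproject directory
+#   meson subprojects purge --confirm --include-cache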
diff --git a/vendored-meson/meson/mesonbuild/mtest.py b/vendored-meson/meson/mesonbuild/mtest.py
new file mode 100644
index 000000000000..eb56c42be555
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/mtest.py
@@ -0,0 +1,2195 @@
+# Copyright 2016-2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# A tool to run tests in many different ways.
+from __future__ import annotations
+
+from pathlib import Path
+from collections import deque
+from contextlib import suppress
+from copy import deepcopy
+from fnmatch import fnmatch
+import argparse
+import asyncio
+import datetime
+import enum
+import json
+import multiprocessing
+import os
+import pickle
+import platform
+import random
+import re
+import signal
+import subprocess
+import shlex
+import sys
+import textwrap
+import time
+import typing as T
+import unicodedata
+import xml.etree.ElementTree as et
+
+from . import build
+from . import environment
+from . import mlog
+from .coredata import MesonVersionMismatchException, OptionKey, major_versions_differ
+from .coredata import version as coredata_version
+from .mesonlib import (MesonException, OrderedSet, RealPathAction,
+                       get_wine_shortpath, join_args, split_args, setup_vsenv)
+from .mintro import get_infodir, load_info_file
+from .programs import ExternalProgram
+from .backend.backends import TestProtocol, TestSerialisation
+
+if T.TYPE_CHECKING:
+    TYPE_TAPResult = T.Union['TAPParser.Test',
+                             'TAPParser.Error',
+                             'TAPParser.Version',
+                             'TAPParser.Plan',
+                             'TAPParser.UnknownLine',
+                             'TAPParser.Bailout']
+
+
+# GNU autotools interprets a return code of 77 from tests it executes to
+# mean that the test should be skipped.
+GNU_SKIP_RETURNCODE = 77
+
+# GNU autotools interprets a return code of 99 from tests it executes to
+# mean that the test failed even before testing what it is supposed to test.
+GNU_ERROR_RETURNCODE = 99
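+# For example (an illustrative note, not upstream documentation): a test
+# executable that calls sys.exit(77) is reported as SKIP by the harness below,
+# while one that calls sys.exit(99) is reported as ERROR.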
+
+# Exit if 3 Ctrl-C's are received within one second
+MAX_CTRLC = 3
+
+# Define unencodable xml characters' regex for replacing them with their
+# printable representation
+UNENCODABLE_XML_UNICHRS: T.List[T.Tuple[int, int]] = [
+    (0x00, 0x08), (0x0B, 0x0C), (0x0E, 0x1F), (0x7F, 0x84),
+    (0x86, 0x9F), (0xFDD0, 0xFDEF), (0xFFFE, 0xFFFF)]
+# Not narrow build
+if sys.maxunicode >= 0x10000:
+    UNENCODABLE_XML_UNICHRS.extend([
+        (0x1FFFE, 0x1FFFF), (0x2FFFE, 0x2FFFF),
+        (0x3FFFE, 0x3FFFF), (0x4FFFE, 0x4FFFF),
+        (0x5FFFE, 0x5FFFF), (0x6FFFE, 0x6FFFF),
+        (0x7FFFE, 0x7FFFF), (0x8FFFE, 0x8FFFF),
+        (0x9FFFE, 0x9FFFF), (0xAFFFE, 0xAFFFF),
+        (0xBFFFE, 0xBFFFF), (0xCFFFE, 0xCFFFF),
+        (0xDFFFE, 0xDFFFF), (0xEFFFE, 0xEFFFF),
+        (0xFFFFE, 0xFFFFF), (0x10FFFE, 0x10FFFF)])
+UNENCODABLE_XML_CHR_RANGES = [fr'{chr(low)}-{chr(high)}' for (low, high) in UNENCODABLE_XML_UNICHRS]
+UNENCODABLE_XML_CHRS_RE = re.compile('([' + ''.join(UNENCODABLE_XML_CHR_RANGES) + '])')
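+# For illustration (hypothetical input): '\x08' falls inside the (0x00, 0x08)
+# range above, so UNENCODABLE_XML_CHRS_RE.findall('a\x08b') returns ['\x08'];
+# such matches can then be replaced with a printable representation before the
+# XML log is written.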
+
+
+def is_windows() -> bool:
+    platname = platform.system().lower()
+    return platname == 'windows'
+
+def is_cygwin() -> bool:
+    return sys.platform == 'cygwin'
+
+UNIWIDTH_MAPPING = {'F': 2, 'H': 1, 'W': 2, 'Na': 1, 'N': 1, 'A': 1}
+def uniwidth(s: str) -> int:
+    result = 0
+    for c in s:
+        w = unicodedata.east_asian_width(c)
+        result += UNIWIDTH_MAPPING[w]
+    return result
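+# For example (illustrative): uniwidth('abc') == 3, while uniwidth('テスト')
+# == 6, because each of those characters is East Asian width 'W' (two columns).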
+
+def determine_worker_count() -> int:
+    varname = 'MESON_TESTTHREADS'
+    if varname in os.environ:
+        try:
+            num_workers = int(os.environ[varname])
+        except ValueError:
+            print(f'Invalid value in {varname}, using 1 thread.')
+            num_workers = 1
+    else:
+        try:
+            # Fails in some weird environments such as Debian
+            # reproducible build.
+            num_workers = multiprocessing.cpu_count()
+        except Exception:
+            num_workers = 1
+    return num_workers
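+# For example (illustrative): `MESON_TESTTHREADS=4 meson test` runs with four
+# parallel workers, while a non-integer value falls back to one worker with a
+# warning.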
+
+def add_arguments(parser: argparse.ArgumentParser) -> None:
+    parser.add_argument('--maxfail', default=0, type=int,
+                        help='Number of failing tests before aborting the '
+                        'test run. (default: 0, to disable aborting on failure)')
+    parser.add_argument('--repeat', default=1, dest='repeat', type=int,
+                        help='Number of times to run the tests.')
+    parser.add_argument('--no-rebuild', default=False, action='store_true',
+                        help='Do not rebuild before running tests.')
+    parser.add_argument('--gdb', default=False, dest='gdb', action='store_true',
+                        help='Run test under gdb.')
+    parser.add_argument('--gdb-path', default='gdb', dest='gdb_path',
+                        help='Path to the gdb binary (default: gdb).')
+    parser.add_argument('--list', default=False, dest='list', action='store_true',
+                        help='List available tests.')
+    parser.add_argument('--wrapper', default=None, dest='wrapper', type=split_args,
+                        help='wrapper to run tests with (e.g. Valgrind)')
+    parser.add_argument('-C', dest='wd', action=RealPathAction,
+                        # https://github.com/python/typeshed/issues/3107
+                        # https://github.com/python/mypy/issues/7177
+                        type=os.path.abspath,  # type: ignore
+                        help='directory to cd into before running')
+    parser.add_argument('--suite', default=[], dest='include_suites', action='append', metavar='SUITE',
+                        help='Only run tests belonging to the given suite.')
+    parser.add_argument('--no-suite', default=[], dest='exclude_suites', action='append', metavar='SUITE',
+                        help='Do not run tests belonging to the given suite.')
+    parser.add_argument('--no-stdsplit', default=True, dest='split', action='store_false',
+                        help='Do not split stderr and stdout in test logs.')
+    parser.add_argument('--print-errorlogs', default=False, action='store_true',
+                        help="Whether to print failing tests' logs.")
+    parser.add_argument('--benchmark', default=False, action='store_true',
+                        help="Run benchmarks instead of tests.")
+    parser.add_argument('--logbase', default='testlog',
+                        help="Base name for log file.")
+    parser.add_argument('--num-processes', default=determine_worker_count(), type=int,
+                        help='How many parallel processes to use.')
+    parser.add_argument('-v', '--verbose', default=False, action='store_true',
+                        help='Do not redirect stdout and stderr')
+    parser.add_argument('-q', '--quiet', default=False, action='store_true',
+                        help='Produce less output to the terminal.')
+    parser.add_argument('-t', '--timeout-multiplier', type=float, default=None,
+                        help='Define a multiplier for test timeouts, for example '
+                        'when tests run under conditions in which they take more '
+                        'time to execute. (<= 0 to disable timeout)')
+    parser.add_argument('--setup', default=None, dest='setup',
+                        help='Which test setup to use.')
+    parser.add_argument('--test-args', default=[], type=split_args,
+                        help='Arguments to pass to the specified test(s) or all tests')
+    parser.add_argument('args', nargs='*',
+                        help='Optional list of test names to run. "testname" to run all tests with that name, '
+                        '"subprojname:testname" to specifically run "testname" from "subprojname", '
+                        '"subprojname:" to run all tests defined by "subprojname".')
+
+
+def print_safe(s: str) -> None:
+    end = '' if s.endswith('\n') else '\n'
+    try:
+        print(s, end=end)
+    except UnicodeEncodeError:
+        s = s.encode('ascii', errors='backslashreplace').decode('ascii')
+        print(s, end=end)
+
+def join_lines(a: str, b: str) -> str:
+    if not a:
+        return b
+    if not b:
+        return a
+    return a + '\n' + b
+
+def dashes(s: str, dash: str, cols: int) -> str:
+    if not s:
+        return dash * cols
+    s = ' ' + s + ' '
+    width = uniwidth(s)
+    first = (cols - width) // 2
+    s = dash * first + s
+    return s + dash * (cols - first - width)
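+# For example (illustrative): dashes('hi', '-', 10) == '--- hi ---' and
+# dashes('', '=', 5) == '====='.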
+
+def returncode_to_status(retcode: int) -> str:
+    # Note: We can't use `os.WIFSIGNALED(result.returncode)` and the related
+    # functions here because the status returned by subprocess is munged. It
+    # returns a negative value if the process was killed by a signal rather
+    # than the raw status returned by `wait()`. Also, if a shell sits between
+    # Meson and the actual unit test, that shell is likely to convert a
+    # termination due to a signal into an exit status of 128 plus the signal
+    # number.
+    if retcode < 0:
+        signum = -retcode
+        try:
+            signame = signal.Signals(signum).name
+        except ValueError:
+            signame = 'SIGinvalid'
+        return f'killed by signal {signum} {signame}'
+
+    if retcode <= 128:
+        return f'exit status {retcode}'
+
+    signum = retcode - 128
+    try:
+        signame = signal.Signals(signum).name
+    except ValueError:
+        signame = 'SIGinvalid'
+    return f'(exit status {retcode} or signal {signum} {signame})'
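+# For example (illustrative):
+#   returncode_to_status(-6)  -> 'killed by signal 6 SIGABRT'
+#   returncode_to_status(1)   -> 'exit status 1'
+#   returncode_to_status(139) -> '(exit status 139 or signal 11 SIGSEGV)'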
+
+# TODO for Windows
+sh_quote: T.Callable[[str], str] = lambda x: x
+if not is_windows():
+    sh_quote = shlex.quote
+
+def env_tuple_to_str(env: T.Iterable[T.Tuple[str, str]]) -> str:
+    return ''.join(["{}={} ".format(k, sh_quote(v)) for k, v in env])
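+# For example (illustrative, POSIX quoting):
+#   env_tuple_to_str([('CC', 'gcc'), ('CFLAGS', '-O2 -g')])
+#   -> "CC=gcc CFLAGS='-O2 -g' "   (note the trailing space)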
+
+
+class TestException(MesonException):
+    pass
+
+
+@enum.unique
+class ConsoleUser(enum.Enum):
+
+    # the logger can use the console
+    LOGGER = 0
+
+    # the console is used by gdb
+    GDB = 1
+
+    # the console is used to write stdout/stderr
+    STDOUT = 2
+
+
+@enum.unique
+class TestResult(enum.Enum):
+
+    PENDING = 'PENDING'
+    RUNNING = 'RUNNING'
+    OK = 'OK'
+    TIMEOUT = 'TIMEOUT'
+    INTERRUPT = 'INTERRUPT'
+    SKIP = 'SKIP'
+    FAIL = 'FAIL'
+    EXPECTEDFAIL = 'EXPECTEDFAIL'
+    UNEXPECTEDPASS = 'UNEXPECTEDPASS'
+    ERROR = 'ERROR'
+
+    @staticmethod
+    def maxlen() -> int:
+        return 14  # len(UNEXPECTEDPASS)
+
+    def is_ok(self) -> bool:
+        return self in {TestResult.OK, TestResult.EXPECTEDFAIL}
+
+    def is_bad(self) -> bool:
+        return self in {TestResult.FAIL, TestResult.TIMEOUT, TestResult.INTERRUPT,
+                        TestResult.UNEXPECTEDPASS, TestResult.ERROR}
+
+    def is_finished(self) -> bool:
+        return self not in {TestResult.PENDING, TestResult.RUNNING}
+
+    def was_killed(self) -> bool:
+        return self in (TestResult.TIMEOUT, TestResult.INTERRUPT)
+
+    def colorize(self, s: str) -> mlog.AnsiDecorator:
+        if self.is_bad():
+            decorator = mlog.red
+        elif self in (TestResult.SKIP, TestResult.EXPECTEDFAIL):
+            decorator = mlog.yellow
+        elif self.is_finished():
+            decorator = mlog.green
+        else:
+            decorator = mlog.blue
+        return decorator(s)
+
+    def get_text(self, colorize: bool) -> str:
+        result_str = '{res:{reslen}}'.format(res=self.value, reslen=self.maxlen())
+        return self.colorize(result_str).get_text(colorize)
+
+    def get_command_marker(self) -> str:
+        return str(self.colorize('>>> '))
+
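+# A few illustrative values of the helpers above: TestResult.OK.is_ok() and
+# TestResult.EXPECTEDFAIL.is_ok() are both True, TestResult.TIMEOUT.was_killed()
+# is True, and TestResult.PENDING.is_finished() is False.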
+
+class TAPParser:
+    class Plan(T.NamedTuple):
+        num_tests: int
+        late: bool
+        skipped: bool
+        explanation: T.Optional[str]
+
+    class Bailout(T.NamedTuple):
+        message: str
+
+    class Test(T.NamedTuple):
+        number: int
+        name: str
+        result: TestResult
+        explanation: T.Optional[str]
+
+        def __str__(self) -> str:
+            return f'{self.number} {self.name}'.strip()
+
+    class Error(T.NamedTuple):
+        message: str
+
+    class UnknownLine(T.NamedTuple):
+        message: str
+        lineno: int
+
+    class Version(T.NamedTuple):
+        version: int
+
+    _MAIN = 1
+    _AFTER_TEST = 2
+    _YAML = 3
+
+    _RE_BAILOUT = re.compile(r'Bail out!\s*(.*)')
+    _RE_DIRECTIVE = re.compile(r'(?:\s*\#\s*([Ss][Kk][Ii][Pp]\S*|[Tt][Oo][Dd][Oo])\b\s*(.*))?')
+    _RE_PLAN = re.compile(r'1\.\.([0-9]+)' + _RE_DIRECTIVE.pattern)
+    _RE_TEST = re.compile(r'((?:not )?ok)\s*(?:([0-9]+)\s*)?([^#]*)' + _RE_DIRECTIVE.pattern)
+    _RE_VERSION = re.compile(r'TAP version ([0-9]+)')
+    _RE_YAML_START = re.compile(r'(\s+)---.*')
+    _RE_YAML_END = re.compile(r'\s+\.\.\.\s*')
+
+    found_late_test = False
+    bailed_out = False
+    plan: T.Optional[Plan] = None
+    lineno = 0
+    num_tests = 0
+    yaml_lineno: T.Optional[int] = None
+    yaml_indent = ''
+    state = _MAIN
+    version = 12
+
+    def parse_test(self, ok: bool, num: int, name: str, directive: T.Optional[str], explanation: T.Optional[str]) -> \
+            T.Generator[T.Union['TAPParser.Test', 'TAPParser.Error'], None, None]:
+        name = name.strip()
+        explanation = explanation.strip() if explanation else None
+        if directive is not None:
+            directive = directive.upper()
+            if directive.startswith('SKIP'):
+                if ok:
+                    yield self.Test(num, name, TestResult.SKIP, explanation)
+                    return
+            elif directive == 'TODO':
+                yield self.Test(num, name, TestResult.UNEXPECTEDPASS if ok else TestResult.EXPECTEDFAIL, explanation)
+                return
+            else:
+                yield self.Error(f'invalid directive "{directive}"')
+
+        yield self.Test(num, name, TestResult.OK if ok else TestResult.FAIL, explanation)
+
+    async def parse_async(self, lines: T.AsyncIterator[str]) -> T.AsyncIterator[TYPE_TAPResult]:
+        async for line in lines:
+            for event in self.parse_line(line):
+                yield event
+        for event in self.parse_line(None):
+            yield event
+
+    def parse(self, io: T.Iterator[str]) -> T.Iterator[TYPE_TAPResult]:
+        for line in io:
+            yield from self.parse_line(line)
+        yield from self.parse_line(None)
+
+    def parse_line(self, line: T.Optional[str]) -> T.Iterator[TYPE_TAPResult]:
+        if line is not None:
+            self.lineno += 1
+            line = line.rstrip()
+
+            # YAML blocks are only accepted after a test
+            if self.state == self._AFTER_TEST:
+                if self.version >= 13:
+                    m = self._RE_YAML_START.match(line)
+                    if m:
+                        self.state = self._YAML
+                        self.yaml_lineno = self.lineno
+                        self.yaml_indent = m.group(1)
+                        return
+                self.state = self._MAIN
+
+            elif self.state == self._YAML:
+                if self._RE_YAML_END.match(line):
+                    self.state = self._MAIN
+                    return
+                if line.startswith(self.yaml_indent):
+                    return
+                yield self.Error(f'YAML block not terminated (started on line {self.yaml_lineno})')
+                self.state = self._MAIN
+
+            assert self.state == self._MAIN
+            if not line or line.startswith('#'):
+                return
+
+            m = self._RE_TEST.match(line)
+            if m:
+                if self.plan and self.plan.late and not self.found_late_test:
+                    yield self.Error('unexpected test after late plan')
+                    self.found_late_test = True
+                self.num_tests += 1
+                num = self.num_tests if m.group(2) is None else int(m.group(2))
+                if num != self.num_tests:
+                    yield self.Error('out of order test numbers')
+                yield from self.parse_test(m.group(1) == 'ok', num,
+                                           m.group(3), m.group(4), m.group(5))
+                self.state = self._AFTER_TEST
+                return
+
+            m = self._RE_PLAN.match(line)
+            if m:
+                if self.plan:
+                    yield self.Error('more than one plan found')
+                else:
+                    num_tests = int(m.group(1))
+                    skipped = num_tests == 0
+                    if m.group(2):
+                        if m.group(2).upper().startswith('SKIP'):
+                            if num_tests > 0:
+                                yield self.Error('invalid SKIP directive for plan')
+                            skipped = True
+                        else:
+                            yield self.Error('invalid directive for plan')
+                    self.plan = self.Plan(num_tests=num_tests, late=(self.num_tests > 0),
+                                          skipped=skipped, explanation=m.group(3))
+                    yield self.plan
+                return
+
+            m = self._RE_BAILOUT.match(line)
+            if m:
+                yield self.Bailout(m.group(1))
+                self.bailed_out = True
+                return
+
+            m = self._RE_VERSION.match(line)
+            if m:
+                # The TAP version is only accepted as the first line
+                if self.lineno != 1:
+                    yield self.Error('version number must be on the first line')
+                    return
+                self.version = int(m.group(1))
+                if self.version < 13:
+                    yield self.Error('version number should be at least 13')
+                else:
+                    yield self.Version(version=self.version)
+                return
+
+            # unknown syntax
+            yield self.UnknownLine(line, self.lineno)
+        else:
+            # end of file
+            if self.state == self._YAML:
+                yield self.Error(f'YAML block not terminated (started on line {self.yaml_lineno})')
+
+            if not self.bailed_out and self.plan and self.num_tests != self.plan.num_tests:
+                if self.num_tests < self.plan.num_tests:
+                    yield self.Error(f'Too few tests run (expected {self.plan.num_tests}, got {self.num_tests})')
+                else:
+                    yield self.Error(f'Too many tests run (expected {self.plan.num_tests}, got {self.num_tests})')
+
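+# A minimal sketch of how TAPParser is consumed (hypothetical input):
+#
+#   for event in TAPParser().parse(iter(['1..2', 'ok 1 first', 'not ok 2 second'])):
+#       ...  # yields a Plan, then a Test with TestResult.OK, then one with FAIL
+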
+class TestLogger:
+    def flush(self) -> None:
+        pass
+
+    def start(self, harness: 'TestHarness') -> None:
+        pass
+
+    def start_test(self, harness: 'TestHarness', test: 'TestRun') -> None:
+        pass
+
+    def log_subtest(self, harness: 'TestHarness', test: 'TestRun', s: str, res: TestResult) -> None:
+        pass
+
+    def log(self, harness: 'TestHarness', result: 'TestRun') -> None:
+        pass
+
+    async def finish(self, harness: 'TestHarness') -> None:
+        pass
+
+    def close(self) -> None:
+        pass
+
+
+class TestFileLogger(TestLogger):
+    def __init__(self, filename: str, errors: str = 'replace') -> None:
+        self.filename = filename
+        self.file = open(filename, 'w', encoding='utf-8', errors=errors)
+
+    def close(self) -> None:
+        if self.file:
+            self.file.close()
+            self.file = None
+
+
+class ConsoleLogger(TestLogger):
+    ASCII_SPINNER = ['..', ':.', '.:']
+    SPINNER = ["\U0001f311", "\U0001f312", "\U0001f313", "\U0001f314",
+               "\U0001f315", "\U0001f316", "\U0001f317", "\U0001f318"]
+
+    SCISSORS = "\u2700 "
+    HLINE = "\u2015"
+    RTRI = "\u25B6 "
+
+    def __init__(self) -> None:
+        self.running_tests = OrderedSet()  # type: OrderedSet['TestRun']
+        self.progress_test = None          # type: T.Optional['TestRun']
+        self.progress_task = None          # type: T.Optional[asyncio.Future]
+        self.max_left_width = 0            # type: int
+        self.stop = False
+        # TODO: before 3.10 this cannot be created immediately, because
+        # it will create a new event loop
+        self.update: asyncio.Event
+        self.should_erase_line = ''
+        self.test_count = 0
+        self.started_tests = 0
+        self.spinner_index = 0
+        try:
+            self.cols, _ = os.get_terminal_size(1)
+            self.is_tty = True
+        except OSError:
+            self.cols = 80
+            self.is_tty = False
+
+        self.output_start = dashes(self.SCISSORS, self.HLINE, self.cols - 2)
+        self.output_end = dashes('', self.HLINE, self.cols - 2)
+        self.sub = self.RTRI
+        self.spinner = self.SPINNER
+        try:
+            self.output_start.encode(sys.stdout.encoding or 'ascii')
+        except UnicodeEncodeError:
+            self.output_start = dashes('8<', '-', self.cols - 2)
+            self.output_end = dashes('', '-', self.cols - 2)
+            self.sub = '| '
+            self.spinner = self.ASCII_SPINNER
+
+    def flush(self) -> None:
+        if self.should_erase_line:
+            print(self.should_erase_line, end='')
+            self.should_erase_line = ''
+
+    def print_progress(self, line: str) -> None:
+        print(self.should_erase_line, line, sep='', end='\r')
+        self.should_erase_line = '\x1b[K'
+
+    def request_update(self) -> None:
+        self.update.set()
+
+    def emit_progress(self, harness: 'TestHarness') -> None:
+        if self.progress_test is None:
+            self.flush()
+            return
+
+        if len(self.running_tests) == 1:
+            count = f'{self.started_tests}/{self.test_count}'
+        else:
+            count = '{}-{}/{}'.format(self.started_tests - len(self.running_tests) + 1,
+                                      self.started_tests, self.test_count)
+
+        left = '[{}] {} '.format(count, self.spinner[self.spinner_index])
+        self.spinner_index = (self.spinner_index + 1) % len(self.spinner)
+
+        right = '{spaces} {dur:{durlen}}'.format(
+            spaces=' ' * TestResult.maxlen(),
+            dur=int(time.time() - self.progress_test.starttime),
+            durlen=harness.duration_max_len)
+        if self.progress_test.timeout:
+            right += '/{timeout:{durlen}}'.format(
+                timeout=self.progress_test.timeout,
+                durlen=harness.duration_max_len)
+        right += 's'
+        details = self.progress_test.get_details()
+        if details:
+            right += '   ' + details
+
+        line = harness.format(self.progress_test, colorize=True,
+                              max_left_width=self.max_left_width,
+                              left=left, right=right)
+        self.print_progress(line)
+
+    def start(self, harness: 'TestHarness') -> None:
+        async def report_progress() -> None:
+            loop = asyncio.get_running_loop()
+            next_update = 0.0
+            self.request_update()
+            while not self.stop:
+                await self.update.wait()
+                self.update.clear()
+                # We may get here simply because the progress line has been
+                # overwritten, so do not always switch.  Only do so every
+                # second, or if the printed test has finished
+                if loop.time() >= next_update:
+                    self.progress_test = None
+                    next_update = loop.time() + 1
+                    loop.call_at(next_update, self.request_update)
+
+                if (self.progress_test and
+                        self.progress_test.res is not TestResult.RUNNING):
+                    self.progress_test = None
+
+                if not self.progress_test:
+                    if not self.running_tests:
+                        continue
+                    # Pick a test in round robin order
+                    self.progress_test = self.running_tests.pop(last=False)
+                    self.running_tests.add(self.progress_test)
+
+                self.emit_progress(harness)
+            self.flush()
+
+        self.update = asyncio.Event()
+        self.test_count = harness.test_count
+        self.cols = max(self.cols, harness.max_left_width + 30)
+
+        if self.is_tty and not harness.need_console:
+            # Account for "[aa-bb/cc] OO " in the progress report
+            self.max_left_width = 3 * len(str(self.test_count)) + 8
+            self.progress_task = asyncio.ensure_future(report_progress())
+
+    def start_test(self, harness: 'TestHarness', test: 'TestRun') -> None:
+        if test.verbose and test.cmdline:
+            self.flush()
+            print(harness.format(test, mlog.colorize_console(),
+                                 max_left_width=self.max_left_width,
+                                 right=test.res.get_text(mlog.colorize_console())))
+            print(test.res.get_command_marker() + test.cmdline)
+            if test.direct_stdout:
+                print(self.output_start, flush=True)
+            elif not test.needs_parsing:
+                print(flush=True)
+
+        self.started_tests += 1
+        self.running_tests.add(test)
+        self.running_tests.move_to_end(test, last=False)
+        self.request_update()
+
+    def shorten_log(self, harness: 'TestHarness', result: 'TestRun') -> str:
+        if not result.verbose and not harness.options.print_errorlogs:
+            return ''
+
+        log = result.get_log(mlog.colorize_console(),
+                             stderr_only=result.needs_parsing)
+        if result.verbose:
+            return log
+
+        lines = log.splitlines()
+        if len(lines) < 100:
+            return log
+        else:
+            return str(mlog.bold('Listing only the last 100 lines from a long log.\n')) + '\n'.join(lines[-100:])
+
+    def print_log(self, harness: 'TestHarness', result: 'TestRun') -> None:
+        if not result.verbose:
+            cmdline = result.cmdline
+            if not cmdline:
+                print(result.res.get_command_marker() + result.stdo)
+                return
+            print(result.res.get_command_marker() + cmdline)
+
+        log = self.shorten_log(harness, result)
+        if log:
+            print(self.output_start)
+            print_safe(log)
+            print(self.output_end)
+
+    def log_subtest(self, harness: 'TestHarness', test: 'TestRun', s: str, result: TestResult) -> None:
+        if test.verbose or (harness.options.print_errorlogs and result.is_bad()):
+            self.flush()
+            print(harness.format(test, mlog.colorize_console(), max_left_width=self.max_left_width,
+                                 prefix=self.sub,
+                                 middle=s,
+                                 right=result.get_text(mlog.colorize_console())), flush=True)
+
+            self.request_update()
+
+    def log(self, harness: 'TestHarness', result: 'TestRun') -> None:
+        self.running_tests.remove(result)
+        if result.res is TestResult.TIMEOUT and (result.verbose or
+                                                 harness.options.print_errorlogs):
+            self.flush()
+            print(f'{result.name} timed out (after {result.timeout} seconds)')
+
+        if not harness.options.quiet or not result.res.is_ok():
+            self.flush()
+            if result.cmdline and result.direct_stdout:
+                print(self.output_end)
+                print(harness.format(result, mlog.colorize_console(), max_left_width=self.max_left_width))
+            else:
+                print(harness.format(result, mlog.colorize_console(), max_left_width=self.max_left_width),
+                      flush=True)
+                if result.verbose or result.res.is_bad():
+                    self.print_log(harness, result)
+            if result.warnings:
+                print(flush=True)
+                for w in result.warnings:
+                    print(w, flush=True)
+                print(flush=True)
+            if result.verbose or result.res.is_bad():
+                print(flush=True)
+
+        self.request_update()
+
+    async def finish(self, harness: 'TestHarness') -> None:
+        self.stop = True
+        self.request_update()
+        if self.progress_task:
+            await self.progress_task
+
+        if harness.collected_failures and \
+                (harness.options.print_errorlogs or harness.options.verbose):
+            print("\nSummary of Failures:\n")
+            for i, result in enumerate(harness.collected_failures, 1):
+                print(harness.format(result, mlog.colorize_console()))
+
+        print(harness.summary())
+
+
+class TextLogfileBuilder(TestFileLogger):
+    def start(self, harness: 'TestHarness') -> None:
+        self.file.write(f'Log of Meson test suite run on {datetime.datetime.now().isoformat()}\n\n')
+        inherit_env = env_tuple_to_str(os.environ.items())
+        self.file.write(f'Inherited environment: {inherit_env}\n\n')
+
+    def log(self, harness: 'TestHarness', result: 'TestRun') -> None:
+        title = f'{result.num}/{harness.test_count}'
+        self.file.write(dashes(title, '=', 78) + '\n')
+        self.file.write('test:         ' + result.name + '\n')
+        starttime_str = time.strftime("%H:%M:%S", time.gmtime(result.starttime))
+        self.file.write('start time:   ' + starttime_str + '\n')
+        self.file.write('duration:     ' + '%.2fs' % result.duration + '\n')
+        self.file.write('result:       ' + result.get_exit_status() + '\n')
+        if result.cmdline:
+            self.file.write('command:      ' + result.cmdline + '\n')
+        if result.stdo:
+            name = 'stdout' if harness.options.split else 'output'
+            self.file.write(dashes(name, '-', 78) + '\n')
+            self.file.write(result.stdo)
+        if result.stde:
+            self.file.write(dashes('stderr', '-', 78) + '\n')
+            self.file.write(result.stde)
+        self.file.write(dashes('', '=', 78) + '\n\n')
+
+    async def finish(self, harness: 'TestHarness') -> None:
+        if harness.collected_failures:
+            self.file.write("\nSummary of Failures:\n\n")
+            for i, result in enumerate(harness.collected_failures, 1):
+                self.file.write(harness.format(result, False) + '\n')
+        self.file.write(harness.summary())
+
+        print(f'Full log written to {self.filename}')
+
+
+class JsonLogfileBuilder(TestFileLogger):
+    def log(self, harness: 'TestHarness', result: 'TestRun') -> None:
+        jresult = {'name': result.name,
+                   'stdout': result.stdo,
+                   'result': result.res.value,
+                   'starttime': result.starttime,
+                   'duration': result.duration,
+                   'returncode': result.returncode,
+                   'env': result.env,
+                   'command': result.cmd}  # type: T.Dict[str, T.Any]
+        if result.stde:
+            jresult['stderr'] = result.stde
+        self.file.write(json.dumps(jresult) + '\n')
+
+
+class JunitBuilder(TestLogger):
+
+    """Builder for Junit test results.
+
+    Junit is impossible to stream out: it requires attributes counting the
+    total number of tests, failures, skips, and errors in the root element
+    and in each test suite. As such, we use a builder class to track each
+    test case, and calculate all metadata before writing it out.
+
+    For tests with multiple results (like from a TAP test), we record the
+    test as a suite with the project_name.test_name. This allows us to track
+    each result separately. For tests with only one result (such as exit-code
+    tests) we record each one into a suite with the name project_name. The use
+    of the project_name allows us to sort subproject tests separately from
+    the root project.
+    """
+
+    def __init__(self, filename: str) -> None:
+        self.filename = filename
+        self.root = et.Element(
+            'testsuites', tests='0', errors='0', failures='0')
+        self.suites = {}  # type: T.Dict[str, et.Element]
+
+    def log(self, harness: 'TestHarness', test: 'TestRun') -> None:
+        """Log a single test case."""
+        if test.junit is not None:
+            for suite in test.junit.findall('.//testsuite'):
+                # Assume that we don't need to merge anything here...
+                suite.attrib['name'] = '{}.{}.{}'.format(test.project, test.name, suite.attrib['name'])
+
+                # GTest can inject invalid attributes
+                for case in suite.findall('.//testcase[@result]'):
+                    del case.attrib['result']
+                for case in suite.findall('.//testcase[@timestamp]'):
+                    del case.attrib['timestamp']
+                for case in suite.findall('.//testcase[@file]'):
+                    del case.attrib['file']
+                for case in suite.findall('.//testcase[@line]'):
+                    del case.attrib['line']
+                self.root.append(suite)
+            return
+
+        # In this case we have a test binary with multiple results.
+        # Record each of those results separately.
+        if test.results:
+            suitename = f'{test.project}.{test.name}'
+            assert suitename not in self.suites or harness.options.repeat > 1, 'duplicate suite'
+
+            suite = self.suites[suitename] = et.Element(
+                'testsuite',
+                name=suitename,
+                tests=str(len(test.results)),
+                errors=str(sum(1 for r in test.results if r.result in
+                               {TestResult.INTERRUPT, TestResult.ERROR})),
+                failures=str(sum(1 for r in test.results if r.result in
+                                 {TestResult.FAIL, TestResult.UNEXPECTEDPASS, TestResult.TIMEOUT})),
+                skipped=str(sum(1 for r in test.results if r.result is TestResult.SKIP)),
+                time=str(test.duration),
+            )
+
+            for subtest in test.results:
+                # Both name and classname are required. Use the suite name as
+                # the class name, so that e.g. GitLab groups testcases correctly.
+                testcase = et.SubElement(suite, 'testcase', name=str(subtest), classname=suitename)
+                if subtest.result is TestResult.SKIP:
+                    et.SubElement(testcase, 'skipped')
+                elif subtest.result is TestResult.ERROR:
+                    et.SubElement(testcase, 'error')
+                elif subtest.result is TestResult.FAIL:
+                    et.SubElement(testcase, 'failure')
+                elif subtest.result is TestResult.UNEXPECTEDPASS:
+                    fail = et.SubElement(testcase, 'failure')
+                    fail.text = 'Test unexpectedly passed.'
+                elif subtest.result is TestResult.INTERRUPT:
+                    fail = et.SubElement(testcase, 'error')
+                    fail.text = 'Test was interrupted by user.'
+                elif subtest.result is TestResult.TIMEOUT:
+                    fail = et.SubElement(testcase, 'error')
+                    fail.text = 'Test did not finish before configured timeout.'
+                if subtest.explanation:
+                    et.SubElement(testcase, 'system-out').text = subtest.explanation
+            if test.stdo:
+                out = et.SubElement(suite, 'system-out')
+                out.text = test.stdo.rstrip()
+            if test.stde:
+                err = et.SubElement(suite, 'system-err')
+                err.text = test.stde.rstrip()
+        else:
+            if test.project not in self.suites:
+                suite = self.suites[test.project] = et.Element(
+                    'testsuite', name=test.project, tests='1', errors='0',
+                    failures='0', skipped='0', time=str(test.duration))
+            else:
+                suite = self.suites[test.project]
+                suite.attrib['tests'] = str(int(suite.attrib['tests']) + 1)
+
+            testcase = et.SubElement(suite, 'testcase', name=test.name,
+                                     classname=test.project, time=str(test.duration))
+            if test.res is TestResult.SKIP:
+                et.SubElement(testcase, 'skipped')
+                suite.attrib['skipped'] = str(int(suite.attrib['skipped']) + 1)
+            elif test.res is TestResult.ERROR:
+                et.SubElement(testcase, 'error')
+                suite.attrib['errors'] = str(int(suite.attrib['errors']) + 1)
+            elif test.res is TestResult.FAIL:
+                et.SubElement(testcase, 'failure')
+                suite.attrib['failures'] = str(int(suite.attrib['failures']) + 1)
+            if test.stdo:
+                out = et.SubElement(testcase, 'system-out')
+                out.text = test.stdo.rstrip()
+            if test.stde:
+                err = et.SubElement(testcase, 'system-err')
+                err.text = test.stde.rstrip()
+
+    async def finish(self, harness: 'TestHarness') -> None:
+        """Calculate total test counts and write out the xml result."""
+        for suite in self.suites.values():
+            self.root.append(suite)
+            # The 'skipped' attribute is not allowed on the root "testsuites" element
+            for attr in ['tests', 'errors', 'failures']:
+                self.root.attrib[attr] = str(int(self.root.attrib[attr]) + int(suite.attrib[attr]))
+
+        tree = et.ElementTree(self.root)
+        with open(self.filename, 'wb') as f:
+            tree.write(f, encoding='utf-8', xml_declaration=True)
+
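+# The written file has roughly this shape (an illustrative sketch; attribute
+# values vary per run):
+#
+#   <?xml version='1.0' encoding='utf-8'?>
+#   <testsuites tests='2' errors='0' failures='1'>
+#     <testsuite name='proj' tests='2' errors='0' failures='1' skipped='0' time='0.1'>
+#       <testcase name='t1' classname='proj' time='0.05'/>
+#       ...
+#     </testsuite>
+#   </testsuites>
+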
+
+class TestRun:
+    TEST_NUM = 0
+    PROTOCOL_TO_CLASS: T.Dict[TestProtocol, T.Type['TestRun']] = {}
+
+    def __new__(cls, test: TestSerialisation, *args: T.Any, **kwargs: T.Any) -> T.Any:
+        return super().__new__(TestRun.PROTOCOL_TO_CLASS[test.protocol])
+
+    def __init__(self, test: TestSerialisation, test_env: T.Dict[str, str],
+                 name: str, timeout: T.Optional[int], is_parallel: bool, verbose: bool):
+        self.res = TestResult.PENDING
+        self.test = test
+        self._num = None       # type: T.Optional[int]
+        self.name = name
+        self.timeout = timeout
+        self.results = []      # type: T.List[TAPParser.Test]
+        self.returncode = None  # type: T.Optional[int]
+        self.starttime = None  # type: T.Optional[float]
+        self.duration = None   # type: T.Optional[float]
+        self.stdo = ''
+        self.stde = ''
+        self.additional_error = ''
+        self.cmd = None        # type: T.Optional[T.List[str]]
+        self.env = test_env    # type: T.Dict[str, str]
+        self.should_fail = test.should_fail
+        self.project = test.project_name
+        self.junit = None      # type: T.Optional[et.ElementTree]
+        self.is_parallel = is_parallel
+        self.verbose = verbose
+        self.warnings = []     # type: T.List[str]
+
+    def start(self, cmd: T.List[str]) -> None:
+        self.res = TestResult.RUNNING
+        self.starttime = time.time()
+        self.cmd = cmd
+
+    @property
+    def num(self) -> int:
+        if self._num is None:
+            TestRun.TEST_NUM += 1
+            self._num = TestRun.TEST_NUM
+        return self._num
+
+    @property
+    def direct_stdout(self) -> bool:
+        return self.verbose and not self.is_parallel and not self.needs_parsing
+
+    def get_results(self) -> str:
+        if self.results:
+            # running or succeeded
+            passed = sum(x.result.is_ok() for x in self.results)
+            ran = sum(x.result is not TestResult.SKIP for x in self.results)
+            if passed == ran:
+                return f'{passed} subtests passed'
+            else:
+                return f'{passed}/{ran} subtests passed'
+        return ''
+
+    def get_exit_status(self) -> str:
+        return returncode_to_status(self.returncode)
+
+    def get_details(self) -> str:
+        if self.res is TestResult.PENDING:
+            return ''
+        if self.returncode:
+            return self.get_exit_status()
+        return self.get_results()
+
+    def _complete(self) -> None:
+        if self.res == TestResult.RUNNING:
+            self.res = TestResult.OK
+        assert isinstance(self.res, TestResult)
+        if self.should_fail and self.res in (TestResult.OK, TestResult.FAIL):
+            self.res = TestResult.UNEXPECTEDPASS if self.res is TestResult.OK else TestResult.EXPECTEDFAIL
+        if self.stdo and not self.stdo.endswith('\n'):
+            self.stdo += '\n'
+        if self.stde and not self.stde.endswith('\n'):
+            self.stde += '\n'
+        self.duration = time.time() - self.starttime
+
+    @property
+    def cmdline(self) -> T.Optional[str]:
+        if not self.cmd:
+            return None
+        test_only_env = set(self.env.items()) - set(os.environ.items())
+        return env_tuple_to_str(test_only_env) + \
+            ' '.join(sh_quote(x) for x in self.cmd)
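+    # For example (illustrative): a test launched as ['./prog', 'arg 1'] with
+    # FOO=bar set on top of the inherited environment renders as:
+    #   FOO=bar ./prog 'arg 1'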
+
+    def complete_skip(self) -> None:
+        self.starttime = time.time()
+        self.returncode = GNU_SKIP_RETURNCODE
+        self.res = TestResult.SKIP
+        self._complete()
+
+    def complete(self) -> None:
+        self._complete()
+
+    def get_log(self, colorize: bool = False, stderr_only: bool = False) -> str:
+        stdo = '' if stderr_only else self.stdo
+        if self.stde or self.additional_error:
+            res = ''
+            if stdo:
+                res += mlog.cyan('stdout:').get_text(colorize) + '\n'
+                res += stdo
+                if res[-1:] != '\n':
+                    res += '\n'
+            res += mlog.cyan('stderr:').get_text(colorize) + '\n'
+            res += join_lines(self.stde, self.additional_error)
+        else:
+            res = stdo
+        if res and res[-1:] != '\n':
+            res += '\n'
+        return res
+
+    @property
+    def needs_parsing(self) -> bool:
+        return False
+
+    async def parse(self, harness: 'TestHarness', lines: T.AsyncIterator[str]) -> None:
+        async for l in lines:
+            pass
+
+
+class TestRunExitCode(TestRun):
+
+    def complete(self) -> None:
+        if self.res != TestResult.RUNNING:
+            pass
+        elif self.returncode == GNU_SKIP_RETURNCODE:
+            self.res = TestResult.SKIP
+        elif self.returncode == GNU_ERROR_RETURNCODE:
+            self.res = TestResult.ERROR
+        else:
+            self.res = TestResult.FAIL if bool(self.returncode) else TestResult.OK
+        super().complete()
+
+TestRun.PROTOCOL_TO_CLASS[TestProtocol.EXITCODE] = TestRunExitCode
+
+
+class TestRunGTest(TestRunExitCode):
+    def complete(self) -> None:
+        filename = f'{self.test.name}.xml'
+        if self.test.workdir:
+            filename = os.path.join(self.test.workdir, filename)
+
+        try:
+            with open(filename, 'r', encoding='utf8', errors='replace') as f:
+                self.junit = et.parse(f)
+        except FileNotFoundError:
+            # This can happen if the test fails to run or complete for some
+            # reason, like the rpath for libgtest isn't properly set. ExitCode
+            # will handle the failure, don't generate a stacktrace.
+            pass
+        except et.ParseError as e:
+            # ExitCode will handle the failure, don't generate a stacktrace.
+            mlog.error(f'Unable to parse {filename}: {e!s}')
+
+        super().complete()
+
+TestRun.PROTOCOL_TO_CLASS[TestProtocol.GTEST] = TestRunGTest
+
+
+class TestRunTAP(TestRun):
+    @property
+    def needs_parsing(self) -> bool:
+        return True
+
+    def complete(self) -> None:
+        if self.returncode != 0 and not self.res.was_killed():
+            self.res = TestResult.ERROR
+            self.stde = self.stde or ''
+            self.stde += f'\n(test program exited with status code {self.returncode})'
+        super().complete()
+
+    async def parse(self, harness: 'TestHarness', lines: T.AsyncIterator[str]) -> None:
+        res = None
+        warnings = [] # type: T.List[TAPParser.UnknownLine]
+        version = 12
+
+        async for i in TAPParser().parse_async(lines):
+            if isinstance(i, TAPParser.Version):
+                version = i.version
+            elif isinstance(i, TAPParser.Bailout):
+                res = TestResult.ERROR
+                harness.log_subtest(self, i.message, res)
+            elif isinstance(i, TAPParser.Test):
+                self.results.append(i)
+                if i.result.is_bad():
+                    res = TestResult.FAIL
+                harness.log_subtest(self, i.name or f'subtest {i.number}', i.result)
+            elif isinstance(i, TAPParser.UnknownLine):
+                warnings.append(i)
+            elif isinstance(i, TAPParser.Error):
+                self.additional_error += 'TAP parsing error: ' + i.message
+                res = TestResult.ERROR
+
+        if warnings:
+            unknown = str(mlog.yellow('UNKNOWN'))
+            width = len(str(max(i.lineno for i in warnings)))
+            for w in warnings:
+                self.warnings.append(f'stdout: {w.lineno:{width}}: {unknown}: {w.message}')
+            if version > 13:
+                self.warnings.append('Unknown TAP output lines have been ignored. Please open a feature request to\n'
+                                     'implement them, or prefix them with a # if they are not TAP syntax.')
+            else:
+                self.warnings.append(str(mlog.red('ERROR')) + ': Unknown TAP output lines for a supported TAP version.\n'
+                                     'This is probably a bug in the test; if they are not TAP syntax, prefix them with a #')
+        if all(t.result is TestResult.SKIP for t in self.results):
+            # This includes the case where self.results is empty
+            res = TestResult.SKIP
+
+        if res and self.res == TestResult.RUNNING:
+            self.res = res
+
+TestRun.PROTOCOL_TO_CLASS[TestProtocol.TAP] = TestRunTAP
+
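+# Illustrative TAP fragment and how parse() above classifies it (the event
+# classes come from TAPParser, defined elsewhere in this module):
+#
+#     TAP version 13      -> TAPParser.Version(13)
+#     ok 1 - frobnicates  -> TAPParser.Test(...), logged as a subtest
+#     not ok 2 - melts    -> result.is_bad(), so res becomes TestResult.FAIL
+#     Bail out! oom       -> TAPParser.Bailout, res becomes TestResult.ERROR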
+
+class TestRunRust(TestRun):
+    @property
+    def needs_parsing(self) -> bool:
+        return True
+
+    async def parse(self, harness: 'TestHarness', lines: T.AsyncIterator[str]) -> None:
+        def parse_res(n: int, name: str, result: str) -> TAPParser.Test:
+            if result == 'ok':
+                return TAPParser.Test(n, name, TestResult.OK, None)
+            elif result == 'ignored':
+                return TAPParser.Test(n, name, TestResult.SKIP, None)
+            elif result == 'FAILED':
+                return TAPParser.Test(n, name, TestResult.FAIL, None)
+            return TAPParser.Test(n, name, TestResult.ERROR,
+                                  f'Unsupported output from rust test: {result}')
+
+        n = 1
+        async for line in lines:
+            if line.startswith('test ') and not line.startswith('test result'):
+                _, name, _, result = line.rstrip().split(' ')
+                name = name.replace('::', '.')
+                t = parse_res(n, name, result)
+                self.results.append(t)
+                harness.log_subtest(self, name, t.result)
+                n += 1
+
+        res = None
+
+        if all(t.result is TestResult.SKIP for t in self.results):
+            # This includes the case where self.results is empty
+            res = TestResult.SKIP
+        elif any(t.result is TestResult.ERROR for t in self.results):
+            res = TestResult.ERROR
+        elif any(t.result is TestResult.FAIL for t in self.results):
+            res = TestResult.FAIL
+
+        if res and self.res == TestResult.RUNNING:
+            self.res = res
+
+TestRun.PROTOCOL_TO_CLASS[TestProtocol.RUST] = TestRunRust
+
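+# Example lines from the Rust libtest harness as handled above (illustrative):
+#
+#     test tests::math::adds ... ok        -> OK, logged as 'tests.math.adds'
+#     test tests::math::panics ... FAILED  -> FAIL
+#     test result: FAILED. 0 passed        -> skipped ('test result' prefix)
+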
+# Find unencodable characters in XML output and replace them with
+# their printable representation
+def replace_unencodable_xml_chars(original_str: str) -> str:
+    # repr() wraps its result in `'` quotes; [1:-1] strips them so only the
+    # printable escape of the illegal character remains
+    replacement_lambda = lambda illegal_chr: repr(illegal_chr.group())[1:-1]
+    return UNENCODABLE_XML_CHRS_RE.sub(replacement_lambda, original_str)
+
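+# For example (illustrative, assuming UNENCODABLE_XML_CHRS_RE matches control
+# characters such as NUL):
+#
+#     >>> replace_unencodable_xml_chars('ok\x00done')
+#     'ok\\x00done'
+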
+def decode(stream: T.Union[None, bytes]) -> str:
+    if stream is None:
+        return ''
+    try:
+        return replace_unencodable_xml_chars(stream.decode('utf-8'))
+    except UnicodeDecodeError:
+        return replace_unencodable_xml_chars(stream.decode('iso-8859-1', errors='ignore'))
+
+async def read_decode(reader: asyncio.StreamReader,
+                      queue: T.Optional['asyncio.Queue[T.Optional[str]]'],
+                      console_mode: ConsoleUser) -> str:
+    stdo_lines = []
+    try:
+        while not reader.at_eof():
+            # Prefer splitting by line, as that produces nicer output
+            try:
+                line_bytes = await reader.readuntil(b'\n')
+            except asyncio.IncompleteReadError as e:
+                line_bytes = e.partial
+            except asyncio.LimitOverrunError as e:
+                line_bytes = await reader.readexactly(e.consumed)
+            if line_bytes:
+                line = decode(line_bytes)
+                stdo_lines.append(line)
+                if console_mode is ConsoleUser.STDOUT:
+                    print(line, end='', flush=True)
+                if queue:
+                    await queue.put(line)
+        return ''.join(stdo_lines)
+    except asyncio.CancelledError:
+        return ''.join(stdo_lines)
+    finally:
+        if queue:
+            await queue.put(None)
+
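+# Note the line-splitting strategy above: readuntil(b'\n') yields whole lines
+# when possible, IncompleteReadError.partial recovers a final unterminated
+# line, and LimitOverrunError falls back to reading a fixed-size chunk of an
+# overlong line via readexactly(e.consumed).
+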
+def run_with_mono(fname: str) -> bool:
+    return fname.endswith('.exe') and not (is_windows() or is_cygwin())
+
+def check_testdata(objs: T.List[TestSerialisation]) -> T.List[TestSerialisation]:
+    if not isinstance(objs, list):
+        raise MesonVersionMismatchException('', coredata_version)
+    for obj in objs:
+        if not isinstance(obj, TestSerialisation):
+            raise MesonVersionMismatchException('', coredata_version)
+        if not hasattr(obj, 'version'):
+            raise MesonVersionMismatchException('', coredata_version)
+        if major_versions_differ(obj.version, coredata_version):
+            raise MesonVersionMismatchException(obj.version, coredata_version)
+    return objs
+
+# Custom waiting primitives for asyncio
+
+async def queue_iter(q: 'asyncio.Queue[T.Optional[str]]') -> T.AsyncIterator[str]:
+    while True:
+        item = await q.get()
+        q.task_done()
+        if item is None:
+            break
+        yield item
+
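+# A sketch of the producer/consumer pairing (illustrative): read_decode()
+# puts each decoded line on the queue, followed by a None sentinel, so
+# queue_iter() terminates exactly when the stream ends:
+#
+#     q: asyncio.Queue = asyncio.Queue()
+#     asyncio.ensure_future(read_decode(reader, q, ConsoleUser.LOGGER))
+#     async for line in queue_iter(q):
+#         ...  # e.g. feed the line to a TAP parser
+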
+async def complete(future: asyncio.Future) -> None:
+    """Wait for completion of the given future, ignoring cancellation."""
+    try:
+        await future
+    except asyncio.CancelledError:
+        pass
+
+async def complete_all(futures: T.Iterable[asyncio.Future],
+                       timeout: T.Optional[T.Union[int, float]] = None) -> None:
+    """Wait for completion of all the given futures, ignoring cancellation.
+       If timeout is not None, raise an asyncio.TimeoutError after the given
+       time has passed.  asyncio.TimeoutError is only raised if some futures
+       have not completed and none have raised exceptions, even if timeout
+       is zero."""
+
+    def check_futures(futures: T.Iterable[asyncio.Future]) -> None:
+        # Raise exceptions if needed
+        left = False
+        for f in futures:
+            if not f.done():
+                left = True
+            elif not f.cancelled():
+                f.result()
+        if left:
+            raise asyncio.TimeoutError
+
+    # Python is silly and does not have a variant of asyncio.wait with an
+    # absolute time as deadline.
+    loop = asyncio.get_running_loop()
+    deadline = None if timeout is None else loop.time() + timeout
+    while futures and (timeout is None or timeout > 0):
+        done, futures = await asyncio.wait(futures, timeout=timeout,
+                                           return_when=asyncio.FIRST_EXCEPTION)
+        check_futures(done)
+        if deadline:
+            timeout = deadline - loop.time()
+
+    check_futures(futures)
+
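+# Usage sketch (illustrative): wait at most 5 seconds for a batch of futures,
+# letting any exception raised by a completed future propagate immediately:
+#
+#     futures = [asyncio.ensure_future(run_one(t)) for t in tests]
+#     try:
+#         await complete_all(futures, timeout=5)
+#     except asyncio.TimeoutError:
+#         ...  # some futures are still pending, e.g. kill them and report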
+
+class TestSubprocess:
+    def __init__(self, p: asyncio.subprocess.Process,
+                 stdout: T.Optional[int], stderr: T.Optional[int],
+                 postwait_fn: T.Optional[T.Callable[[], None]] = None):
+        self._process = p
+        self.stdout = stdout
+        self.stderr = stderr
+        self.stdo_task: T.Optional[asyncio.Task[None]] = None
+        self.stde_task: T.Optional[asyncio.Task[None]] = None
+        self.postwait_fn = postwait_fn   # type: T.Optional[T.Callable[[], None]]
+        self.all_futures = []            # type: T.List[asyncio.Future]
+        self.queue = None                # type: T.Optional[asyncio.Queue[T.Optional[str]]]
+
+    def stdout_lines(self) -> T.AsyncIterator[str]:
+        self.queue = asyncio.Queue()
+        return queue_iter(self.queue)
+
+    def communicate(self,
+                    test: 'TestRun',
+                    console_mode: ConsoleUser) -> T.Tuple[T.Optional[T.Awaitable[str]],
+                                                          T.Optional[T.Awaitable[str]]]:
+        async def collect_stdo(test: 'TestRun',
+                               reader: asyncio.StreamReader,
+                               console_mode: ConsoleUser) -> None:
+            test.stdo = await read_decode(reader, self.queue, console_mode)
+
+        async def collect_stde(test: 'TestRun',
+                               reader: asyncio.StreamReader,
+                               console_mode: ConsoleUser) -> None:
+            test.stde = await read_decode(reader, None, console_mode)
+
+        # asyncio.ensure_future ensures that printing can
+        # run in the background, even before it is awaited
+        if self.stdo_task is None and self.stdout is not None:
+            decode_coro = collect_stdo(test, self._process.stdout, console_mode)
+            self.stdo_task = asyncio.ensure_future(decode_coro)
+            self.all_futures.append(self.stdo_task)
+        if self.stderr is not None and self.stderr != asyncio.subprocess.STDOUT:
+            decode_coro = collect_stde(test, self._process.stderr, console_mode)
+            self.stde_task = asyncio.ensure_future(decode_coro)
+            self.all_futures.append(self.stde_task)
+
+        return self.stdo_task, self.stde_task
+
+    async def _kill(self) -> T.Optional[str]:
+        # Python does not provide multiplatform support for
+        # killing a process and all its children so we need
+        # to roll our own.
+        p = self._process
+        try:
+            if is_windows():
+                subprocess.run(['taskkill', '/F', '/T', '/PID', str(p.pid)])
+            else:
+                # Send a termination signal to the process group that setsid()
+                # created - giving it a chance to perform any cleanup.
+                os.killpg(p.pid, signal.SIGTERM)
+
+                # Make sure the termination signal actually kills the process
+                # group, otherwise retry with a SIGKILL.
+                with suppress(asyncio.TimeoutError):
+                    await asyncio.wait_for(p.wait(), timeout=0.5)
+                if p.returncode is not None:
+                    return None
+
+                os.killpg(p.pid, signal.SIGKILL)
+
+            with suppress(asyncio.TimeoutError):
+                await asyncio.wait_for(p.wait(), timeout=1)
+            if p.returncode is not None:
+                return None
+
+            # An earlier kill attempt has not worked for whatever reason.
+            # Try to kill it one last time with a direct call.
+            # If the process has spawned children, they will remain around.
+            p.kill()
+            with suppress(asyncio.TimeoutError):
+                await asyncio.wait_for(p.wait(), timeout=1)
+            if p.returncode is not None:
+                return None
+            return 'Test process could not be killed.'
+        except ProcessLookupError:
+            # Sometimes (e.g. with Wine) this happens.  There's nothing
+            # we can do, probably the process already died so just wait
+            # for the event loop to pick that up.
+            await p.wait()
+            return None
+        finally:
+            if self.stdo_task:
+                self.stdo_task.cancel()
+            if self.stde_task:
+                self.stde_task.cancel()
+
+    async def wait(self, test: 'TestRun') -> None:
+        p = self._process
+
+        self.all_futures.append(asyncio.ensure_future(p.wait()))
+        try:
+            await complete_all(self.all_futures, timeout=test.timeout)
+        except asyncio.TimeoutError:
+            test.additional_error += await self._kill() or ''
+            test.res = TestResult.TIMEOUT
+        except asyncio.CancelledError:
+            # The main loop must have seen Ctrl-C.
+            test.additional_error += await self._kill() or ''
+            test.res = TestResult.INTERRUPT
+        finally:
+            if self.postwait_fn:
+                self.postwait_fn()
+
+        test.returncode = p.returncode or 0
+
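+# Typical TestSubprocess lifecycle (illustrative sketch, assuming a started
+# asyncio subprocess `p` and a TestRun `test`):
+#
+#     ts = TestSubprocess(p, stdout=asyncio.subprocess.PIPE,
+#                         stderr=asyncio.subprocess.PIPE)
+#     stdo_task, stde_task = ts.communicate(test, ConsoleUser.LOGGER)
+#     await ts.wait(test)   # enforces test.timeout; on expiry escalates
+#                           # SIGTERM -> SIGKILL -> kill() (taskkill on Windows)
+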
+class SingleTestRunner:
+
+    def __init__(self, test: TestSerialisation, env: T.Dict[str, str], name: str,
+                 options: argparse.Namespace):
+        self.test = test
+        self.options = options
+        self.cmd = self._get_cmd()
+
+        if self.cmd and self.test.extra_paths:
+            env['PATH'] = os.pathsep.join(self.test.extra_paths + ['']) + env['PATH']
+            winecmd = []
+            for c in self.cmd:
+                winecmd.append(c)
+                if os.path.basename(c).startswith('wine'):
+                    env['WINEPATH'] = get_wine_shortpath(
+                        winecmd,
+                        ['Z:' + p for p in self.test.extra_paths] + env.get('WINEPATH', '').split(';'),
+                        self.test.workdir
+                    )
+                    break
+
+        # If MALLOC_PERTURB_ is not set, or if it is set to an empty value
+        # (i.e., the test or the environment doesn't explicitly set it), set
+        # it ourselves. We do this unconditionally for regular tests
+        # because it is extremely useful to have.
+        # Setting MALLOC_PERTURB_="0" completely disables this feature.
+        if ('MALLOC_PERTURB_' not in env or not env['MALLOC_PERTURB_']) and not options.benchmark:
+            env['MALLOC_PERTURB_'] = str(random.randint(1, 255))
+
+        if self.options.gdb or self.test.timeout is None or self.test.timeout <= 0:
+            timeout = None
+        elif self.options.timeout_multiplier is None:
+            timeout = self.test.timeout
+        elif self.options.timeout_multiplier <= 0:
+            timeout = None
+        else:
+            timeout = self.test.timeout * self.options.timeout_multiplier
+
+        is_parallel = test.is_parallel and self.options.num_processes > 1 and not self.options.gdb
+        verbose = (test.verbose or self.options.verbose) and not self.options.quiet
+        self.runobj = TestRun(test, env, name, timeout, is_parallel, verbose)
+
+        if self.options.gdb:
+            self.console_mode = ConsoleUser.GDB
+        elif self.runobj.direct_stdout:
+            self.console_mode = ConsoleUser.STDOUT
+        else:
+            self.console_mode = ConsoleUser.LOGGER
+
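+    # Worked example of the timeout rules above (illustrative): a test with
+    # timeout=30 runs with a 30 s limit by default and a 60 s limit under
+    # `--timeout-multiplier 2`; it runs with no limit at all under `--gdb`,
+    # `--timeout-multiplier 0`, or a test-defined timeout of 0.
+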
+    def _get_test_cmd(self) -> T.Optional[T.List[str]]:
+        testentry = self.test.fname[0]
+        if self.options.no_rebuild and self.test.cmd_is_built and not os.path.isfile(testentry):
+            raise TestException(f'The test program {testentry!r} does not exist. Cannot run tests before building them.')
+        if testentry.endswith('.jar'):
+            return ['java', '-jar'] + self.test.fname
+        elif not self.test.is_cross_built and run_with_mono(testentry):
+            return ['mono'] + self.test.fname
+        elif self.test.cmd_is_exe and self.test.is_cross_built and self.test.needs_exe_wrapper:
+            if self.test.exe_wrapper is None:
+                # Cannot run the test on a cross-compiled executable
+                # because there is no exe wrapper.
+                return None
+            elif self.test.cmd_is_exe:
+                # If the command is not built (i.e., it's a Python script),
+                # then we don't check for the exe wrapper
+                if not self.test.exe_wrapper.found():
+                    msg = ('The exe_wrapper defined in the cross file {!r} was not '
+                           'found. Please check the command and/or add it to PATH.')
+                    raise TestException(msg.format(self.test.exe_wrapper.name))
+                return self.test.exe_wrapper.get_command() + self.test.fname
+        return self.test.fname
+
+    def _get_cmd(self) -> T.Optional[T.List[str]]:
+        test_cmd = self._get_test_cmd()
+        if not test_cmd:
+            return None
+        return TestHarness.get_wrapper(self.options) + test_cmd
+
+    @property
+    def is_parallel(self) -> bool:
+        return self.runobj.is_parallel
+
+    @property
+    def visible_name(self) -> str:
+        return self.runobj.name
+
+    @property
+    def timeout(self) -> T.Optional[int]:
+        return self.runobj.timeout
+
+    async def run(self, harness: 'TestHarness') -> TestRun:
+        if self.cmd is None:
+            self.runobj.stdo = 'Not run because cross-compiled binaries cannot be executed.'
+            harness.log_start_test(self.runobj)
+            self.runobj.complete_skip()
+        else:
+            cmd = self.cmd + self.test.cmd_args + self.options.test_args
+            self.runobj.start(cmd)
+            harness.log_start_test(self.runobj)
+            await self._run_cmd(harness, cmd)
+        return self.runobj
+
+    async def _run_subprocess(self, args: T.List[str], *,
+                              stdout: T.Optional[int], stderr: T.Optional[int],
+                              env: T.Dict[str, str], cwd: T.Optional[str]) -> TestSubprocess:
+        # Let gdb handle ^C instead of us
+        if self.options.gdb:
+            previous_sigint_handler = signal.getsignal(signal.SIGINT)
+            # Make the meson executable ignore SIGINT while gdb is running.
+            signal.signal(signal.SIGINT, signal.SIG_IGN)
+
+        def preexec_fn() -> None:
+            if self.options.gdb:
+                # Restore the SIGINT handler for the child process to
+                # ensure it can handle it.
+                signal.signal(signal.SIGINT, signal.SIG_DFL)
+            else:
+                # We don't want setsid() in gdb because gdb needs the
+                # terminal in order to handle ^C, avoid tcsetpgrp() errors,
+                # and keep the terminal usable.
+                os.setsid()
+
+        def postwait_fn() -> None:
+            if self.options.gdb:
+                # Let us accept ^C again
+                signal.signal(signal.SIGINT, previous_sigint_handler)
+
+        p = await asyncio.create_subprocess_exec(*args,
+                                                 stdout=stdout,
+                                                 stderr=stderr,
+                                                 env=env,
+                                                 cwd=cwd,
+                                                 preexec_fn=preexec_fn if not is_windows() else None)
+        return TestSubprocess(p, stdout=stdout, stderr=stderr,
+                              postwait_fn=postwait_fn if not is_windows() else None)
+
+    async def _run_cmd(self, harness: 'TestHarness', cmd: T.List[str]) -> None:
+        if self.console_mode is ConsoleUser.GDB:
+            stdout = None
+            stderr = None
+        else:
+            stdout = asyncio.subprocess.PIPE
+            stderr = asyncio.subprocess.STDOUT \
+                if not self.options.split and not self.runobj.needs_parsing \
+                else asyncio.subprocess.PIPE
+
+        extra_cmd = []  # type: T.List[str]
+        if self.test.protocol is TestProtocol.GTEST:
+            gtestname = self.test.name
+            if self.test.workdir:
+                gtestname = os.path.join(self.test.workdir, self.test.name)
+            extra_cmd.append(f'--gtest_output=xml:{gtestname}.xml')
+
+        p = await self._run_subprocess(cmd + extra_cmd,
+                                       stdout=stdout,
+                                       stderr=stderr,
+                                       env=self.runobj.env,
+                                       cwd=self.test.workdir)
+
+        if self.runobj.needs_parsing:
+            parse_coro = self.runobj.parse(harness, p.stdout_lines())
+            parse_task = asyncio.ensure_future(parse_coro)
+        else:
+            parse_task = None
+
+        stdo_task, stde_task = p.communicate(self.runobj, self.console_mode)
+        await p.wait(self.runobj)
+
+        if parse_task:
+            await parse_task
+        if stdo_task:
+            await stdo_task
+        if stde_task:
+            await stde_task
+
+        self.runobj.complete()
+
+
+class TestHarness:
+    def __init__(self, options: argparse.Namespace):
+        self.options = options
+        self.collected_failures = []  # type: T.List[TestRun]
+        self.fail_count = 0
+        self.expectedfail_count = 0
+        self.unexpectedpass_count = 0
+        self.success_count = 0
+        self.skip_count = 0
+        self.timeout_count = 0
+        self.test_count = 0
+        self.name_max_len = 0
+        self.is_run = False
+        self.loggers = []         # type: T.List[TestLogger]
+        self.console_logger = ConsoleLogger()
+        self.loggers.append(self.console_logger)
+        self.need_console = False
+        self.ninja = None # type: T.Optional[T.List[str]]
+
+        self.logfile_base = None  # type: T.Optional[str]
+        if self.options.logbase and not self.options.gdb:
+            namebase = None
+            self.logfile_base = os.path.join(self.options.wd, 'meson-logs', self.options.logbase)
+
+            if self.options.wrapper:
+                namebase = os.path.basename(self.get_wrapper(self.options)[0])
+            elif self.options.setup:
+                namebase = self.options.setup.replace(":", "_")
+
+            if namebase:
+                self.logfile_base += '-' + namebase.replace(' ', '_')
+
+        self.prepare_build()
+        self.load_metadata()
+
+        ss = set()
+        for t in self.tests:
+            for s in t.suite:
+                ss.add(s)
+        self.suites = list(ss)
+
+    def get_console_logger(self) -> 'ConsoleLogger':
+        assert self.console_logger
+        return self.console_logger
+
+    def prepare_build(self) -> None:
+        if self.options.no_rebuild:
+            return
+
+        self.ninja = environment.detect_ninja()
+        if not self.ninja:
+            print("Can't find ninja, can't rebuild test.")
+            # If ninja can't be found return exit code 127, indicating command
+            # not found for shell, which seems appropriate here. This works
+            # nicely for `git bisect run`, telling it to abort - no point in
+            # continuing if there's no ninja.
+            sys.exit(127)
+
+    def load_metadata(self) -> None:
+        startdir = os.getcwd()
+        try:
+            os.chdir(self.options.wd)
+
+            # Before loading build / test data, make sure that the build
+            # configuration does not need to be regenerated. This needs to
+            # happen before rebuild_deps(), because we need the correct list
+            # of tests and their dependencies to compute the rebuild targets.
+            if not self.options.no_rebuild:
+                teststdo = subprocess.run(self.ninja + ['-n', 'build.ninja'], capture_output=True).stdout
+                if b'ninja: no work to do.' not in teststdo and b'samu: nothing to do' not in teststdo:
+                    stdo = sys.stderr if self.options.list else sys.stdout
+                    ret = subprocess.run(self.ninja + ['build.ninja'], stdout=stdo.fileno())
+                    if ret.returncode != 0:
+                        raise TestException(f'Could not configure {self.options.wd!r}')
+
+            self.build_data = build.load(os.getcwd())
+            if not self.options.setup:
+                self.options.setup = self.build_data.test_setup_default_name
+            if self.options.benchmark:
+                self.tests = self.load_tests('meson_benchmark_setup.dat')
+            else:
+                self.tests = self.load_tests('meson_test_setup.dat')
+        finally:
+            os.chdir(startdir)
+
+    def load_tests(self, file_name: str) -> T.List[TestSerialisation]:
+        datafile = Path('meson-private') / file_name
+        if not datafile.is_file():
+            raise TestException(f'Directory {self.options.wd!r} does not seem to be a Meson build directory.')
+        with datafile.open('rb') as f:
+            objs = check_testdata(pickle.load(f))
+        return objs
+
+    def __enter__(self) -> 'TestHarness':
+        return self
+
+    def __exit__(self, exc_type: T.Any, exc_value: T.Any, traceback: T.Any) -> None:
+        self.close_logfiles()
+
+    def close_logfiles(self) -> None:
+        for l in self.loggers:
+            l.close()
+        self.console_logger = None
+
+    def get_test_setup(self, test: T.Optional[TestSerialisation]) -> build.TestSetup:
+        if ':' in self.options.setup:
+            if self.options.setup not in self.build_data.test_setups:
+                sys.exit(f"Unknown test setup '{self.options.setup}'.")
+            return self.build_data.test_setups[self.options.setup]
+        else:
+            full_name = test.project_name + ":" + self.options.setup
+            if full_name not in self.build_data.test_setups:
+                sys.exit(f"Test setup '{self.options.setup}' not found from project '{test.project_name}'.")
+            return self.build_data.test_setups[full_name]
+
+    def merge_setup_options(self, options: argparse.Namespace, test: TestSerialisation) -> T.Dict[str, str]:
+        current = self.get_test_setup(test)
+        if not options.gdb:
+            options.gdb = current.gdb
+        if options.gdb:
+            options.verbose = True
+        if options.timeout_multiplier is None:
+            options.timeout_multiplier = current.timeout_multiplier
+    #    if options.env is None:
+    #        options.env = current.env # FIXME, should probably merge options here.
+        if options.wrapper is None:
+            options.wrapper = current.exe_wrapper
+        elif current.exe_wrapper:
+            sys.exit('Conflict: both test setup and command line specify an exe wrapper.')
+        return current.env.get_env(os.environ.copy())
+
+    def get_test_runner(self, test: TestSerialisation) -> SingleTestRunner:
+        name = self.get_pretty_suite(test)
+        options = deepcopy(self.options)
+        if self.options.setup:
+            env = self.merge_setup_options(options, test)
+        else:
+            env = os.environ.copy()
+        test_env = test.env.get_env(env)
+        env.update(test_env)
+        if (test.is_cross_built and test.needs_exe_wrapper and
+                test.exe_wrapper and test.exe_wrapper.found()):
+            env['MESON_EXE_WRAPPER'] = join_args(test.exe_wrapper.get_command())
+        return SingleTestRunner(test, env, name, options)
+
+    def process_test_result(self, result: TestRun) -> None:
+        if result.res is TestResult.TIMEOUT:
+            self.timeout_count += 1
+        elif result.res is TestResult.SKIP:
+            self.skip_count += 1
+        elif result.res is TestResult.OK:
+            self.success_count += 1
+        elif result.res in {TestResult.FAIL, TestResult.ERROR, TestResult.INTERRUPT}:
+            self.fail_count += 1
+        elif result.res is TestResult.EXPECTEDFAIL:
+            self.expectedfail_count += 1
+        elif result.res is TestResult.UNEXPECTEDPASS:
+            self.unexpectedpass_count += 1
+        else:
+            sys.exit(f'Unknown test result encountered: {result.res}')
+
+        if result.res.is_bad():
+            self.collected_failures.append(result)
+        for l in self.loggers:
+            l.log(self, result)
+
+    @property
+    def numlen(self) -> int:
+        return len(str(self.test_count))
+
+    @property
+    def max_left_width(self) -> int:
+        return 2 * self.numlen + 2
+
+    def get_test_num_prefix(self, num: int) -> str:
+        return '{num:{numlen}}/{testcount} '.format(numlen=self.numlen,
+                                                    num=num,
+                                                    testcount=self.test_count)
+
+    def format(self, result: TestRun, colorize: bool,
+               max_left_width: int = 0,
+               prefix: str = '',
+               left: T.Optional[str] = None,
+               middle: T.Optional[str] = None,
+               right: T.Optional[str] = None) -> str:
+        if left is None:
+            left = self.get_test_num_prefix(result.num)
+
+        # A non-default max_left_width lets the logger print more stuff before the
+        # name, while ensuring that the rightmost columns remain aligned.
+        max_left_width = max(max_left_width, self.max_left_width)
+
+        if middle is None:
+            middle = result.name
+        extra_mid_width = max_left_width + self.name_max_len + 1 - uniwidth(middle) - uniwidth(left) - uniwidth(prefix)
+        middle += ' ' * max(1, extra_mid_width)
+
+        if right is None:
+            right = '{res} {dur:{durlen}.2f}s'.format(
+                res=result.res.get_text(colorize),
+                dur=result.duration,
+                durlen=self.duration_max_len + 3)
+            details = result.get_details()
+            if details:
+                right += '   ' + details
+        return prefix + left + middle + right
+
+    def summary(self) -> str:
+        return textwrap.dedent('''
+            Ok:                 {:<4}
+            Expected Fail:      {:<4}
+            Fail:               {:<4}
+            Unexpected Pass:    {:<4}
+            Skipped:            {:<4}
+            Timeout:            {:<4}
+            ''').format(self.success_count, self.expectedfail_count, self.fail_count,
+                        self.unexpectedpass_count, self.skip_count, self.timeout_count)
+
+    def total_failure_count(self) -> int:
+        return self.fail_count + self.unexpectedpass_count + self.timeout_count
+
+    def doit(self) -> int:
+        if self.is_run:
+            raise RuntimeError('Test harness object can only be used once.')
+        self.is_run = True
+        tests = self.get_tests()
+        if not tests:
+            return 0
+        if not self.options.no_rebuild and not rebuild_deps(self.ninja, self.options.wd, tests):
+            # We return 125 here in case the build failed.
+            # The reason is that exit code 125 tells `git bisect run` that the current
+            # commit should be skipped.  Thus users can directly use `meson test` to
+            # bisect without needing to handle the does-not-build case separately in a
+            # wrapper script.
+            sys.exit(125)
+
+        self.name_max_len = max(uniwidth(self.get_pretty_suite(test)) for test in tests)
+        self.options.num_processes = min(self.options.num_processes,
+                                         len(tests) * self.options.repeat)
+        startdir = os.getcwd()
+        try:
+            os.chdir(self.options.wd)
+            runners = []             # type: T.List[SingleTestRunner]
+            for i in range(self.options.repeat):
+                runners.extend(self.get_test_runner(test) for test in tests)
+                if i == 0:
+                    self.duration_max_len = max(len(str(int(runner.timeout or 99)))
+                                                for runner in runners)
+                    # Disable the progress report if it gets in the way
+                    self.need_console = any(runner.console_mode is not ConsoleUser.LOGGER
+                                            for runner in runners)
+
+            self.test_count = len(runners)
+            self.run_tests(runners)
+        finally:
+            os.chdir(startdir)
+        return self.total_failure_count()
+
+    @staticmethod
+    def split_suite_string(suite: str) -> T.Tuple[str, str]:
+        if ':' in suite:
+            split = suite.split(':', 1)
+            assert len(split) == 2
+            return split[0], split[1]
+        else:
+            return suite, ""
+
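+    # For example (illustrative):
+    #     split_suite_string('proj:unit')  ->  ('proj', 'unit')
+    #     split_suite_string('unit')       ->  ('unit', '')
+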
+    @staticmethod
+    def test_in_suites(test: TestSerialisation, suites: T.List[str]) -> bool:
+        for suite in suites:
+            (prj_match, st_match) = TestHarness.split_suite_string(suite)
+            for prjst in test.suite:
+                (prj, st) = TestHarness.split_suite_string(prjst)
+
+                # A SUITE argument can be passed either as
+                #     suite_name
+                # or
+                #     project_name:suite_name
+                # so we need to select only the tests belonging to project_name
+
+                # This if handles the first case (i.e., SUITE == suite_name);
+                # it lets us run tests belonging to different (sub)projects
+                # which share the same suite_name
+                if not st_match and st == prj_match:
+                    return True
+
+                # These two conditions handle the second form,
+                # i.e., SUITE == project_name:suite_name; together they
+                # select only the tests of project_name with suite_name
+                if prj_match and prj != prj_match:
+                    continue
+                if st_match and st != st_match:
+                    continue
+                return True
+        return False
+
+    def test_suitable(self, test: TestSerialisation) -> bool:
+        if TestHarness.test_in_suites(test, self.options.exclude_suites):
+            return False
+
+        if self.options.include_suites:
+            # Both force inclusion (overriding add_test_setup) and exclude
+            # everything else
+            return TestHarness.test_in_suites(test, self.options.include_suites)
+
+        if self.options.setup:
+            setup = self.get_test_setup(test)
+            if TestHarness.test_in_suites(test, setup.exclude_suites):
+                return False
+
+        return True
+
+    def tests_from_args(self, tests: T.List[TestSerialisation]) -> T.Generator[TestSerialisation, None, None]:
+        '''
+        Allow specifying test names like "meson test foo1 foo2", where the
+        build definition contains test('foo1', ...) and test('foo2', ...).
+
+        Also support specifying the subproject to run tests from, like
+        "meson test subproj:" (all tests inside subproj) or "meson test subproj:foo1"
+        to run foo1 inside subproj. "meson test :foo1" runs all tests with
+        that name across all subprojects, which is identical to "meson test foo1".
+        '''
+        patterns: T.Dict[T.Tuple[str, str], bool] = {}
+        for arg in self.options.args:
+            # Replace empty components by wildcards:
+            # '' -> '*:*'
+            # 'name' -> '*:name'
+            # ':name' -> '*:name'
+            # 'proj:' -> 'proj:*'
+            if ':' in arg:
+                subproj, name = arg.split(':', maxsplit=1)
+                if name == '':
+                    name = '*'
+                if subproj == '':  # in case arg was ':'
+                    subproj = '*'
+            else:
+                subproj, name = '*', arg
+            patterns[(subproj, name)] = False
+
+        for t in tests:
+            # For each test, find the first matching pattern
+            # and mark it as used. yield the matching tests.
+            for subproj, name in list(patterns):
+                if fnmatch(t.project_name, subproj) and fnmatch(t.name, name):
+                    patterns[(subproj, name)] = True
+                    yield t
+                    break
+
+        for (subproj, name), was_used in patterns.items():
+            if not was_used:
+                # For each unused pattern...
+                arg = f'{subproj}:{name}'
+                for t in tests:
+                    # ... if it matches a test, then it wasn't used because another
+                    # pattern matched the same test before.
+                    # Report it as a warning.
+                    if fnmatch(t.project_name, subproj) and fnmatch(t.name, name):
+                        mlog.warning(f'{arg} test name is redundant and was not used')
+                        break
+                else:
+                    # If the pattern doesn't match any test,
+                    # report it as an error. We don't want the `test` command to
+                    # succeed on an invalid pattern.
+                    raise MesonException(f'{arg} test name does not match any test')
+
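+    # Pattern examples (illustrative): given tests foo1 in project 'a' and
+    # foo2 in project 'b',
+    #     'foo1'  expands to ('*', 'foo1')  and matches a:foo1
+    #     'b:'    expands to ('b', '*')     and matches b:foo2
+    #     ':foo*' expands to ('*', 'foo*')  and matches both
+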
+    def get_tests(self, errorfile: T.Optional[T.IO] = sys.stdout) -> T.List[TestSerialisation]:
+        if not self.tests:
+            print('No tests defined.', file=errorfile)
+            return []
+
+        tests = [t for t in self.tests if self.test_suitable(t)]
+        if self.options.args:
+            tests = list(self.tests_from_args(tests))
+
+        if not tests:
+            print('No suitable tests defined.', file=errorfile)
+            return []
+
+        return tests
+
+    def flush_logfiles(self) -> None:
+        for l in self.loggers:
+            l.flush()
+
+    def open_logfiles(self) -> None:
+        if not self.logfile_base:
+            return
+
+        self.loggers.append(JunitBuilder(self.logfile_base + '.junit.xml'))
+        self.loggers.append(JsonLogfileBuilder(self.logfile_base + '.json'))
+        self.loggers.append(TextLogfileBuilder(self.logfile_base + '.txt', errors='surrogateescape'))
+
+    @staticmethod
+    def get_wrapper(options: argparse.Namespace) -> T.List[str]:
+        wrap = []  # type: T.List[str]
+        if options.gdb:
+            wrap = [options.gdb_path, '--quiet']
+            if options.repeat > 1:
+                wrap += ['-ex', 'run', '-ex', 'quit']
+            # Signal the end of arguments to gdb
+            wrap += ['--args']
+        if options.wrapper:
+            wrap += options.wrapper
+        return wrap
+
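+    # With `--gdb --repeat 3` the wrapper becomes, e.g. (illustrative):
+    #     ['gdb', '--quiet', '-ex', 'run', '-ex', 'quit', '--args']
+    # so each repetition runs and exits without manual gdb interaction.
+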
+    def get_pretty_suite(self, test: TestSerialisation) -> str:
+        if len(self.suites) > 1 and test.suite:
+            rv = TestHarness.split_suite_string(test.suite[0])[0]
+            s = "+".join(TestHarness.split_suite_string(s)[1] for s in test.suite)
+            if s:
+                rv += ":"
+            return rv + s + " / " + test.name
+        else:
+            return test.name
+
+    def run_tests(self, runners: T.List[SingleTestRunner]) -> None:
+        try:
+            self.open_logfiles()
+
+            # TODO: this is the default for python 3.8
+            if sys.platform == 'win32':
+                asyncio.set_event_loop_policy(asyncio.WindowsProactorEventLoopPolicy())
+
+            asyncio.run(self._run_tests(runners))
+        finally:
+            self.close_logfiles()
+
+    def log_subtest(self, test: TestRun, s: str, res: TestResult) -> None:
+        for l in self.loggers:
+            l.log_subtest(self, test, s, res)
+
+    def log_start_test(self, test: TestRun) -> None:
+        for l in self.loggers:
+            l.start_test(self, test)
+
+    async def _run_tests(self, runners: T.List[SingleTestRunner]) -> None:
+        semaphore = asyncio.Semaphore(self.options.num_processes)
+        futures = deque()  # type: T.Deque[asyncio.Future]
+        running_tests = {}  # type: T.Dict[asyncio.Future, str]
+        interrupted = False
+        ctrlc_times = deque(maxlen=MAX_CTRLC)  # type: T.Deque[float]
+        loop = asyncio.get_running_loop()
+
+        async def run_test(test: SingleTestRunner) -> None:
+            async with semaphore:
+                if interrupted or (self.options.repeat > 1 and self.fail_count):
+                    return
+                res = await test.run(self)
+                self.process_test_result(res)
+                maxfail = self.options.maxfail
+                if maxfail and self.fail_count >= maxfail and res.res.is_bad():
+                    cancel_all_tests()
+
+        def test_done(f: asyncio.Future) -> None:
+            if not f.cancelled():
+                f.result()
+            futures.remove(f)
+            try:
+                del running_tests[f]
+            except KeyError:
+                pass
+
+        def cancel_one_test(warn: bool) -> None:
+            future = futures.popleft()
+            futures.append(future)
+            if warn:
+                self.flush_logfiles()
+                mlog.warning('CTRL-C detected, interrupting {}'.format(running_tests[future]))
+            del running_tests[future]
+            future.cancel()
+
+        def cancel_all_tests() -> None:
+            nonlocal interrupted
+            interrupted = True
+            while running_tests:
+                cancel_one_test(False)
+
+        def sigterm_handler() -> None:
+            if interrupted:
+                return
+            self.flush_logfiles()
+            mlog.warning('Received SIGTERM, exiting')
+            cancel_all_tests()
+
+        def sigint_handler() -> None:
+            # We always pick the longest-running future that has not been cancelled
+            # If all the tests have been CTRL-C'ed, just stop
+            nonlocal interrupted
+            if interrupted:
+                return
+            ctrlc_times.append(loop.time())
+            if len(ctrlc_times) == MAX_CTRLC and ctrlc_times[-1] - ctrlc_times[0] < 1:
+                self.flush_logfiles()
+                mlog.warning('CTRL-C detected, exiting')
+                cancel_all_tests()
+            elif running_tests:
+                cancel_one_test(True)
+            else:
+                self.flush_logfiles()
+                mlog.warning('CTRL-C detected, exiting')
+                interrupted = True
+
+        for l in self.loggers:
+            l.start(self)
+
+        if sys.platform != 'win32':
+            if os.getpgid(0) == os.getpid():
+                loop.add_signal_handler(signal.SIGINT, sigint_handler)
+            else:
+                loop.add_signal_handler(signal.SIGINT, sigterm_handler)
+            loop.add_signal_handler(signal.SIGTERM, sigterm_handler)
+        try:
+            for runner in runners:
+                if not runner.is_parallel:
+                    await complete_all(futures)
+                future = asyncio.ensure_future(run_test(runner))
+                futures.append(future)
+                running_tests[future] = runner.visible_name
+                future.add_done_callback(test_done)
+                if not runner.is_parallel:
+                    await complete(future)
+                if self.options.repeat > 1 and self.fail_count:
+                    break
+
+            await complete_all(futures)
+        finally:
+            if sys.platform != 'win32':
+                loop.remove_signal_handler(signal.SIGINT)
+                loop.remove_signal_handler(signal.SIGTERM)
+            for l in self.loggers:
+                await l.finish(self)
+
+def list_tests(th: TestHarness) -> bool:
+    tests = th.get_tests(errorfile=sys.stderr)
+    for t in tests:
+        print(th.get_pretty_suite(t))
+    return not tests
+
+def rebuild_deps(ninja: T.List[str], wd: str, tests: T.List[TestSerialisation]) -> bool:
+    def convert_path_to_target(path: str) -> str:
+        path = os.path.relpath(path, wd)
+        if os.sep != '/':
+            path = path.replace(os.sep, '/')
+        return path
+
+    assert len(ninja) > 0
+
+    depends = set()        # type: T.Set[str]
+    targets = set()        # type: T.Set[str]
+    intro_targets = {}     # type: T.Dict[str, T.List[str]]
+    for target in load_info_file(get_infodir(wd), kind='targets'):
+        intro_targets[target['id']] = [
+            convert_path_to_target(f)
+            for f in target['filename']]
+    for t in tests:
+        for d in t.depends:
+            if d in depends:
+                continue
+            depends.add(d)
+            targets.update(intro_targets[d])
+
+    ret = subprocess.run(ninja + ['-C', wd] + sorted(targets)).returncode
+    if ret != 0:
+        print(f'Could not rebuild {wd}')
+        return False
+
+    return True
+
+def run(options: argparse.Namespace) -> int:
+    if options.benchmark:
+        options.num_processes = 1
+
+    if options.verbose and options.quiet:
+        print('Cannot be both quiet and verbose at the same time.')
+        return 1
+
+    check_bin = None
+    if options.gdb:
+        options.verbose = True
+        if options.wrapper:
+            print('Must not specify both a wrapper and gdb at the same time.')
+            return 1
+        check_bin = 'gdb'
+
+    if options.wrapper:
+        check_bin = options.wrapper[0]
+
+    if check_bin is not None:
+        exe = ExternalProgram(check_bin, silent=True)
+        if not exe.found():
+            print(f'Could not find requested program: {check_bin!r}')
+            return 1
+
+    b = build.load(options.wd)
+    need_vsenv = T.cast('bool', b.environment.coredata.get_option(OptionKey('vsenv')))
+    setup_vsenv(need_vsenv)
+
+    if not options.no_rebuild:
+        backend = b.environment.coredata.get_option(OptionKey('backend'))
+        if backend == 'none':
+            # nothing to build...
+            options.no_rebuild = True
+        elif backend != 'ninja':
+            print('Only the ninja backend supports rebuilding tests before running them.')
+            # Disable, no point in trying to build anything later
+            options.no_rebuild = True
+
+    with TestHarness(options) as th:
+        try:
+            if options.list:
+                return list_tests(th)
+            return th.doit()
+        except TestException as e:
+            print('Meson test encountered an error:\n')
+            if os.environ.get('MESON_FORCE_BACKTRACE'):
+                raise e
+            else:
+                print(e)
+            return 1
+
+def run_with_args(args: T.List[str]) -> int:
+    parser = argparse.ArgumentParser(prog='meson test')
+    add_arguments(parser)
+    options = parser.parse_args(args)
+    return run(options)
diff --git a/vendored-meson/meson/mesonbuild/munstable_coredata.py b/vendored-meson/meson/mesonbuild/munstable_coredata.py
new file mode 100644
index 000000000000..e6c543b81bb8
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/munstable_coredata.py
@@ -0,0 +1,115 @@
+# Copyright 2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+
+from . import coredata as cdata
+from .mesonlib import MachineChoice, OptionKey
+
+import os.path
+import pprint
+import textwrap
+
+def add_arguments(parser):
+    parser.add_argument('--all', action='store_true', dest='all', default=False,
+                        help='Show data not used by current backend.')
+
+    parser.add_argument('builddir', nargs='?', default='.', help='The build directory')
+
+
+def dump_compilers(compilers):
+    for lang, compiler in compilers.items():
+        print('  ' + lang + ':')
+        print('      Id: ' + compiler.id)
+        print('      Command: ' + ' '.join(compiler.exelist))
+        if compiler.full_version:
+            print('      Full version: ' + compiler.full_version)
+        if compiler.version:
+            print('      Detected version: ' + compiler.version)
+
+
+def dump_guids(d):
+    for name, value in d.items():
+        print('  ' + name + ': ' + value)
+
+
+def run(options):
+    datadir = 'meson-private'
+    if options.builddir is not None:
+        datadir = os.path.join(options.builddir, datadir)
+    if not os.path.isdir(datadir):
+        print('Current directory is not a build dir. Please specify it or '
+              'change the working directory to it.')
+        return 1
+
+    all_backends = options.all
+
+    print('This is a dump of the internal unstable cache of meson. This is for debugging only.')
+    print('Do NOT parse; this will change from version to version in incompatible ways.')
+    print('')
+
+    coredata = cdata.load(options.builddir)
+    backend = coredata.get_option(OptionKey('backend'))
+    for k, v in sorted(coredata.__dict__.items()):
+        if k in {'backend_options', 'base_options', 'builtins', 'compiler_options', 'user_options'}:
+            # use `meson configure` to view these
+            pass
+        elif k in {'install_guid', 'test_guid', 'regen_guid'}:
+            if all_backends or backend.startswith('vs'):
+                print(k + ': ' + v)
+        elif k == 'target_guids':
+            if all_backends or backend.startswith('vs'):
+                print(k + ':')
+                dump_guids(v)
+        elif k == 'lang_guids':
+            if all_backends or backend.startswith('vs') or backend == 'xcode':
+                print(k + ':')
+                dump_guids(v)
+        elif k == 'meson_command':
+            if all_backends or backend.startswith('vs'):
+                print('Meson command used in build file regeneration: ' + ' '.join(v))
+        elif k == 'pkgconf_envvar':
+            print('Last seen PKGCONFIG environment variable value: ' + v)
+        elif k == 'version':
+            print('Meson version: ' + v)
+        elif k == 'cross_files':
+            if v:
+                print('Cross File: ' + ' '.join(v))
+        elif k == 'config_files':
+            if v:
+                print('Native File: ' + ' '.join(v))
+        elif k == 'compilers':
+            for for_machine in MachineChoice:
+                print('Cached {} machine compilers:'.format(
+                    for_machine.get_lower_case_name()))
+                dump_compilers(v[for_machine])
+        elif k == 'deps':
+            def print_dep(dep_key, dep):
+                print('  ' + dep_key[0][1] + ": ")
+                print('      compile args: ' + repr(dep.get_compile_args()))
+                print('      link args: ' + repr(dep.get_link_args()))
+                if dep.get_sources():
+                    print('      sources: ' + repr(dep.get_sources()))
+                print('      version: ' + repr(dep.get_version()))
+
+            for for_machine in iter(MachineChoice):
+                items_list = sorted(v[for_machine].items())
+                if items_list:
+                    print(f'Cached dependencies for {for_machine.get_lower_case_name()} machine')
+                    for dep_key, deps in items_list:
+                        for dep in deps:
+                            print_dep(dep_key, dep)
+        else:
+            print(k + ':')
+            print(textwrap.indent(pprint.pformat(v), '  '))
diff --git a/vendored-meson/meson/mesonbuild/optinterpreter.py b/vendored-meson/meson/mesonbuild/optinterpreter.py
new file mode 100644
index 000000000000..8377614fe4dd
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/optinterpreter.py
@@ -0,0 +1,279 @@
+# Copyright 2013-2014 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+import re
+import typing as T
+
+from . import coredata
+from . import mesonlib
+from . import mparser
+from . import mlog
+from .interpreterbase import FeatureNew, typed_pos_args, typed_kwargs, ContainerTypeInfo, KwargInfo
+from .interpreter.type_checking import NoneType, in_set_validator
+
+if T.TYPE_CHECKING:
+    from .interpreterbase import TYPE_var, TYPE_kwargs
+    from .interpreterbase import SubProject
+    from typing_extensions import TypedDict, Literal
+
+    _DEPRECATED_ARGS = T.Union[bool, str, T.Dict[str, str], T.List[str]]
+
+    FuncOptionArgs = TypedDict('FuncOptionArgs', {
+        'type': str,
+        'description': str,
+        'yield': bool,
+        'choices': T.Optional[T.List[str]],
+        'value': object,
+        'min': T.Optional[int],
+        'max': T.Optional[int],
+        'deprecated': _DEPRECATED_ARGS,
+        })
+
+    class StringArgs(TypedDict):
+        value: str
+
+    class BooleanArgs(TypedDict):
+        value: bool
+
+    class ComboArgs(TypedDict):
+        value: str
+        choices: T.List[str]
+
+    class IntegerArgs(TypedDict):
+        value: int
+        min: T.Optional[int]
+        max: T.Optional[int]
+
+    class StringArrayArgs(TypedDict):
+        value: T.Optional[T.Union[str, T.List[str]]]
+        choices: T.List[str]
+
+    class FeatureArgs(TypedDict):
+        value: Literal['enabled', 'disabled', 'auto']
+        choices: T.List[str]
+
+
+class OptionException(mesonlib.MesonException):
+    pass
+
+
+optname_regex = re.compile('[^a-zA-Z0-9_-]')
+
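+# The regex matches any *disallowed* character, so a hit from search() means
+# the option name is invalid. For example (illustrative):
+#
+#     optname_regex.search('unit_tests-2')  ->  None (valid)
+#     optname_regex.search('bad.name')      ->  match on '.' (rejected)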
+
+class OptionInterpreter:
+    def __init__(self, subproject: 'SubProject') -> None:
+        self.options: 'coredata.MutableKeyedOptionDictType' = {}
+        self.subproject = subproject
+        self.option_types: T.Dict[str, T.Callable[..., coredata.UserOption]] = {
+            'string': self.string_parser,
+            'boolean': self.boolean_parser,
+            'combo': self.combo_parser,
+            'integer': self.integer_parser,
+            'array': self.string_array_parser,
+            'feature': self.feature_parser,
+        }
+
+    def process(self, option_file: str) -> None:
+        try:
+            with open(option_file, encoding='utf-8') as f:
+                ast = mparser.Parser(f.read(), option_file).parse()
+        except mesonlib.MesonException as me:
+            me.file = option_file
+            raise me
+        if not isinstance(ast, mparser.CodeBlockNode):
+            e = OptionException('Option file is malformed.')
+            e.lineno = ast.lineno()
+            e.file = option_file
+            raise e
+        for cur in ast.lines:
+            try:
+                self.current_node = cur
+                self.evaluate_statement(cur)
+            except mesonlib.MesonException as e:
+                e.lineno = cur.lineno
+                e.colno = cur.colno
+                e.file = option_file
+                raise e
+            except Exception as e:
+                raise mesonlib.MesonException(
+                    str(e), lineno=cur.lineno, colno=cur.colno, file=option_file)
+
+    def reduce_single(self, arg: T.Union[str, mparser.BaseNode]) -> 'TYPE_var':
+        if isinstance(arg, str):
+            return arg
+        elif isinstance(arg, (mparser.StringNode, mparser.BooleanNode,
+                              mparser.NumberNode)):
+            return arg.value
+        elif isinstance(arg, mparser.ArrayNode):
+            return [self.reduce_single(curarg) for curarg in arg.args.arguments]
+        elif isinstance(arg, mparser.DictNode):
+            d = {}
+            for k, v in arg.args.kwargs.items():
+                if not isinstance(k, mparser.StringNode):
+                    raise OptionException('Dictionary keys must be string literals')
+                d[k.value] = self.reduce_single(v)
+            return d
+        elif isinstance(arg, mparser.UMinusNode):
+            res = self.reduce_single(arg.value)
+            if not isinstance(res, (int, float)):
+                raise OptionException('Token after "-" is not a number')
+            FeatureNew.single_use('negative numbers in meson_options.txt', '0.54.1', self.subproject)
+            return -res
+        elif isinstance(arg, mparser.NotNode):
+            res = self.reduce_single(arg.value)
+            if not isinstance(res, bool):
+                raise OptionException('Token after "not" is not a boolean')
+            FeatureNew.single_use('negation ("not") in meson_options.txt', '0.54.1', self.subproject)
+            return not res
+        elif isinstance(arg, mparser.ArithmeticNode):
+            l = self.reduce_single(arg.left)
+            r = self.reduce_single(arg.right)
+            if not (arg.operation == 'add' and isinstance(l, str) and isinstance(r, str)):
+                raise OptionException('Only string concatenation with the "+" operator is allowed')
+            FeatureNew.single_use('string concatenation in meson_options.txt', '0.55.0', self.subproject)
+            return l + r
+        else:
+            raise OptionException('Arguments may only be string, int, bool, dict, or an array of those.')
+
+    def reduce_arguments(self, args: mparser.ArgumentNode) -> T.Tuple['TYPE_var', 'TYPE_kwargs']:
+        if args.incorrect_order():
+            raise OptionException('All keyword arguments must be after positional arguments.')
+        reduced_pos = [self.reduce_single(arg) for arg in args.arguments]
+        reduced_kw = {}
+        for key in args.kwargs.keys():
+            if not isinstance(key, mparser.IdNode):
+                raise OptionException('Keyword argument name is not a string.')
+            a = args.kwargs[key]
+            reduced_kw[key.value] = self.reduce_single(a)
+        return reduced_pos, reduced_kw
+
+    def evaluate_statement(self, node: mparser.BaseNode) -> None:
+        if not isinstance(node, mparser.FunctionNode):
+            raise OptionException('Option file may only contain option definitions')
+        func_name = node.func_name
+        if func_name != 'option':
+            raise OptionException('Only calls to option() are allowed in option files.')
+        (posargs, kwargs) = self.reduce_arguments(node.args)
+        self.func_option(posargs, kwargs)
+
+    @typed_kwargs(
+        'option',
+        KwargInfo(
+            'type',
+            str,
+            required=True,
+            validator=in_set_validator({'string', 'boolean', 'integer', 'combo', 'array', 'feature'})
+        ),
+        KwargInfo('description', str, default=''),
+        KwargInfo(
+            'deprecated',
+            (bool, str, ContainerTypeInfo(dict, str), ContainerTypeInfo(list, str)),
+            default=False,
+            since='0.60.0',
+            since_values={str: '0.63.0'},
+        ),
+        KwargInfo('yield', bool, default=coredata.DEFAULT_YIELDING, since='0.45.0'),
+        allow_unknown=True,
+    )
+    @typed_pos_args('option', str)
+    def func_option(self, args: T.Tuple[str], kwargs: 'FuncOptionArgs') -> None:
+        opt_name = args[0]
+        if optname_regex.search(opt_name) is not None:
+            raise OptionException('Option names can only contain letters, numbers or dashes.')
+        key = mesonlib.OptionKey.from_string(opt_name).evolve(subproject=self.subproject)
+        if not key.is_project():
+            raise OptionException('Option name %s is reserved.' % opt_name)
+
+        opt_type = kwargs['type']
+        parser = self.option_types[opt_type]
+        description = kwargs['description'] or opt_name
+
+        # Drop the arguments we've already consumed
+        n_kwargs = {k: v for k, v in kwargs.items()
+                    if k not in {'type', 'description', 'deprecated', 'yield'}}
+
+        opt = parser(description, (kwargs['yield'], kwargs['deprecated']), n_kwargs)
+        if key in self.options:
+            mlog.deprecation(f'Option {opt_name} already exists.')
+        self.options[key] = opt
+
+    @typed_kwargs(
+        'string option',
+        KwargInfo('value', str, default=''),
+    )
+    def string_parser(self, description: str, args: T.Tuple[bool, _DEPRECATED_ARGS], kwargs: StringArgs) -> coredata.UserOption:
+        return coredata.UserStringOption(description, kwargs['value'], *args)
+
+    @typed_kwargs(
+        'boolean option',
+        KwargInfo(
+            'value',
+            (bool, str),
+            default=True,
+            validator=lambda x: None if isinstance(x, bool) or x in {'true', 'false'} else 'boolean options must have boolean values',
+            deprecated_values={str: ('1.1.0', 'use a boolean, not a string')},
+        ),
+    )
+    def boolean_parser(self, description: str, args: T.Tuple[bool, _DEPRECATED_ARGS], kwargs: BooleanArgs) -> coredata.UserOption:
+        return coredata.UserBooleanOption(description, kwargs['value'], *args)
+
+    @typed_kwargs(
+        'combo option',
+        KwargInfo('value', (str, NoneType)),
+        KwargInfo('choices', ContainerTypeInfo(list, str, allow_empty=False), required=True),
+    )
+    def combo_parser(self, description: str, args: T.Tuple[bool, _DEPRECATED_ARGS], kwargs: ComboArgs) -> coredata.UserOption:
+        choices = kwargs['choices']
+        value = kwargs['value']
+        if value is None:
+            value = kwargs['choices'][0]
+        return coredata.UserComboOption(description, choices, value, *args)
+
+    @typed_kwargs(
+        'integer option',
+        KwargInfo(
+            'value',
+            (int, str),
+            default=True,
+            deprecated_values={str: ('1.1.0', 'use an integer, not a string')},
+            convertor=int,
+        ),
+        KwargInfo('min', (int, NoneType)),
+        KwargInfo('max', (int, NoneType)),
+    )
+    def integer_parser(self, description: str, args: T.Tuple[bool, _DEPRECATED_ARGS], kwargs: IntegerArgs) -> coredata.UserOption:
+        value = kwargs['value']
+        inttuple = (kwargs['min'], kwargs['max'], value)
+        return coredata.UserIntegerOption(description, inttuple, *args)
+
+    @typed_kwargs(
+        'string array option',
+        KwargInfo('value', (ContainerTypeInfo(list, str), str, NoneType)),
+        KwargInfo('choices', ContainerTypeInfo(list, str), default=[]),
+    )
+    def string_array_parser(self, description: str, args: T.Tuple[bool, _DEPRECATED_ARGS], kwargs: StringArrayArgs) -> coredata.UserOption:
+        choices = kwargs['choices']
+        value = kwargs['value'] if kwargs['value'] is not None else choices
+        return coredata.UserArrayOption(description, value,
+                                        choices=choices,
+                                        yielding=args[0],
+                                        deprecated=args[1])
+
+    @typed_kwargs(
+        'feature option',
+        KwargInfo('value', str, default='auto', validator=in_set_validator({'auto', 'enabled', 'disabled'})),
+    )
+    def feature_parser(self, description: str, args: T.Tuple[bool, _DEPRECATED_ARGS], kwargs: FeatureArgs) -> coredata.UserOption:
+        return coredata.UserFeatureOption(description, kwargs['value'], *args)
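+
+# Illustrative only: a minimal option file that this interpreter accepts;
+# every statement must be a bare option() call, e.g.:
+#
+#   option('with_docs', type: 'boolean', value: false,
+#          description: 'Build the documentation')
+#   option('backend', type: 'combo', choices: ['ninja', 'vs'], value: 'ninja')
+#   option('opt_level', type: 'integer', min: 0, max: 3, value: 2)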
diff --git a/vendored-meson/meson/mesonbuild/programs.py b/vendored-meson/meson/mesonbuild/programs.py
new file mode 100644
index 000000000000..9bf1844c9d85
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/programs.py
@@ -0,0 +1,378 @@
+# Copyright 2013-2020 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+"""Representations and logic for External and Internal Programs."""
+
+import functools
+import os
+import shutil
+import stat
+import sys
+import re
+import typing as T
+from pathlib import Path
+
+from . import mesonlib
+from . import mlog
+from .mesonlib import MachineChoice, OrderedSet
+
+if T.TYPE_CHECKING:
+    from .environment import Environment
+    from .interpreter import Interpreter
+
+
+class ExternalProgram(mesonlib.HoldableObject):
+
+    """A program that is found on the system."""
+
+    windows_exts = ('exe', 'msc', 'com', 'bat', 'cmd')
+    for_machine = MachineChoice.BUILD
+
+    def __init__(self, name: str, command: T.Optional[T.List[str]] = None,
+                 silent: bool = False, search_dir: T.Optional[str] = None,
+                 extra_search_dirs: T.Optional[T.List[str]] = None):
+        self.name = name
+        self.path: T.Optional[str] = None
+        self.cached_version: T.Optional[str] = None
+        if command is not None:
+            self.command = mesonlib.listify(command)
+            if mesonlib.is_windows():
+                cmd = self.command[0]
+                args = self.command[1:]
+                # Check whether the specified cmd is a path to a script, in
+                # which case we need to insert the interpreter. If not, try to
+                # use it as-is.
+                ret = self._shebang_to_cmd(cmd)
+                if ret:
+                    self.command = ret + args
+                else:
+                    self.command = [cmd] + args
+        else:
+            all_search_dirs = [search_dir]
+            if extra_search_dirs:
+                all_search_dirs += extra_search_dirs
+            for d in all_search_dirs:
+                self.command = self._search(name, d)
+                if self.found():
+                    break
+
+        if self.found():
+            # Set path to be the last item that is actually a file (in order to
+            # skip options in something like ['python', '-u', 'file.py']). If we
+            # can't find any components, default to the last component of the path.
+            for arg in reversed(self.command):
+                if arg is not None and os.path.isfile(arg):
+                    self.path = arg
+                    break
+            else:
+                self.path = self.command[-1]
+
+        if not silent:
+            # ignore the warning because derived classes never call this __init__
+            # method, and thus only the found() method of this class is ever executed
+            if self.found():  # lgtm [py/init-calls-subclass]
+                mlog.log('Program', mlog.bold(name), 'found:', mlog.green('YES'),
+                         '(%s)' % ' '.join(self.command))
+            else:
+                mlog.log('Program', mlog.bold(name), 'found:', mlog.red('NO'))
+
+    def summary_value(self) -> T.Union[str, mlog.AnsiDecorator]:
+        if not self.found():
+            return mlog.red('NO')
+        return self.path
+
+    def __repr__(self) -> str:
+        r = '<{} {!r} -> {!r}>'
+        return r.format(self.__class__.__name__, self.name, self.command)
+
+    def description(self) -> str:
+        '''Human friendly description of the command'''
+        return ' '.join(self.command)
+
+    def get_version(self, interpreter: T.Optional['Interpreter'] = None) -> str:
+        if not self.cached_version:
+            from . import build
+            raw_cmd = self.get_command() + ['--version']
+            if interpreter:
+                res = interpreter.run_command_impl(interpreter.current_node, (self, ['--version']),
+                                                   {'capture': True,
+                                                    'check': True,
+                                                    'env': build.EnvironmentVariables()},
+                                                   True)
+                o, e = res.stdout, res.stderr
+            else:
+                p, o, e = mesonlib.Popen_safe(raw_cmd)
+                if p.returncode != 0:
+                    cmd_str = mesonlib.join_args(raw_cmd)
+                    raise mesonlib.MesonException(f'Command {cmd_str!r} failed with status {p.returncode}.')
+            output = o.strip()
+            if not output:
+                output = e.strip()
+            match = re.search(r'([0-9][0-9\.]+)', output)
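+            # e.g. for output such as 'git version 2.39.2' this captures
+            # '2.39.2' (the first run of digits and dots in the output)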
+            if not match:
+                raise mesonlib.MesonException(f'Could not find a version number in output of {raw_cmd!r}')
+            self.cached_version = match.group(1)
+        return self.cached_version
+
+    @classmethod
+    def from_bin_list(cls, env: 'Environment', for_machine: MachineChoice, name: str) -> 'ExternalProgram':
+        # There is a static `for_machine` for this class because the binary
+        # always runs on the build platform. (Its host platform is our build
+        # platform.) But some external programs have a target platform, so this
+        # is what we are specifying here.
+        command = env.lookup_binary_entry(for_machine, name)
+        if command is None:
+            return NonExistingExternalProgram()
+        return cls.from_entry(name, command)
+
+    @staticmethod
+    @functools.lru_cache(maxsize=None)
+    def _windows_sanitize_path(path: str) -> str:
+        # Ensure that we use USERPROFILE even when inside MSYS, MSYS2, Cygwin, etc.
+        if 'USERPROFILE' not in os.environ:
+            return path
+        # The WindowsApps directory is a bit of a problem. It contains
+        # some zero-sized .exe files which have "reparse points", that
+        # might either launch an installed application, or might open
+        # a page in the Windows Store to download the application.
+        #
+        # To handle the case where the python interpreter we're
+        # running on came from the Windows Store, if we see the
+        # WindowsApps path in the search path, replace it with
+        # dirname(sys.executable).
+        appstore_dir = Path(os.environ['USERPROFILE']) / 'AppData' / 'Local' / 'Microsoft' / 'WindowsApps'
+        paths = []
+        for each in path.split(os.pathsep):
+            if Path(each) != appstore_dir:
+                paths.append(each)
+            elif 'WindowsApps' in sys.executable:
+                paths.append(os.path.dirname(sys.executable))
+        return os.pathsep.join(paths)
+
+    @staticmethod
+    def from_entry(name: str, command: T.Union[str, T.List[str]]) -> 'ExternalProgram':
+        if isinstance(command, list):
+            if len(command) == 1:
+                command = command[0]
+        # We cannot do any searching if the command is a list, and we don't
+        # need to search if the path is an absolute path.
+        if isinstance(command, list) or os.path.isabs(command):
+            if isinstance(command, str):
+                command = [command]
+            return ExternalProgram(name, command=command, silent=True)
+        assert isinstance(command, str)
+        # Search for the command using the specified string!
+        return ExternalProgram(command, silent=True)
+
+    @staticmethod
+    def _shebang_to_cmd(script: str) -> T.Optional[T.List[str]]:
+        """
+        Check if the file has a shebang and manually parse it to figure out
+        the interpreter to use. This is useful if the script is not executable
+        or if we're on Windows (which does not understand shebangs).
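+
+        For example, a script whose first line is '#!/usr/bin/env python3'
+        resolves to mesonlib.python_command plus the script path on every
+        platform branch below, so callers can run it without relying on the
+        OS's shebang handling.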
+        """
+        try:
+            with open(script, encoding='utf-8') as f:
+                first_line = f.readline().strip()
+            if first_line.startswith('#!'):
+                # In a shebang, everything before the first space is assumed to
+                # be the command to run and everything after the first space is
+                # the single argument to pass to that command. So we must split
+                # exactly once.
+                commands = first_line[2:].split('#')[0].strip().split(maxsplit=1)
+                if mesonlib.is_windows():
+                    # Windows does not have UNIX paths so remove them,
+                    # but don't remove Windows paths
+                    if commands[0].startswith('/'):
+                        commands[0] = commands[0].split('/')[-1]
+                    if len(commands) > 0 and commands[0] == 'env':
+                        commands = commands[1:]
+                    # Windows does not ship python3.exe, but we know the path to it
+                    if len(commands) > 0 and commands[0] == 'python3':
+                        commands = mesonlib.python_command + commands[1:]
+                elif mesonlib.is_haiku():
+                    # Haiku does not have /usr, but a lot of scripts assume that
+                    # /usr/bin/env always exists. Detect that case and run the
+                    # script with the interpreter after it.
+                    if commands[0] == '/usr/bin/env':
+                        commands = commands[1:]
+                    # We know what python3 is, we're running on it
+                    if len(commands) > 0 and commands[0] == 'python3':
+                        commands = mesonlib.python_command + commands[1:]
+                else:
+                    # Replace python3 with the actual python3 that we are using
+                    if commands[0] == '/usr/bin/env' and commands[1] == 'python3':
+                        commands = mesonlib.python_command + commands[2:]
+                    elif commands[0].split('/')[-1] == 'python3':
+                        commands = mesonlib.python_command + commands[1:]
+                return commands + [script]
+        except Exception as e:
+            mlog.debug(str(e))
+        mlog.debug(f'Unusable script {script!r}')
+        return None
+
+    def _is_executable(self, path: str) -> bool:
+        suffix = os.path.splitext(path)[-1].lower()[1:]
+        execmask = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
+        if mesonlib.is_windows():
+            if suffix in self.windows_exts:
+                return True
+        elif os.stat(path).st_mode & execmask:
+            return not os.path.isdir(path)
+        return False
+
+    def _search_dir(self, name: str, search_dir: T.Optional[str]) -> T.Optional[list]:
+        if search_dir is None:
+            return None
+        trial = os.path.join(search_dir, name)
+        if os.path.exists(trial):
+            if self._is_executable(trial):
+                return [trial]
+            # Now getting desperate. Maybe it is a script file that is
+            # a) not chmodded executable, or
+            # b) we are on Windows, where it can't be directly executed.
+            return self._shebang_to_cmd(trial)
+        else:
+            if mesonlib.is_windows():
+                for ext in self.windows_exts:
+                    trial_ext = f'{trial}.{ext}'
+                    if os.path.exists(trial_ext):
+                        return [trial_ext]
+        return None
+
+    def _search_windows_special_cases(self, name: str, command: T.Optional[str]) -> T.List[T.Optional[str]]:
+        '''
+        Lots of weird Windows quirks:
+        1. PATH search for @name returns files with extensions from PATHEXT,
+           but only self.windows_exts are executable without an interpreter.
+        2. @name might be an absolute path to an executable, but without the
+           extension. This works inside MinGW so people use it a lot.
+        3. The script is specified without an extension, in which case we have
+           to manually search in PATH.
+        4. More special-casing for the shebang inside the script.
+        '''
+        if command:
+            # On Windows, even if the PATH search returned a full path, we can't be
+            # sure that it can be run directly if it's not a native executable.
+            # For instance, interpreted scripts sometimes need to be run explicitly
+            # with an interpreter if the file association is not done properly.
+            name_ext = os.path.splitext(command)[1]
+            if name_ext[1:].lower() in self.windows_exts:
+                # Good, it can be directly executed
+                return [command]
+            # Try to extract the interpreter from the shebang
+            commands = self._shebang_to_cmd(command)
+            if commands:
+                return commands
+            return [None]
+        # Maybe the name is an absolute path to a native Windows
+        # executable, but without the extension. This is technically wrong,
+        # but many people do it because it works in the MinGW shell.
+        if os.path.isabs(name):
+            for ext in self.windows_exts:
+                command = f'{name}.{ext}'
+                if os.path.exists(command):
+                    return [command]
+        # On Windows, interpreted scripts must have an extension otherwise they
+        # cannot be found by a standard PATH search. So we do a custom search
+        # where we manually search for a script with a shebang in PATH.
+        search_dirs = self._windows_sanitize_path(os.environ.get('PATH', '')).split(';')
+        for search_dir in search_dirs:
+            commands = self._search_dir(name, search_dir)
+            if commands:
+                return commands
+        return [None]
+
+    def _search(self, name: str, search_dir: T.Optional[str]) -> T.List[T.Optional[str]]:
+        '''
+        Search in the specified dir for the specified executable by name
+        and if not found search in PATH
+        '''
+        commands = self._search_dir(name, search_dir)
+        if commands:
+            return commands
+        # If there is a directory component, do not look in PATH
+        if os.path.dirname(name) and not os.path.isabs(name):
+            return [None]
+        # Do a standard search in PATH
+        path = os.environ.get('PATH', None)
+        if mesonlib.is_windows() and path:
+            path = self._windows_sanitize_path(path)
+        command = shutil.which(name, path=path)
+        if mesonlib.is_windows():
+            return self._search_windows_special_cases(name, command)
+        # On UNIX-like platforms, shutil.which() is enough to find
+        # all executables whether in PATH or with an absolute path
+        return [command]
+
+    def found(self) -> bool:
+        return self.command[0] is not None
+
+    def get_command(self) -> T.List[str]:
+        return self.command[:]
+
+    def get_path(self) -> T.Optional[str]:
+        return self.path
+
+    def get_name(self) -> str:
+        return self.name
+
+
+class NonExistingExternalProgram(ExternalProgram):  # lgtm [py/missing-call-to-init]
+    "A program that will never exist"
+
+    def __init__(self, name: str = 'nonexistingprogram') -> None:
+        self.name = name
+        self.command = [None]
+        self.path = None
+
+    def __repr__(self) -> str:
+        r = '<{} {!r} -> {!r}>'
+        return r.format(self.__class__.__name__, self.name, self.command)
+
+    def found(self) -> bool:
+        return False
+
+
+class OverrideProgram(ExternalProgram):
+
+    """A script overriding a program."""
+
+
+def find_external_program(env: 'Environment', for_machine: MachineChoice, name: str,
+                          display_name: str, default_names: T.List[str],
+                          allow_default_for_cross: bool = True) -> T.Generator['ExternalProgram', None, None]:
+    """Find an external program, checking the cross file plus any default options."""
+    potential_names = OrderedSet(default_names)
+    potential_names.add(name)
+    # Lookup in cross or machine file.
+    for potential_name in potential_names:
+        potential_cmd = env.lookup_binary_entry(for_machine, potential_name)
+        if potential_cmd is not None:
+            mlog.debug(f'{display_name} binary for {for_machine} specified from cross file, native file, '
+                       f'or env var as {potential_cmd}')
+            yield ExternalProgram.from_entry(potential_name, potential_cmd)
+            # We never fall back if the user-specified option is no good, so
+            # stop returning options.
+            return
+    mlog.debug(f'{display_name} binary missing from cross or native file, or env var undefined.')
+    # Fall back on hard-coded defaults, if a default binary is allowed for use
+    # with cross targets, or if this is not a cross target
+    if allow_default_for_cross or not (for_machine is MachineChoice.HOST and env.is_cross_build(for_machine)):
+        for potential_path in default_names:
+            mlog.debug(f'Trying a default {display_name} fallback at', potential_path)
+            yield ExternalProgram(potential_path, silent=True)
+    else:
+        mlog.debug('Default target is not allowed for cross use')
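+
+# Illustrative usage (hypothetical names): callers iterate the generator until
+# a found program is yielded, e.g.
+#
+#     for prog in find_external_program(env, MachineChoice.HOST, 'patch',
+#                                       'Patch', ['patch', 'gpatch']):
+#         if prog.found():
+#             break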
diff --git a/vendored-meson/meson/mesonbuild/rewriter.py b/vendored-meson/meson/mesonbuild/rewriter.py
new file mode 100644
index 000000000000..a9b2e881c356
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/rewriter.py
@@ -0,0 +1,1070 @@
+#!/usr/bin/env python3
+# Copyright 2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This class contains the basic functionality needed to run any interpreter
+# or an interpreter-based tool.
+
+# This tool is used to manipulate an existing Meson build definition.
+#
+# - add a file to a target
+# - remove files from a target
+# - move targets
+# - reindent?
+from __future__ import annotations
+
+from .ast import IntrospectionInterpreter, BUILD_TARGET_FUNCTIONS, AstConditionLevel, AstIDGenerator, AstIndentationGenerator, AstPrinter
+from mesonbuild.mesonlib import MesonException, setup_vsenv
+from . import mlog, environment
+from functools import wraps
+from .mparser import Token, ArrayNode, ArgumentNode, AssignmentNode, BooleanNode, ElementaryNode, IdNode, FunctionNode, StringNode
+import json, os, re, sys
+import typing as T
+
+if T.TYPE_CHECKING:
+    from .mparser import BaseNode
+
+class RewriterException(MesonException):
+    pass
+
+def add_arguments(parser, formatter=None):
+    parser.add_argument('-s', '--sourcedir', type=str, default='.', metavar='SRCDIR', help='Path to source directory.')
+    parser.add_argument('-V', '--verbose', action='store_true', default=False, help='Enable verbose output')
+    parser.add_argument('-S', '--skip-errors', dest='skip', action='store_true', default=False, help='Skip errors instead of aborting')
+    subparsers = parser.add_subparsers(dest='type', title='Rewriter commands', description='Rewrite command to execute')
+
+    # Target
+    tgt_parser = subparsers.add_parser('target', aliases=['tgt'], help='Modify a target', formatter_class=formatter)
+    tgt_parser.add_argument('-s', '--subdir', default='', dest='subdir', help='Subdirectory of the new target (only for the "add_target" action)')
+    tgt_parser.add_argument('--type', dest='tgt_type', choices=rewriter_keys['target']['target_type'][2], default='executable',
+                            help='Type of the target to add (only for the "add_target" action)')
+    tgt_parser.add_argument('target', help='Name or ID of the target')
+    tgt_parser.add_argument('operation', choices=['add', 'rm', 'add_target', 'rm_target', 'add_extra_files', 'rm_extra_files', 'info'],
+                            help='Action to execute')
+    tgt_parser.add_argument('sources', nargs='*', help='Sources to add/remove')
+
+    # KWARGS
+    kw_parser = subparsers.add_parser('kwargs', help='Modify keyword arguments', formatter_class=formatter)
+    kw_parser.add_argument('operation', choices=rewriter_keys['kwargs']['operation'][2],
+                           help='Action to execute')
+    kw_parser.add_argument('function', choices=list(rewriter_func_kwargs.keys()),
+                           help='Function type to modify')
+    kw_parser.add_argument('id', help='ID of the function to modify (can be anything for "project")')
+    kw_parser.add_argument('kwargs', nargs='*', help='Pairs of keyword and value')
+
+    # Default options
+    def_parser = subparsers.add_parser('default-options', aliases=['def'], help='Modify the project default options', formatter_class=formatter)
+    def_parser.add_argument('operation', choices=rewriter_keys['default_options']['operation'][2],
+                            help='Action to execute')
+    def_parser.add_argument('options', nargs='*', help='Key, value pairs of configuration option')
+
+    # JSON file/command
+    cmd_parser = subparsers.add_parser('command', aliases=['cmd'], help='Execute a JSON array of commands', formatter_class=formatter)
+    cmd_parser.add_argument('json', help='JSON string or file to execute')
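+
+    # Illustrative invocations accepted by this parser (assuming the usual
+    # `meson rewrite` entry point):
+    #   meson rewrite target mytgt add foo.c bar.c
+    #   meson rewrite kwargs set project / version 1.0.0
+    #   meson rewrite default-options set cpp_std c++17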
+
+class RequiredKeys:
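+    # Illustrative: decorating a method with
+    # @RequiredKeys(rewriter_keys['target']) requires the cmd dict (the second
+    # positional argument) to contain 'target' and 'operation', fills optional
+    # keys such as 'sources' and 'subdir' from their declared defaults, and
+    # validates values against the allowed choices.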
+    def __init__(self, keys):
+        self.keys = keys
+
+    def __call__(self, f):
+        @wraps(f)
+        def wrapped(*wrapped_args, **wrapped_kwargs):
+            assert len(wrapped_args) >= 2
+            cmd = wrapped_args[1]
+            for key, val in self.keys.items():
+                typ = val[0] # The type of the value
+                default = val[1] # The default value -- None is required
+                choices = val[2] # Valid choices -- None is for everything
+                if key not in cmd:
+                    if default is not None:
+                        cmd[key] = default
+                    else:
+                        raise RewriterException('Key "{}" is missing in object for {}'
+                                                .format(key, f.__name__))
+                if not isinstance(cmd[key], typ):
+                    raise RewriterException('Invalid type of "{}". Required is {} but provided was {}'
+                                            .format(key, typ.__name__, type(cmd[key]).__name__))
+                if choices is not None:
+                    assert isinstance(choices, list)
+                    if cmd[key] not in choices:
+                        raise RewriterException('Invalid value of "{}": Possible values are {} but provided was "{}"'
+                                                .format(key, choices, cmd[key]))
+            return f(*wrapped_args, **wrapped_kwargs)
+
+        return wrapped
+
+class MTypeBase:
+    def __init__(self, node: T.Optional[BaseNode] = None):
+        if node is None:
+            self.node = self._new_node()  # lgtm [py/init-calls-subclass] (node creation does not depend on base class state)
+        else:
+            self.node = node
+        self.node_type = None
+        for i in self.supported_nodes():  # lgtm [py/init-calls-subclass] (listing nodes does not depend on base class state)
+            if isinstance(self.node, i):
+                self.node_type = i
+
+    def _new_node(self):
+        # Override in derived class
+        raise RewriterException('Internal error: _new_node of MTypeBase was called')
+
+    def can_modify(self):
+        return self.node_type is not None
+
+    def get_node(self):
+        return self.node
+
+    def supported_nodes(self):
+        # Override in derived class
+        return []
+
+    def set_value(self, value):
+        # Override in derived class
+        mlog.warning('Cannot set the value of type', mlog.bold(type(self).__name__), '--> skipping')
+
+    def add_value(self, value):
+        # Override in derived class
+        mlog.warning('Cannot add a value of type', mlog.bold(type(self).__name__), '--> skipping')
+
+    def remove_value(self, value):
+        # Override in derived class
+        mlog.warning('Cannot remove a value of type', mlog.bold(type(self).__name__), '--> skipping')
+
+    def remove_regex(self, value):
+        # Override in derived class
+        mlog.warning('Cannot remove a regex in type', mlog.bold(type(self).__name__), '--> skipping')
+
+class MTypeStr(MTypeBase):
+    def __init__(self, node: T.Optional[BaseNode] = None):
+        super().__init__(node)
+
+    def _new_node(self):
+        return StringNode(Token('', '', 0, 0, 0, None, ''))
+
+    def supported_nodes(self):
+        return [StringNode]
+
+    def set_value(self, value):
+        self.node.value = str(value)
+
+class MTypeBool(MTypeBase):
+    def __init__(self, node: T.Optional[BaseNode] = None):
+        super().__init__(node)
+
+    def _new_node(self):
+        return BooleanNode(Token('', '', 0, 0, 0, None, False))
+
+    def supported_nodes(self):
+        return [BooleanNode]
+
+    def set_value(self, value):
+        self.node.value = bool(value)
+
+class MTypeID(MTypeBase):
+    def __init__(self, node: T.Optional[BaseNode] = None):
+        super().__init__(node)
+
+    def _new_node(self):
+        return IdNode(Token('', '', 0, 0, 0, None, ''))
+
+    def supported_nodes(self):
+        return [IdNode]
+
+    def set_value(self, value):
+        self.node.value = str(value)
+
+class MTypeList(MTypeBase):
+    def __init__(self, node: T.Optional[BaseNode] = None):
+        super().__init__(node)
+
+    def _new_node(self):
+        return ArrayNode(ArgumentNode(Token('', '', 0, 0, 0, None, '')), 0, 0, 0, 0)
+
+    def _new_element_node(self, value):
+        # Override in derived class
+        raise RewriterException('Internal error: _new_element_node of MTypeList was called')
+
+    def _ensure_array_node(self):
+        if not isinstance(self.node, ArrayNode):
+            tmp = self.node
+            self.node = self._new_node()
+            self.node.args.arguments += [tmp]
+
+    def _check_is_equal(self, node, value) -> bool:
+        # Override in derived class
+        return False
+
+    def _check_regex_matches(self, node, regex: str) -> bool:
+        # Override in derived class
+        return False
+
+    def get_node(self):
+        if isinstance(self.node, ArrayNode):
+            if len(self.node.args.arguments) == 1:
+                return self.node.args.arguments[0]
+        return self.node
+
+    def supported_element_nodes(self):
+        # Override in derived class
+        return []
+
+    def supported_nodes(self):
+        return [ArrayNode] + self.supported_element_nodes()
+
+    def set_value(self, value):
+        if not isinstance(value, list):
+            value = [value]
+        self._ensure_array_node()
+        self.node.args.arguments = [] # Remove all current nodes
+        for i in value:
+            self.node.args.arguments += [self._new_element_node(i)]
+
+    def add_value(self, value):
+        if not isinstance(value, list):
+            value = [value]
+        self._ensure_array_node()
+        for i in value:
+            self.node.args.arguments += [self._new_element_node(i)]
+
+    def _remove_helper(self, value, equal_func):
+        def check_remove_node(node):
+            for j in value:
+                if equal_func(node, j):
+                    return True
+            return False
+
+        if not isinstance(value, list):
+            value = [value]
+        self._ensure_array_node()
+        # Keep only the nodes that do not match any of the values to remove
+        kept_nodes = []
+        for i in self.node.args.arguments:
+            if not check_remove_node(i):
+                kept_nodes += [i]
+        self.node.args.arguments = kept_nodes
+
+    def remove_value(self, value):
+        self._remove_helper(value, self._check_is_equal)
+
+    def remove_regex(self, regex: str):
+        self._remove_helper(regex, self._check_regex_matches)
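+
+    # Illustrative: the rewriter wraps an existing kwarg node in one of these
+    # modifier types; e.g. MTypeStrList(node).add_value('foo.c') promotes a
+    # lone StringNode to an ArrayNode (via _ensure_array_node) and appends a
+    # new StringNode for 'foo.c'.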
+
+class MTypeStrList(MTypeList):
+    def __init__(self, node: T.Optional[BaseNode] = None):
+        super().__init__(node)
+
+    def _new_element_node(self, value):
+        return StringNode(Token('', '', 0, 0, 0, None, str(value)))
+
+    def _check_is_equal(self, node, value) -> bool:
+        if isinstance(node, StringNode):
+            return node.value == value
+        return False
+
+    def _check_regex_matches(self, node, regex: str) -> bool:
+        if isinstance(node, StringNode):
+            return re.match(regex, node.value) is not None
+        return False
+
+    def supported_element_nodes(self):
+        return [StringNode]
+
+class MTypeIDList(MTypeList):
+    def __init__(self, node: T.Optional[BaseNode] = None):
+        super().__init__(node)
+
+    def _new_element_node(self, value):
+        return IdNode(Token('', '', 0, 0, 0, None, str(value)))
+
+    def _check_is_equal(self, node, value) -> bool:
+        if isinstance(node, IdNode):
+            return node.value == value
+        return False
+
+    def _check_regex_matches(self, node, regex: str) -> bool:
+        if isinstance(node, StringNode):
+            return re.match(regex, node.value) is not None
+        return False
+
+    def supported_element_nodes(self):
+        return [IdNode]
+
+rewriter_keys = {
+    'default_options': {
+        'operation': (str, None, ['set', 'delete']),
+        'options': (dict, {}, None)
+    },
+    'kwargs': {
+        'function': (str, None, None),
+        'id': (str, None, None),
+        'operation': (str, None, ['set', 'delete', 'add', 'remove', 'remove_regex', 'info']),
+        'kwargs': (dict, {}, None)
+    },
+    'target': {
+        'target': (str, None, None),
+        'operation': (str, None, ['src_add', 'src_rm', 'target_rm', 'target_add', 'extra_files_add', 'extra_files_rm', 'info']),
+        'sources': (list, [], None),
+        'subdir': (str, '', None),
+        'target_type': (str, 'executable', ['both_libraries', 'executable', 'jar', 'library', 'shared_library', 'shared_module', 'static_library']),
+    }
+}
+
+rewriter_func_kwargs = {
+    'dependency': {
+        'language': MTypeStr,
+        'method': MTypeStr,
+        'native': MTypeBool,
+        'not_found_message': MTypeStr,
+        'required': MTypeBool,
+        'static': MTypeBool,
+        'version': MTypeStrList,
+        'modules': MTypeStrList
+    },
+    'target': {
+        'build_by_default': MTypeBool,
+        'build_rpath': MTypeStr,
+        'dependencies': MTypeIDList,
+        'gui_app': MTypeBool,
+        'link_with': MTypeIDList,
+        'export_dynamic': MTypeBool,
+        'implib': MTypeBool,
+        'install': MTypeBool,
+        'install_dir': MTypeStr,
+        'install_rpath': MTypeStr,
+        'pie': MTypeBool
+    },
+    'project': {
+        'default_options': MTypeStrList,
+        'meson_version': MTypeStr,
+        'license': MTypeStrList,
+        'subproject_dir': MTypeStr,
+        'version': MTypeStr
+    }
+}
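+
+# Illustrative JSON for `meson rewrite command` (hypothetical target 'exe1'):
+#   [{"type": "target", "target": "exe1", "operation": "src_add",
+#     "sources": ["main.cpp"]}]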
+
+class Rewriter:
+    def __init__(self, sourcedir: str, generator: str = 'ninja', skip_errors: bool = False):
+        self.sourcedir = sourcedir
+        self.interpreter = IntrospectionInterpreter(sourcedir, '', generator, visitors = [AstIDGenerator(), AstIndentationGenerator(), AstConditionLevel()])
+        self.skip_errors = skip_errors
+        self.modified_nodes = []
+        self.to_remove_nodes = []
+        self.to_add_nodes = []
+        self.functions = {
+            'default_options': self.process_default_options,
+            'kwargs': self.process_kwargs,
+            'target': self.process_target,
+        }
+        self.info_dump = None
+
+    def analyze_meson(self):
+        mlog.log('Analyzing meson file:', mlog.bold(os.path.join(self.sourcedir, environment.build_filename)))
+        self.interpreter.analyze()
+        mlog.log('  -- Project:', mlog.bold(self.interpreter.project_data['descriptive_name']))
+        mlog.log('  -- Version:', mlog.cyan(self.interpreter.project_data['version']))
+
+    def add_info(self, cmd_type: str, cmd_id: str, data: dict):
+        if self.info_dump is None:
+            self.info_dump = {}
+        if cmd_type not in self.info_dump:
+            self.info_dump[cmd_type] = {}
+        self.info_dump[cmd_type][cmd_id] = data
+
+    def print_info(self):
+        if self.info_dump is None:
+            return
+        sys.stderr.write(json.dumps(self.info_dump, indent=2))
+
+    def on_error(self):
+        if self.skip_errors:
+            return mlog.cyan('-->'), mlog.yellow('skipping')
+        return mlog.cyan('-->'), mlog.red('aborting')
+
+    def handle_error(self):
+        if self.skip_errors:
+            return None
+        raise MesonException('Rewriting the meson.build failed')
+
+    def find_target(self, target: str):
+        def check_list(name: str) -> T.List[BaseNode]:
+            result = []
+            for i in self.interpreter.targets:
+                if name in {i['name'], i['id']}:
+                    result += [i]
+            return result
+
+        targets = check_list(target)
+        if targets:
+            if len(targets) == 1:
+                return targets[0]
+            else:
+                mlog.error('There are multiple targets matching', mlog.bold(target))
+                for i in targets:
+                    mlog.error('  -- Target name', mlog.bold(i['name']), 'with ID', mlog.bold(i['id']))
+                mlog.error('Please try again with the unique ID of the target', *self.on_error())
+                self.handle_error()
+                return None
+
+        # Check the assignments
+        tgt = None
+        if target in self.interpreter.assignments:
+            node = self.interpreter.assignments[target]
+            if isinstance(node, FunctionNode):
+                if node.func_name in {'executable', 'jar', 'library', 'shared_library', 'shared_module', 'static_library', 'both_libraries'}:
+                    tgt = self.interpreter.assign_vals[target]
+
+        return tgt
+
+    def find_dependency(self, dependency: str):
+        def check_list(name: str):
+            for i in self.interpreter.dependencies:
+                if name == i['name']:
+                    return i
+            return None
+
+        dep = check_list(dependency)
+        if dep is not None:
+            return dep
+
+        # Check the assignments
+        if dependency in self.interpreter.assignments:
+            node = self.interpreter.assignments[dependency]
+            if isinstance(node, FunctionNode):
+                if node.func_name == 'dependency':
+                    name = self.interpreter.flatten_args(node.args)[0]
+                    dep = check_list(name)
+
+        return dep
+
+    @RequiredKeys(rewriter_keys['default_options'])
+    def process_default_options(self, cmd):
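+        # Illustrative cmd (already validated by RequiredKeys), e.g.
+        #   {'operation': 'set', 'options': {'cpp_std': 'c++17'}}
+        # removes any existing 'cpp_std=.*' default first, then re-adds it
+        # with the new value.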
+        # First, remove the old values
+        kwargs_cmd = {
+            'function': 'project',
+            'id': "/",
+            'operation': 'remove_regex',
+            'kwargs': {
+                'default_options': [f'{x}=.*' for x in cmd['options'].keys()]
+            }
+        }
+        self.process_kwargs(kwargs_cmd)
+
+        # Then add the new values
+        if cmd['operation'] != 'set':
+            return
+
+        kwargs_cmd['operation'] = 'add'
+        kwargs_cmd['kwargs']['default_options'] = []
+
+        cdata = self.interpreter.coredata
+        options = {str(k): v for k, v in cdata.options.items()}
+
+        for key, val in sorted(cmd['options'].items()):
+            if key not in options:
+                mlog.error('Unknown option', mlog.bold(key), *self.on_error())
+                self.handle_error()
+                continue
+
+            try:
+                val = options[key].validate_value(val)
+            except MesonException as e:
+                mlog.error('Unable to set', mlog.bold(key), mlog.red(str(e)), *self.on_error())
+                self.handle_error()
+                continue
+
+            kwargs_cmd['kwargs']['default_options'] += [f'{key}={val}']
+
+        self.process_kwargs(kwargs_cmd)
+
+    @RequiredKeys(rewriter_keys['kwargs'])
+    def process_kwargs(self, cmd):
+        mlog.log('Processing function type', mlog.bold(cmd['function']), 'with id', mlog.cyan("'" + cmd['id'] + "'"))
+        if cmd['function'] not in rewriter_func_kwargs:
+            mlog.error('Unknown function type', cmd['function'], *self.on_error())
+            return self.handle_error()
+        kwargs_def = rewriter_func_kwargs[cmd['function']]
+
+        # Find the function node to modify
+        node = None
+        arg_node = None
+        if cmd['function'] == 'project':
+            # msys bash may expand '/' to a path. It will mangle '//' to '/'
+            # but in order to keep usage shell-agnostic, also allow `//` as
+            # the function ID such that it will work in both msys bash and
+            # other shells.
+            if {'/', '//'}.isdisjoint({cmd['id']}):
+                mlog.error('The ID for the function type project must be "/" or "//" not "' + cmd['id'] + '"', *self.on_error())
+                return self.handle_error()
+            node = self.interpreter.project_node
+            arg_node = node.args
+        elif cmd['function'] == 'target':
+            tmp = self.find_target(cmd['id'])
+            if tmp:
+                node = tmp['node']
+                arg_node = node.args
+        elif cmd['function'] == 'dependency':
+            tmp = self.find_dependency(cmd['id'])
+            if tmp:
+                node = tmp['node']
+                arg_node = node.args
+        if not node:
+            mlog.error('Unable to find the function node')
+        assert isinstance(node, FunctionNode)
+        assert isinstance(arg_node, ArgumentNode)
+        # Transform the key nodes to plain strings
+        arg_node.kwargs = {k.value: v for k, v in arg_node.kwargs.items()}
+
+        # Print kwargs info
+        if cmd['operation'] == 'info':
+            info_data = {}
+            for key, val in sorted(arg_node.kwargs.items()):
+                info_data[key] = None
+                if isinstance(val, ElementaryNode):
+                    info_data[key] = val.value
+                elif isinstance(val, ArrayNode):
+                    data_list = []
+                    for i in val.args.arguments:
+                        element = None
+                        if isinstance(i, ElementaryNode):
+                            element = i.value
+                        data_list += [element]
+                    info_data[key] = data_list
+
+            self.add_info('kwargs', '{}#{}'.format(cmd['function'], cmd['id']), info_data)
+            return # Nothing else to do
+
+        # Modify the kwargs
+        num_changed = 0
+        for key, val in sorted(cmd['kwargs'].items()):
+            if key not in kwargs_def:
+                mlog.error('Cannot modify unknown kwarg', mlog.bold(key), *self.on_error())
+                self.handle_error()
+                continue
+
+            # Remove the key from the kwargs
+            if cmd['operation'] == 'delete':
+                if key in arg_node.kwargs:
+                    mlog.log('  -- Deleting', mlog.bold(key), 'from the kwargs')
+                    del arg_node.kwargs[key]
+                    num_changed += 1
+                else:
+                    mlog.log('  -- Key', mlog.bold(key), 'is already deleted')
+                continue
+
+            if key not in arg_node.kwargs:
+                arg_node.kwargs[key] = None
+            modifier = kwargs_def[key](arg_node.kwargs[key])
+            if not modifier.can_modify():
+                mlog.log('  -- Skipping', mlog.bold(key), 'because it is too complex to modify')
+                continue
+
+            # Apply the operation
+            val_str = str(val)
+            if cmd['operation'] == 'set':
+                mlog.log('  -- Setting', mlog.bold(key), 'to', mlog.yellow(val_str))
+                modifier.set_value(val)
+            elif cmd['operation'] == 'add':
+                mlog.log('  -- Adding', mlog.yellow(val_str), 'to', mlog.bold(key))
+                modifier.add_value(val)
+            elif cmd['operation'] == 'remove':
+                mlog.log('  -- Removing', mlog.yellow(val_str), 'from', mlog.bold(key))
+                modifier.remove_value(val)
+            elif cmd['operation'] == 'remove_regex':
+                mlog.log('  -- Removing all values matching', mlog.yellow(val_str), 'from', mlog.bold(key))
+                modifier.remove_regex(val)
+
+            # Write back the result
+            arg_node.kwargs[key] = modifier.get_node()
+            num_changed += 1
+
+        # Convert the keys back to IdNode's
+        arg_node.kwargs = {IdNode(Token('', '', 0, 0, 0, None, k)): v for k, v in arg_node.kwargs.items()}
+        for k, v in arg_node.kwargs.items():
+            k.level = v.level
+        if num_changed > 0 and node not in self.modified_nodes:
+            self.modified_nodes += [node]
+
+    def find_assignment_node(self, node: BaseNode) -> AssignmentNode:
+        if node.ast_id and node.ast_id in self.interpreter.reverse_assignment:
+            return self.interpreter.reverse_assignment[node.ast_id]
+        return None
+
+    @RequiredKeys(rewriter_keys['target'])
+    def process_target(self, cmd):
+        mlog.log('Processing target', mlog.bold(cmd['target']), 'operation', mlog.cyan(cmd['operation']))
+        target = self.find_target(cmd['target'])
+        if target is None and cmd['operation'] != 'target_add':
+            mlog.error('Unknown target', mlog.bold(cmd['target']), *self.on_error())
+            return self.handle_error()
+
+        # Make source paths relative to the current subdir
+        def rel_source(src: str) -> str:
+            subdir = os.path.abspath(os.path.join(self.sourcedir, target['subdir']))
+            if os.path.isabs(src):
+                return os.path.relpath(src, subdir)
+            elif not os.path.exists(src):
+                return src # Trust the user when the source doesn't exist
+            # Make sure that the path is relative to the subdir
+            return os.path.relpath(os.path.abspath(src), subdir)
+
+        if target is not None:
+            cmd['sources'] = [rel_source(x) for x in cmd['sources']]
+
+        # Utility function to get a list of the sources from a node
+        def arg_list_from_node(n):
+            args = []
+            if isinstance(n, FunctionNode):
+                args = list(n.args.arguments)
+                if n.func_name in BUILD_TARGET_FUNCTIONS:
+                    args.pop(0)
+            elif isinstance(n, ArrayNode):
+                args = n.args.arguments
+            elif isinstance(n, ArgumentNode):
+                args = n.arguments
+            return args
+
+        to_sort_nodes = []
+
+        if cmd['operation'] == 'src_add':
+            node = None
+            if target['sources']:
+                node = target['sources'][0]
+            else:
+                node = target['node']
+            assert node is not None
+
+            # Generate the current source list
+            src_list = []
+            for i in target['sources']:
+                for j in arg_list_from_node(i):
+                    if isinstance(j, StringNode):
+                        src_list += [j.value]
+
+            # Generate the new String nodes
+            to_append = []
+            for i in sorted(set(cmd['sources'])):
+                if i in src_list:
+                    mlog.log('  -- Source', mlog.green(i), 'is already defined for the target --> skipping')
+                    continue
+                mlog.log('  -- Adding source', mlog.green(i), 'at',
+                         mlog.yellow(f'{node.filename}:{node.lineno}'))
+                token = Token('string', node.filename, 0, 0, 0, None, i)
+                to_append += [StringNode(token)]
+
+            # Append to the AST at the right place
+            arg_node = None
+            if isinstance(node, (FunctionNode, ArrayNode)):
+                arg_node = node.args
+            elif isinstance(node, ArgumentNode):
+                arg_node = node
+            assert arg_node is not None
+            arg_node.arguments += to_append
+
+            # Mark the node as modified
+            if arg_node not in to_sort_nodes and not isinstance(node, FunctionNode):
+                to_sort_nodes += [arg_node]
+            if node not in self.modified_nodes:
+                self.modified_nodes += [node]
+
+        elif cmd['operation'] == 'src_rm':
+            # Helper to find the exact string node and its parent
+            def find_node(src):
+                for i in target['sources']:
+                    for j in arg_list_from_node(i):
+                        if isinstance(j, StringNode):
+                            if j.value == src:
+                                return i, j
+                return None, None
+
+            for i in cmd['sources']:
+                # Try to find the node with the source string
+                root, string_node = find_node(i)
+                if root is None:
+                    mlog.warning('  -- Unable to find source', mlog.green(i), 'in the target')
+                    continue
+
+                # Remove the found string node from the argument list
+                arg_node = None
+                if isinstance(root, (FunctionNode, ArrayNode)):
+                    arg_node = root.args
+                elif isinstance(root, ArgumentNode):
+                    arg_node = root
+                assert arg_node is not None
+                mlog.log('  -- Removing source', mlog.green(i), 'from',
+                         mlog.yellow(f'{string_node.filename}:{string_node.lineno}'))
+                arg_node.arguments.remove(string_node)
+
+                # Mark the node as modified
+                if arg_node not in to_sort_nodes and not isinstance(root, FunctionNode):
+                    to_sort_nodes += [arg_node]
+                if root not in self.modified_nodes:
+                    self.modified_nodes += [root]
+
+        elif cmd['operation'] == 'extra_files_add':
+            tgt_function: FunctionNode = target['node']
+            mark_array = True
+            try:
+                node = target['extra_files'][0]
+            except IndexError:
+                # Specifying `extra_files` with a list that flattens to empty gives an empty
+                # target['extra_files'] list, account for that.
+                try:
+                    extra_files_key = next(k for k in tgt_function.args.kwargs.keys() if isinstance(k, IdNode) and k.value == 'extra_files')
+                    node = tgt_function.args.kwargs[extra_files_key]
+                except StopIteration:
+                    # Target has no extra_files kwarg, create one
+                    node = ArrayNode(ArgumentNode(Token('', tgt_function.filename, 0, 0, 0, None, '[]')), tgt_function.end_lineno, tgt_function.end_colno, tgt_function.end_lineno, tgt_function.end_colno)
+                    tgt_function.args.kwargs[IdNode(Token('string', tgt_function.filename, 0, 0, 0, None, 'extra_files'))] = node
+                    mark_array = False
+                    if tgt_function not in self.modified_nodes:
+                        self.modified_nodes += [tgt_function]
+                target['extra_files'] = [node]
+            if isinstance(node, IdNode):
+                node = self.interpreter.assignments[node.value]
+                target['extra_files'] = [node]
+            if not isinstance(node, ArrayNode):
+                mlog.error('Target', mlog.bold(cmd['target']), 'extra_files argument must be a list', *self.on_error())
+                return self.handle_error()
+
+            # Generate the current extra files list
+            extra_files_list = []
+            for i in target['extra_files']:
+                for j in arg_list_from_node(i):
+                    if isinstance(j, StringNode):
+                        extra_files_list += [j.value]
+
+            # Generate the new String nodes
+            to_append = []
+            for i in sorted(set(cmd['sources'])):
+                if i in extra_files_list:
+                    mlog.log('  -- Extra file', mlog.green(i), 'is already defined for the target --> skipping')
+                    continue
+                mlog.log('  -- Adding extra file', mlog.green(i), 'at',
+                         mlog.yellow(f'{node.filename}:{node.lineno}'))
+                token = Token('string', node.filename, 0, 0, 0, None, i)
+                to_append += [StringNode(token)]
+
+            # Append to the AST at the right place
+            arg_node = node.args
+            arg_node.arguments += to_append
+
+            # Mark the node as modified
+            if arg_node not in to_sort_nodes:
+                to_sort_nodes += [arg_node]
+            # If the extra_files array is newly created, don't mark it as modified:
+            # its parent function node already is, and marking both would cause a
+            # double modification.
+            if mark_array and node not in self.modified_nodes:
+                self.modified_nodes += [node]
+
+        elif cmd['operation'] == 'extra_files_rm':
+            # Helper to find the exact string node and its parent
+            def find_node(src):
+                for i in target['extra_files']:
+                    for j in arg_list_from_node(i):
+                        if isinstance(j, StringNode):
+                            if j.value == src:
+                                return i, j
+                return None, None
+
+            for i in cmd['sources']:
+                # Try to find the node with the source string
+                root, string_node = find_node(i)
+                if root is None:
+                    mlog.warning('  -- Unable to find extra file', mlog.green(i), 'in the target')
+                    continue
+
+                # Remove the found string node from the argument list
+                arg_node = root.args
+                mlog.log('  -- Removing extra file', mlog.green(i), 'from',
+                         mlog.yellow(f'{string_node.filename}:{string_node.lineno}'))
+                arg_node.arguments.remove(string_node)
+
+                # Mark the node as modified
+                if arg_node not in to_sort_nodes and not isinstance(root, FunctionNode):
+                    to_sort_nodes += [arg_node]
+                if root not in self.modified_nodes:
+                    self.modified_nodes += [root]
+
+        elif cmd['operation'] == 'target_add':
+            if target is not None:
+                mlog.error('Can not add target', mlog.bold(cmd['target']), 'because it already exists', *self.on_error())
+                return self.handle_error()
+
+            id_base = re.sub(r'[- ]', '_', cmd['target'])
+            target_id = id_base + ('_exe' if cmd['target_type'] == 'executable' else '_lib')
+            source_id = id_base + '_sources'
+            filename = os.path.join(cmd['subdir'], environment.build_filename)
+
+            # Build src list
+            src_arg_node = ArgumentNode(Token('string', filename, 0, 0, 0, None, ''))
+            src_arr_node = ArrayNode(src_arg_node, 0, 0, 0, 0)
+            src_far_node = ArgumentNode(Token('string', filename, 0, 0, 0, None, ''))
+            src_fun_node = FunctionNode(filename, 0, 0, 0, 0, 'files', src_far_node)
+            src_ass_node = AssignmentNode(filename, 0, 0, source_id, src_fun_node)
+            src_arg_node.arguments = [StringNode(Token('string', filename, 0, 0, 0, None, x)) for x in cmd['sources']]
+            src_far_node.arguments = [src_arr_node]
+
+            # Build target
+            tgt_arg_node = ArgumentNode(Token('string', filename, 0, 0, 0, None, ''))
+            tgt_fun_node = FunctionNode(filename, 0, 0, 0, 0, cmd['target_type'], tgt_arg_node)
+            tgt_ass_node = AssignmentNode(filename, 0, 0, target_id, tgt_fun_node)
+            tgt_arg_node.arguments = [
+                StringNode(Token('string', filename, 0, 0, 0, None, cmd['target'])),
+                IdNode(Token('string', filename, 0, 0, 0, None, source_id))
+            ]
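+            # The AST assembled here corresponds roughly to the Meson code:
+            #   <id_base>_sources = files(['a.c', ...])
+            #   <id_base>_exe = executable('<target>', <id_base>_sources)
+            # (with a '_lib' suffix and the given target type for non-executables)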
+
+            src_ass_node.accept(AstIndentationGenerator())
+            tgt_ass_node.accept(AstIndentationGenerator())
+            self.to_add_nodes += [src_ass_node, tgt_ass_node]
+
+        elif cmd['operation'] == 'target_rm':
+            to_remove = self.find_assignment_node(target['node'])
+            if to_remove is None:
+                to_remove = target['node']
+            self.to_remove_nodes += [to_remove]
+            mlog.log('  -- Removing target', mlog.green(cmd['target']), 'at',
+                     mlog.yellow(f'{to_remove.filename}:{to_remove.lineno}'))
+
+        elif cmd['operation'] == 'info':
+            # List all sources in the target
+            src_list = []
+            for i in target['sources']:
+                for j in arg_list_from_node(i):
+                    if isinstance(j, StringNode):
+                        src_list += [j.value]
+            extra_files_list = []
+            for i in target['extra_files']:
+                for j in arg_list_from_node(i):
+                    if isinstance(j, StringNode):
+                        extra_files_list += [j.value]
+            test_data = {
+                'name': target['name'],
+                'sources': src_list,
+                'extra_files': extra_files_list
+            }
+            self.add_info('target', target['id'], test_data)
+
+        # Sort files
+        for i in to_sort_nodes:
+            convert = lambda text: int(text) if text.isdigit() else text.lower()
+            alphanum_key = lambda key: [convert(c) for c in re.split('([0-9]+)', key)]
+            path_sorter = lambda key: ([(key.count('/') <= idx, alphanum_key(x)) for idx, x in enumerate(key.split('/'))])
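+            # For illustration: this orders ['a/10.c', 'a/2.c', 'b.c'] as
+            # ['a/2.c', 'a/10.c', 'b.c'] -- natural numeric ordering, with
+            # sources in subdirectories grouped before top-level ones.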
+
+            unknown = [x for x in i.arguments if not isinstance(x, StringNode)]
+            sources = [x for x in i.arguments if isinstance(x, StringNode)]
+            sources = sorted(sources, key=lambda x: path_sorter(x.value))
+            i.arguments = unknown + sources
+
+    def process(self, cmd):
+        if 'type' not in cmd:
+            raise RewriterException('Command has no key "type"')
+        if cmd['type'] not in self.functions:
+            raise RewriterException('Unknown command "{}". Supported commands are: {}'
+                                    .format(cmd['type'], list(self.functions.keys())))
+        self.functions[cmd['type']](cmd)
+
+    def apply_changes(self):
+        assert all(hasattr(x, 'lineno') and hasattr(x, 'colno') and hasattr(x, 'filename') for x in self.modified_nodes)
+        assert all(hasattr(x, 'lineno') and hasattr(x, 'colno') and hasattr(x, 'filename') for x in self.to_remove_nodes)
+        assert all(isinstance(x, (ArrayNode, FunctionNode)) for x in self.modified_nodes)
+        assert all(isinstance(x, (ArrayNode, AssignmentNode, FunctionNode)) for x in self.to_remove_nodes)
+        # Sort based on line and column in reversed order
+        work_nodes = [{'node': x, 'action': 'modify'} for x in self.modified_nodes]
+        work_nodes += [{'node': x, 'action': 'rm'} for x in self.to_remove_nodes]
+        work_nodes = sorted(work_nodes, key=lambda x: (x['node'].lineno, x['node'].colno), reverse=True)
+        work_nodes += [{'node': x, 'action': 'add'} for x in self.to_add_nodes]
+
+        # Generating the new replacement string
+        str_list = []
+        for i in work_nodes:
+            new_data = ''
+            if i['action'] == 'modify' or i['action'] == 'add':
+                printer = AstPrinter()
+                i['node'].accept(printer)
+                printer.post_process()
+                new_data = printer.result.strip()
+            data = {
+                'file': i['node'].filename,
+                'str': new_data,
+                'node': i['node'],
+                'action': i['action']
+            }
+            str_list += [data]
+
+        # Load build files
+        files = {}
+        for i in str_list:
+            if i['file'] in files:
+                continue
+            fpath = os.path.realpath(os.path.join(self.sourcedir, i['file']))
+            fdata = ''
+            # Create an empty file if it does not exist
+            if not os.path.exists(fpath):
+                with open(fpath, 'w', encoding='utf-8'):
+                    pass
+            with open(fpath, encoding='utf-8') as fp:
+                fdata = fp.read()
+
+            # Compute the starting byte offset of each line
+            m_lines = fdata.splitlines(True)
+            offset = 0
+            line_offsets = []
+            for j in m_lines:
+                line_offsets += [offset]
+                offset += len(j)
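+            # e.g. 'a\nbc\n' yields offsets [0, 2]; a node at line L, column C
+            # starts at raw index offsets[L - 1] + C (lineno is 1-based)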
+
+            files[i['file']] = {
+                'path': fpath,
+                'raw': fdata,
+                'offsets': line_offsets
+            }
+
+        # Replace in source code
+        def remove_node(i):
+            offsets = files[i['file']]['offsets']
+            raw = files[i['file']]['raw']
+            node = i['node']
+            line = node.lineno - 1
+            col = node.colno
+            start = offsets[line] + col
+            end = start
+            if isinstance(node, (ArrayNode, FunctionNode)):
+                end = offsets[node.end_lineno - 1] + node.end_colno
+
+            # Only removal is supported for assignments
+            elif isinstance(node, AssignmentNode) and i['action'] == 'rm':
+                if isinstance(node.value, (ArrayNode, FunctionNode)):
+                    remove_node({'file': i['file'], 'str': '', 'node': node.value, 'action': 'rm'})
+                    raw = files[i['file']]['raw']
+                while raw[end] != '=':
+                    end += 1
+                end += 1 # Handle the '='
+                while raw[end] in {' ', '\n', '\t'}:
+                    end += 1
+
+            files[i['file']]['raw'] = raw[:start] + i['str'] + raw[end:]
+
+        for i in str_list:
+            if i['action'] in {'modify', 'rm'}:
+                remove_node(i)
+            elif i['action'] == 'add':
+                files[i['file']]['raw'] += i['str'] + '\n'
+
+        # Write the files back
+        for key, val in files.items():
+            mlog.log('Rewriting', mlog.yellow(key))
+            with open(val['path'], 'w', encoding='utf-8') as fp:
+                fp.write(val['raw'])
+
+target_operation_map = {
+    'add': 'src_add',
+    'rm': 'src_rm',
+    'add_target': 'target_add',
+    'rm_target': 'target_rm',
+    'add_extra_files': 'extra_files_add',
+    'rm_extra_files': 'extra_files_rm',
+    'info': 'info',
+}
+
+def list_to_dict(in_list: T.List[str]) -> T.Dict[str, str]:
+    result = {}
+    it = iter(in_list)
+    try:
+        for i in it:
+            # Calling next(it) here is deliberate: it consumes the following
+            # element, pairing each key with the value after it without first
+            # preprocessing the list into key/value pairs.
+            result[i] = next(it)
+    except StopIteration:
+        raise TypeError('in_list parameter of list_to_dict must have an even length.') from None
+    return result
+
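+# Usage sketch (illustrative doctest): consecutive elements are paired up,
+#
+#     >>> list_to_dict(['a', '1', 'b', '2'])
+#     {'a': '1', 'b': '2'}
+#
+# while an odd-length list raises the TypeError above.
+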
+def generate_target(options) -> T.List[dict]:
+    return [{
+        'type': 'target',
+        'target': options.target,
+        'operation': target_operation_map[options.operation],
+        'sources': options.sources,
+        'subdir': options.subdir,
+        'target_type': options.tgt_type,
+    }]
+
+def generate_kwargs(options) -> T.List[dict]:
+    return [{
+        'type': 'kwargs',
+        'function': options.function,
+        'id': options.id,
+        'operation': options.operation,
+        'kwargs': list_to_dict(options.kwargs),
+    }]
+
+def generate_def_opts(options) -> T.List[dict]:
+    return [{
+        'type': 'default_options',
+        'operation': options.operation,
+        'options': list_to_dict(options.options),
+    }]
+
+def generate_cmd(options) -> T.List[dict]:
+    if os.path.exists(options.json):
+        with open(options.json, encoding='utf-8') as fp:
+            return json.load(fp)
+    else:
+        return json.loads(options.json)
+
+# Map options.type to the actual type name
+cli_type_map = {
+    'target': generate_target,
+    'tgt': generate_target,
+    'kwargs': generate_kwargs,
+    'default-options': generate_def_opts,
+    'def': generate_def_opts,
+    'command': generate_cmd,
+    'cmd': generate_cmd,
+}
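+# For example (assuming the upstream `meson rewrite` CLI), a call such as
+#   meson rewrite target mytgt add foo.c
+# sets options.type to 'target' and is routed through cli_type_map to
+# generate_target(), which emits a single 'target' command dict.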
+
+def run(options):
+    if not options.verbose:
+        mlog.set_quiet()
+
+    try:
+        setup_vsenv()
+        rewriter = Rewriter(options.sourcedir, skip_errors=options.skip)
+        rewriter.analyze_meson()
+
+        if options.type is None:
+            mlog.error('No command specified')
+            return 1
+
+        commands = cli_type_map[options.type](options)
+
+        if not isinstance(commands, list):
+            raise TypeError('Command is not a list')
+
+        for i in commands:
+            if not isinstance(i, dict):
+                raise TypeError('Command is not an object')
+            rewriter.process(i)
+
+        rewriter.apply_changes()
+        rewriter.print_info()
+        return 0
+    except Exception:
+        raise
+    finally:
+        mlog.set_verbose()
diff --git a/vendored-meson/meson/mesonbuild/scripts/__init__.py b/vendored-meson/meson/mesonbuild/scripts/__init__.py
new file mode 100644
index 000000000000..72777713aadd
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/scripts/__init__.py
@@ -0,0 +1,21 @@
+# Copyright 2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from pathlib import PurePath
+
+def destdir_join(d1: str, d2: str) -> str:
+    if not d1:
+        return d2
+    # c:\destdir + c:\prefix must produce c:\destdir\prefix
+    return str(PurePath(d1, *PurePath(d2).parts[1:]))
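+
+# Illustrative behaviour on a POSIX system (doctest-style sketch):
+#
+#     >>> destdir_join('/dest', '/usr/local')
+#     '/dest/usr/local'
+#     >>> destdir_join('', '/usr/local')
+#     '/usr/local'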
diff --git a/vendored-meson/meson/mesonbuild/scripts/clangformat.py b/vendored-meson/meson/mesonbuild/scripts/clangformat.py
new file mode 100644
index 000000000000..c66df1600ebe
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/scripts/clangformat.py
@@ -0,0 +1,65 @@
+# Copyright 2018 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+import argparse
+import subprocess
+from pathlib import Path
+
+from .run_tool import run_tool
+from ..environment import detect_clangformat
+from ..mesonlib import version_compare
+from ..programs import ExternalProgram
+import typing as T
+
+def run_clang_format(fname: Path, exelist: T.List[str], check: bool, cformat_ver: T.Optional[str]) -> subprocess.CompletedProcess:
+    clangformat_10 = False
+    if check:
+        if cformat_ver and version_compare(cformat_ver, '>=10'):
+            clangformat_10 = True
+            exelist = exelist + ['--dry-run', '--Werror']
+        else:
+            # Save the original so it can be restored after a checking run.
+            original = fname.read_bytes()
+    before = fname.stat().st_mtime
+    ret = subprocess.run(exelist + ['-style=file', '-i', str(fname)])
+    after = fname.stat().st_mtime
+    if before != after:
+        print('File reformatted: ', fname)
+        if check and not clangformat_10:
+            # Restore the original if only checking.
+            fname.write_bytes(original)
+            ret.returncode = 1
+    return ret
+
+def run(args: T.List[str]) -> int:
+    parser = argparse.ArgumentParser()
+    parser.add_argument('--check', action='store_true')
+    parser.add_argument('sourcedir')
+    parser.add_argument('builddir')
+    options = parser.parse_args(args)
+
+    srcdir = Path(options.sourcedir)
+    builddir = Path(options.builddir)
+
+    exelist = detect_clangformat()
+    if not exelist:
+        print('Could not find clang-format')
+        return 1
+
+    if options.check:
+        cformat_ver = ExternalProgram('clang-format', exelist, silent=True).get_version()
+    else:
+        cformat_ver = None
+
+    return run_tool('clang-format', srcdir, builddir, run_clang_format, exelist, options.check, cformat_ver)
diff --git a/vendored-meson/meson/mesonbuild/scripts/clangtidy.py b/vendored-meson/meson/mesonbuild/scripts/clangtidy.py
new file mode 100644
index 000000000000..324a26ea00e5
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/scripts/clangtidy.py
@@ -0,0 +1,35 @@
+# Copyright 2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+import argparse
+import subprocess
+from pathlib import Path
+
+from .run_tool import run_tool
+import typing as T
+
+def run_clang_tidy(fname: Path, builddir: Path) -> subprocess.CompletedProcess:
+    return subprocess.run(['clang-tidy', '-p', str(builddir), str(fname)])
+
+def run(args: T.List[str]) -> int:
+    parser = argparse.ArgumentParser()
+    parser.add_argument('sourcedir')
+    parser.add_argument('builddir')
+    options = parser.parse_args(args)
+
+    srcdir = Path(options.sourcedir)
+    builddir = Path(options.builddir)
+
+    return run_tool('clang-tidy', srcdir, builddir, run_clang_tidy, builddir)
diff --git a/vendored-meson/meson/mesonbuild/scripts/cleantrees.py b/vendored-meson/meson/mesonbuild/scripts/cleantrees.py
new file mode 100644
index 000000000000..3512f5658782
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/scripts/cleantrees.py
@@ -0,0 +1,45 @@
+# Copyright 2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+import os
+import sys
+import shutil
+import pickle
+import typing as T
+
+def rmtrees(build_dir: str, trees: T.List[str]) -> None:
+    for t in trees:
+        # Never delete trees outside of the builddir
+        if os.path.isabs(t):
+            print(f'Cannot delete dir with absolute path {t!r}')
+            continue
+        bt = os.path.join(build_dir, t)
+        # Skip if it doesn't exist, or if it is not a directory
+        if os.path.isdir(bt):
+            shutil.rmtree(bt, ignore_errors=True)
+
+def run(args: T.List[str]) -> int:
+    if len(args) != 1:
+        print('Cleaner script for Meson. Do not run on your own please.')
+        print('cleantrees.py <data-file>')
+        return 1
+    with open(args[0], 'rb') as f:
+        data = pickle.load(f)
+    rmtrees(data.build_dir, data.trees)
+    # Never fail cleaning
+    return 0
+
+if __name__ == '__main__':
+    run(sys.argv[1:])
diff --git a/vendored-meson/meson/mesonbuild/scripts/cmake_run_ctgt.py b/vendored-meson/meson/mesonbuild/scripts/cmake_run_ctgt.py
new file mode 100755
index 000000000000..755530a6805b
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/scripts/cmake_run_ctgt.py
@@ -0,0 +1,103 @@
+#!/usr/bin/env python3
+from __future__ import annotations
+
+import argparse
+import subprocess
+import shutil
+import sys
+from pathlib import Path
+import typing as T
+
+def run(argsv: T.List[str]) -> int:
+    commands = [[]]  # type: T.List[T.List[str]]
+    SEPARATOR = ';;;'
+
+    # Generate CMD parameters
+    parser = argparse.ArgumentParser(description='Wrapper for add_custom_command')
+    parser.add_argument('-d', '--directory', type=str, metavar='D', required=True, help='Working directory to cwd to')
+    parser.add_argument('-o', '--outputs', nargs='+', metavar='O', required=True, help='Expected output files')
+    parser.add_argument('-O', '--original-outputs', nargs='*', metavar='O', default=[], help='Output files expected by CMake')
+    parser.add_argument('commands', nargs=argparse.REMAINDER, help=f'A "{SEPARATOR}" separated list of commands')
+
+    # Parse
+    args = parser.parse_args(argsv)
+    directory = Path(args.directory)
+
+    dummy_target = None
+    if len(args.outputs) == 1 and len(args.original_outputs) == 0:
+        dummy_target = Path(args.outputs[0])
+    elif len(args.outputs) != len(args.original_outputs):
+        print('Length of output list and original output list differ')
+        return 1
+
+    for i in args.commands:
+        if i == SEPARATOR:
+            commands += [[]]
+            continue
+
+        i = i.replace('"', '')  # Remove leftover quotes
+        commands[-1] += [i]
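+    # e.g. ['touch', 'a.txt', ';;;', 'touch', 'b.txt'] is split into
+    # [['touch', 'a.txt'], ['touch', 'b.txt']]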
+
+    # Execute
+    for i in commands:
+        # Skip empty lists
+        if not i:
+            continue
+
+        cmd = []
+        stdout = None
+        stderr = None
+        capture_file = ''
+
+        for j in i:
+            if j in {'>', '>>'}:
+                stdout = subprocess.PIPE
+                continue
+            elif j in {'&>', '&>>'}:
+                stdout = subprocess.PIPE
+                stderr = subprocess.STDOUT
+                continue
+
+            if stdout is not None or stderr is not None:
+                capture_file += j
+            else:
+                cmd += [j]
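+        # e.g. i == ['foo', '>', 'out.txt'] runs 'foo' with stdout piped and
+        # later writes the captured bytes to <directory>/out.txt below.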
+
+        try:
+            directory.mkdir(parents=True, exist_ok=True)
+
+            res = subprocess.run(cmd, stdout=stdout, stderr=stderr, cwd=str(directory), check=True)
+            if capture_file:
+                out_file = directory / capture_file
+                out_file.write_bytes(res.stdout)
+        except subprocess.CalledProcessError:
+            return 1
+
+    if dummy_target:
+        dummy_target.touch()
+        return 0
+
+    # Copy outputs
+    zipped_outputs = zip([Path(x) for x in args.outputs], [Path(x) for x in args.original_outputs])
+    for expected, generated in zipped_outputs:
+        do_copy = False
+        if not expected.exists():
+            if not generated.exists():
+                print('Unable to find generated file. This can cause the build to fail:')
+                print(generated)
+                do_copy = False
+            else:
+                do_copy = True
+        elif generated.exists():
+            if generated.stat().st_mtime > expected.stat().st_mtime:
+                do_copy = True
+
+        if do_copy:
+            if expected.exists():
+                expected.unlink()
+            shutil.copyfile(str(generated), str(expected))
+
+    return 0
+
+if __name__ == '__main__':
+    sys.exit(run(sys.argv[1:]))
diff --git a/vendored-meson/meson/mesonbuild/scripts/cmd_or_ps.ps1 b/vendored-meson/meson/mesonbuild/scripts/cmd_or_ps.ps1
new file mode 100644
index 000000000000..96c32e29df12
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/scripts/cmd_or_ps.ps1
@@ -0,0 +1,17 @@
+# Copied from GStreamer project
+# Author: Seungha Yang 
+#         Xavier Claessens 
+
+$i=1
+$ppid=$PID
+do {
+  $ppid=(Get-CimInstance Win32_Process -Filter "ProcessId=$ppid").parentprocessid
+  $pname=(Get-Process -id $ppid).Name
+  if($pname -eq "cmd" -Or $pname -eq "powershell" -Or $pname -eq "pwsh") {
+    Write-Host ("{0}.exe" -f $pname)
+    Break
+  }
+  # not found yet, find grand parent
+  # iterating 10 times should be sufficient
+  $i++
+} while ($i -lt 10)
diff --git a/vendored-meson/meson/mesonbuild/scripts/copy.py b/vendored-meson/meson/mesonbuild/scripts/copy.py
new file mode 100644
index 000000000000..dba13a57ed98
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/scripts/copy.py
@@ -0,0 +1,19 @@
+# SPDX-License-Identifier: Apache-2.0
+# Copyright © 2021 Intel Corporation
+from __future__ import annotations
+
+"""Helper script to copy files at build time.
+
+This is easier than trying to detect whether to use copy, cp, or something else.
+"""
+
+import shutil
+import typing as T
+
+
+def run(args: T.List[str]) -> int:
+    try:
+        shutil.copy2(args[0], args[1])
+    except Exception:
+        return 1
+    return 0
diff --git a/vendored-meson/meson/mesonbuild/scripts/coverage.py b/vendored-meson/meson/mesonbuild/scripts/coverage.py
new file mode 100644
index 000000000000..cb865d08d258
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/scripts/coverage.py
@@ -0,0 +1,202 @@
+# Copyright 2017 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+from mesonbuild import environment, mesonlib
+
+import argparse, re, sys, os, subprocess, pathlib, stat
+import typing as T
+
+def coverage(outputs: T.List[str], source_root: str, subproject_root: str, build_root: str, log_dir: str, use_llvm_cov: bool) -> int:
+    outfiles = []
+    exitcode = 0
+
+    (gcovr_exe, gcovr_version, lcov_exe, genhtml_exe, llvm_cov_exe) = environment.find_coverage_tools()
+
+    # load config files for tools if available in the source tree
+    # - lcov requires manually specifying a per-project config
+    # - gcovr picks up the per-project config, and also supports filtering files
+    #   so don't exclude subprojects ourselves, if the project has a config,
+    #   because they either don't want that, or should set it themselves
+    lcovrc = os.path.join(source_root, '.lcovrc')
+    if os.path.exists(lcovrc):
+        lcov_config = ['--config-file', lcovrc]
+    else:
+        lcov_config = []
+
+    gcovr_config = ['-e', re.escape(subproject_root)]
+
+    # gcovr >= 4.2 requires a different syntax for out-of-source builds
+    if gcovr_exe and mesonlib.version_compare(gcovr_version, '>=4.2'):
+        gcovr_base_cmd = [gcovr_exe, '-r', source_root, build_root]
+        # it also started supporting the config file
+        if os.path.exists(os.path.join(source_root, 'gcovr.cfg')):
+            gcovr_config = []
+    else:
+        gcovr_base_cmd = [gcovr_exe, '-r', build_root]
+
+    if use_llvm_cov:
+        gcov_exe_args = ['--gcov-executable', llvm_cov_exe + ' gcov']
+    else:
+        gcov_exe_args = []
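+    # With gcovr >= 4.2 and llvm-cov enabled, the assembled base invocation
+    # is roughly: gcovr -r <source_root> <build_root> --gcov-executable
+    # '<llvm-cov> gcov' plus the per-format flags added below.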
+
+    if not outputs or 'xml' in outputs:
+        if gcovr_exe and mesonlib.version_compare(gcovr_version, '>=3.3'):
+            subprocess.check_call(gcovr_base_cmd + gcovr_config +
+                                  ['-x',
+                                   '-o', os.path.join(log_dir, 'coverage.xml')
+                                   ] + gcov_exe_args)
+            outfiles.append(('Xml', pathlib.Path(log_dir, 'coverage.xml')))
+        elif outputs:
+            print('gcovr >= 3.3 needed to generate Xml coverage report')
+            exitcode = 1
+
+    if not outputs or 'sonarqube' in outputs:
+        if gcovr_exe and mesonlib.version_compare(gcovr_version, '>=4.2'):
+            subprocess.check_call(gcovr_base_cmd + gcovr_config +
+                                  ['--sonarqube',
+                                   '-o', os.path.join(log_dir, 'sonarqube.xml'),
+                                   ] + gcov_exe_args)
+            outfiles.append(('Sonarqube', pathlib.Path(log_dir, 'sonarqube.xml')))
+        elif outputs:
+            print('gcovr >= 4.2 needed to generate Sonarqube Xml coverage report')
+            exitcode = 1
+
+    if not outputs or 'text' in outputs:
+        if gcovr_exe and mesonlib.version_compare(gcovr_version, '>=3.3'):
+            subprocess.check_call(gcovr_base_cmd + gcovr_config +
+                                  ['-o', os.path.join(log_dir, 'coverage.txt')] +
+                                  gcov_exe_args)
+            outfiles.append(('Text', pathlib.Path(log_dir, 'coverage.txt')))
+        elif outputs:
+            print('gcovr >= 3.3 needed to generate text coverage report')
+            exitcode = 1
+
+    if not outputs or 'html' in outputs:
+        if lcov_exe and genhtml_exe:
+            htmloutdir = os.path.join(log_dir, 'coveragereport')
+            covinfo = os.path.join(log_dir, 'coverage.info')
+            initial_tracefile = covinfo + '.initial'
+            run_tracefile = covinfo + '.run'
+            raw_tracefile = covinfo + '.raw'
+            if use_llvm_cov:
+                # Create a shim to allow using llvm-cov as a gcov tool.
+                if mesonlib.is_windows():
+                    llvm_cov_shim_path = os.path.join(log_dir, 'llvm-cov.bat')
+                    with open(llvm_cov_shim_path, 'w', encoding='utf-8') as llvm_cov_bat:
+                        llvm_cov_bat.write(f'@"{llvm_cov_exe}" gcov %*')
+                else:
+                    llvm_cov_shim_path = os.path.join(log_dir, 'llvm-cov.sh')
+                    with open(llvm_cov_shim_path, 'w', encoding='utf-8') as llvm_cov_sh:
+                        llvm_cov_sh.write(f'#!/usr/bin/env sh\nexec "{llvm_cov_exe}" gcov "$@"')
+                    os.chmod(llvm_cov_shim_path, os.stat(llvm_cov_shim_path).st_mode | stat.S_IEXEC)
+                gcov_tool_args = ['--gcov-tool', llvm_cov_shim_path]
+            else:
+                gcov_tool_args = []
+            subprocess.check_call([lcov_exe,
+                                   '--directory', build_root,
+                                   '--capture',
+                                   '--initial',
+                                   '--output-file',
+                                   initial_tracefile] +
+                                  lcov_config +
+                                  gcov_tool_args)
+            subprocess.check_call([lcov_exe,
+                                   '--directory', build_root,
+                                   '--capture',
+                                   '--output-file', run_tracefile,
+                                   '--no-checksum',
+                                   '--rc', 'lcov_branch_coverage=1'] +
+                                  lcov_config +
+                                  gcov_tool_args)
+            # Join initial and test results.
+            subprocess.check_call([lcov_exe,
+                                   '-a', initial_tracefile,
+                                   '-a', run_tracefile,
+                                   '--rc', 'lcov_branch_coverage=1',
+                                   '-o', raw_tracefile] + lcov_config)
+            # Remove all directories outside the source_root from the covinfo
+            subprocess.check_call([lcov_exe,
+                                   '--extract', raw_tracefile,
+                                   os.path.join(source_root, '*'),
+                                   '--rc', 'lcov_branch_coverage=1',
+                                   '--output-file', covinfo] + lcov_config)
+            # Remove all directories inside subproject dir
+            subprocess.check_call([lcov_exe,
+                                   '--remove', covinfo,
+                                   os.path.join(subproject_root, '*'),
+                                   '--rc', 'lcov_branch_coverage=1',
+                                   '--output-file', covinfo] + lcov_config)
+            subprocess.check_call([genhtml_exe,
+                                   '--prefix', build_root,
+                                   '--prefix', source_root,
+                                   '--output-directory', htmloutdir,
+                                   '--title', 'Code coverage',
+                                   '--legend',
+                                   '--show-details',
+                                   '--branch-coverage',
+                                   covinfo])
+            outfiles.append(('Html', pathlib.Path(htmloutdir, 'index.html')))
+        elif gcovr_exe and mesonlib.version_compare(gcovr_version, '>=3.3'):
+            htmloutdir = os.path.join(log_dir, 'coveragereport')
+            if not os.path.isdir(htmloutdir):
+                os.mkdir(htmloutdir)
+            subprocess.check_call(gcovr_base_cmd + gcovr_config +
+                                  ['--html',
+                                   '--html-details',
+                                   '--print-summary',
+                                   '-o', os.path.join(htmloutdir, 'index.html'),
+                                   ] + gcov_exe_args)
+            outfiles.append(('Html', pathlib.Path(htmloutdir, 'index.html')))
+        elif outputs:
+            print('lcov/genhtml or gcovr >= 3.3 needed to generate Html coverage report')
+            exitcode = 1
+
+    if not outputs and not outfiles:
+        print('Need gcovr or lcov/genhtml to generate any coverage reports')
+        exitcode = 1
+
+    if outfiles:
+        print('')
+        for (filetype, path) in outfiles:
+            print(filetype + ' coverage report can be found at', path.as_uri())
+
+    return exitcode
+
+def run(args: T.List[str]) -> int:
+    if not os.path.isfile('build.ninja'):
+        print('Coverage currently only works with the Ninja backend.')
+        return 1
+    parser = argparse.ArgumentParser(description='Generate coverage reports')
+    parser.add_argument('--text', dest='outputs', action='append_const',
+                        const='text', help='generate Text report')
+    parser.add_argument('--xml', dest='outputs', action='append_const',
+                        const='xml', help='generate Xml report')
+    parser.add_argument('--sonarqube', dest='outputs', action='append_const',
+                        const='sonarqube', help='generate Sonarqube Xml report')
+    parser.add_argument('--html', dest='outputs', action='append_const',
+                        const='html', help='generate Html report')
+    parser.add_argument('--use_llvm_cov', action='store_true',
+                        help='use llvm-cov')
+    parser.add_argument('source_root')
+    parser.add_argument('subproject_root')
+    parser.add_argument('build_root')
+    parser.add_argument('log_dir')
+    options = parser.parse_args(args)
+    return coverage(options.outputs, options.source_root,
+                    options.subproject_root, options.build_root,
+                    options.log_dir, options.use_llvm_cov)
+
+if __name__ == '__main__':
+    sys.exit(run(sys.argv[1:]))
diff --git a/vendored-meson/meson/mesonbuild/scripts/delwithsuffix.py b/vendored-meson/meson/mesonbuild/scripts/delwithsuffix.py
new file mode 100644
index 000000000000..f58b19ce3cac
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/scripts/delwithsuffix.py
@@ -0,0 +1,37 @@
+# Copyright 2013 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+import os, sys
+import typing as T
+
+def run(args: T.List[str]) -> int:
+    if len(args) != 2:
+        print('delwithsuffix.py <root of subdirectory to process> <suffix to delete>')
+        sys.exit(1)
+
+    topdir = args[0]
+    suffix = args[1]
+    if suffix[0] != '.':
+        suffix = '.' + suffix
+
+    for (root, _, files) in os.walk(topdir):
+        for f in files:
+            if f.endswith(suffix):
+                fullname = os.path.join(root, f)
+                os.unlink(fullname)
+    return 0
+
+if __name__ == '__main__':
+    run(sys.argv[1:])
diff --git a/vendored-meson/meson/mesonbuild/scripts/depfixer.py b/vendored-meson/meson/mesonbuild/scripts/depfixer.py
new file mode 100644
index 000000000000..b9c58fee546c
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/scripts/depfixer.py
@@ -0,0 +1,505 @@
+# Copyright 2013-2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+
+import sys
+import os
+import stat
+import struct
+import shutil
+import subprocess
+import typing as T
+
+from ..mesonlib import OrderedSet, generate_list, Popen_safe
+
+SHT_STRTAB = 3
+DT_NEEDED = 1
+DT_RPATH = 15
+DT_RUNPATH = 29
+DT_STRTAB = 5
+DT_SONAME = 14
+DT_MIPS_RLD_MAP_REL = 1879048245
+
+# Global cache for tools
+INSTALL_NAME_TOOL = False
+
+class DataSizes:
+    def __init__(self, ptrsize: int, is_le: bool) -> None:
+        if is_le:
+            p = '<'
+        else:
+            p = '>'
+        self.Half = p + 'h'
+        self.HalfSize = 2
+        self.Word = p + 'I'
+        self.WordSize = 4
+        self.Sword = p + 'i'
+        self.SwordSize = 4
+        if ptrsize == 64:
+            self.Addr = p + 'Q'
+            self.AddrSize = 8
+            self.Off = p + 'Q'
+            self.OffSize = 8
+            self.XWord = p + 'Q'
+            self.XWordSize = 8
+            self.Sxword = p + 'q'
+            self.SxwordSize = 8
+        else:
+            self.Addr = p + 'I'
+            self.AddrSize = 4
+            self.Off = p + 'I'
+            self.OffSize = 4
+
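+# For example, on a little-endian 64-bit ELF a DynamicEntry below is read with
+# struct formats '<q' (d_tag) and '<Q' (value), 8 bytes each; a big-endian
+# 32-bit ELF would use '>i' and '>I' (4 bytes each).
+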
+class DynamicEntry(DataSizes):
+    def __init__(self, ifile: T.BinaryIO, ptrsize: int, is_le: bool) -> None:
+        super().__init__(ptrsize, is_le)
+        self.ptrsize = ptrsize
+        if ptrsize == 64:
+            self.d_tag = struct.unpack(self.Sxword, ifile.read(self.SxwordSize))[0]
+            self.val = struct.unpack(self.XWord, ifile.read(self.XWordSize))[0]
+        else:
+            self.d_tag = struct.unpack(self.Sword, ifile.read(self.SwordSize))[0]
+            self.val = struct.unpack(self.Word, ifile.read(self.WordSize))[0]
+
+    def write(self, ofile: T.BinaryIO) -> None:
+        if self.ptrsize == 64:
+            ofile.write(struct.pack(self.Sxword, self.d_tag))
+            ofile.write(struct.pack(self.XWord, self.val))
+        else:
+            ofile.write(struct.pack(self.Sword, self.d_tag))
+            ofile.write(struct.pack(self.Word, self.val))
+
+class SectionHeader(DataSizes):
+    def __init__(self, ifile: T.BinaryIO, ptrsize: int, is_le: bool) -> None:
+        super().__init__(ptrsize, is_le)
+        is_64 = ptrsize == 64
+
+# Elf64_Word
+        self.sh_name = struct.unpack(self.Word, ifile.read(self.WordSize))[0]
+# Elf64_Word
+        self.sh_type = struct.unpack(self.Word, ifile.read(self.WordSize))[0]
+# Elf64_Xword
+        if is_64:
+            self.sh_flags = struct.unpack(self.XWord, ifile.read(self.XWordSize))[0]
+        else:
+            self.sh_flags = struct.unpack(self.Word, ifile.read(self.WordSize))[0]
+# Elf64_Addr
+        self.sh_addr = struct.unpack(self.Addr, ifile.read(self.AddrSize))[0]
+# Elf64_Off
+        self.sh_offset = struct.unpack(self.Off, ifile.read(self.OffSize))[0]
+# Elf64_Xword
+        if is_64:
+            self.sh_size = struct.unpack(self.XWord, ifile.read(self.XWordSize))[0]
+        else:
+            self.sh_size = struct.unpack(self.Word, ifile.read(self.WordSize))[0]
+# Elf64_Word
+        self.sh_link = struct.unpack(self.Word, ifile.read(self.WordSize))[0]
+# Elf64_Word
+        self.sh_info = struct.unpack(self.Word, ifile.read(self.WordSize))[0]
+# Elf64_Xword
+        if is_64:
+            self.sh_addralign = struct.unpack(self.XWord, ifile.read(self.XWordSize))[0]
+        else:
+            self.sh_addralign = struct.unpack(self.Word, ifile.read(self.WordSize))[0]
+# Elf64_Xword
+        if is_64:
+            self.sh_entsize = struct.unpack(self.XWord, ifile.read(self.XWordSize))[0]
+        else:
+            self.sh_entsize = struct.unpack(self.Word, ifile.read(self.WordSize))[0]
+
+class Elf(DataSizes):
+    def __init__(self, bfile: str, verbose: bool = True) -> None:
+        self.bfile = bfile
+        self.verbose = verbose
+        self.sections = []  # type: T.List[SectionHeader]
+        self.dynamic = []   # type: T.List[DynamicEntry]
+        self.open_bf(bfile)
+        try:
+            (self.ptrsize, self.is_le) = self.detect_elf_type()
+            super().__init__(self.ptrsize, self.is_le)
+            self.parse_header()
+            self.parse_sections()
+            self.parse_dynamic()
+        except (struct.error, RuntimeError):
+            self.close_bf()
+            raise
+
+    def open_bf(self, bfile: str) -> None:
+        self.bf = None
+        self.bf_perms = None
+        try:
+            self.bf = open(bfile, 'r+b')
+        except PermissionError as e:
+            self.bf_perms = stat.S_IMODE(os.lstat(bfile).st_mode)
+            os.chmod(bfile, stat.S_IREAD | stat.S_IWRITE | stat.S_IEXEC)
+            try:
+                self.bf = open(bfile, 'r+b')
+            except Exception:
+                os.chmod(bfile, self.bf_perms)
+                self.bf_perms = None
+                raise e
+
+    def close_bf(self) -> None:
+        if self.bf is not None:
+            if self.bf_perms is not None:
+                os.fchmod(self.bf.fileno(), self.bf_perms)
+                self.bf_perms = None
+            self.bf.close()
+            self.bf = None
+
+    def __enter__(self) -> 'Elf':
+        return self
+
+    def __del__(self) -> None:
+        self.close_bf()
+
+    def __exit__(self, exc_type: T.Any, exc_value: T.Any, traceback: T.Any) -> None:
+        self.close_bf()
+
+    def detect_elf_type(self) -> T.Tuple[int, bool]:
+        data = self.bf.read(6)
+        if data[1:4] != b'ELF':
+            # This script also gets called on non-ELF targets,
+            # so just ignore them.
+            if self.verbose:
+                print(f'File {self.bfile!r} is not an ELF file.')
+            sys.exit(0)
+        if data[4] == 1:
+            ptrsize = 32
+        elif data[4] == 2:
+            ptrsize = 64
+        else:
+            sys.exit(f'File {self.bfile!r} has unknown ELF class.')
+        if data[5] == 1:
+            is_le = True
+        elif data[5] == 2:
+            is_le = False
+        else:
+            sys.exit(f'File {self.bfile!r} has unknown ELF endianness.')
+        return ptrsize, is_le
+
+    def parse_header(self) -> None:
+        self.bf.seek(0)
+        self.e_ident = struct.unpack('16s', self.bf.read(16))[0]
+        self.e_type = struct.unpack(self.Half, self.bf.read(self.HalfSize))[0]
+        self.e_machine = struct.unpack(self.Half, self.bf.read(self.HalfSize))[0]
+        self.e_version = struct.unpack(self.Word, self.bf.read(self.WordSize))[0]
+        self.e_entry = struct.unpack(self.Addr, self.bf.read(self.AddrSize))[0]
+        self.e_phoff = struct.unpack(self.Off, self.bf.read(self.OffSize))[0]
+        self.e_shoff = struct.unpack(self.Off, self.bf.read(self.OffSize))[0]
+        self.e_flags = struct.unpack(self.Word, self.bf.read(self.WordSize))[0]
+        self.e_ehsize = struct.unpack(self.Half, self.bf.read(self.HalfSize))[0]
+        self.e_phentsize = struct.unpack(self.Half, self.bf.read(self.HalfSize))[0]
+        self.e_phnum = struct.unpack(self.Half, self.bf.read(self.HalfSize))[0]
+        self.e_shentsize = struct.unpack(self.Half, self.bf.read(self.HalfSize))[0]
+        self.e_shnum = struct.unpack(self.Half, self.bf.read(self.HalfSize))[0]
+        self.e_shstrndx = struct.unpack(self.Half, self.bf.read(self.HalfSize))[0]
+
+    def parse_sections(self) -> None:
+        self.bf.seek(self.e_shoff)
+        for _ in range(self.e_shnum):
+            self.sections.append(SectionHeader(self.bf, self.ptrsize, self.is_le))
+
+    def read_str(self) -> bytes:
+        arr = []
+        x = self.bf.read(1)
+        while x != b'\0':
+            arr.append(x)
+            x = self.bf.read(1)
+            if x == b'':
+                raise RuntimeError('Tried to read past the end of the file')
+        return b''.join(arr)
+
+    def find_section(self, target_name: bytes) -> T.Optional[SectionHeader]:
+        section_names = self.sections[self.e_shstrndx]
+        for i in self.sections:
+            self.bf.seek(section_names.sh_offset + i.sh_name)
+            name = self.read_str()
+            if name == target_name:
+                return i
+        return None
+
+    def parse_dynamic(self) -> None:
+        sec = self.find_section(b'.dynamic')
+        if sec is None:
+            return
+        self.bf.seek(sec.sh_offset)
+        while True:
+            e = DynamicEntry(self.bf, self.ptrsize, self.is_le)
+            self.dynamic.append(e)
+            if e.d_tag == 0:
+                break
+
+    @generate_list
+    def get_section_names(self) -> T.Generator[str, None, None]:
+        section_names = self.sections[self.e_shstrndx]
+        for i in self.sections:
+            self.bf.seek(section_names.sh_offset + i.sh_name)
+            yield self.read_str().decode()
+
+    def get_soname(self) -> T.Optional[str]:
+        soname = None
+        strtab = None
+        for i in self.dynamic:
+            if i.d_tag == DT_SONAME:
+                soname = i
+            if i.d_tag == DT_STRTAB:
+                strtab = i
+        if soname is None or strtab is None:
+            return None
+        self.bf.seek(strtab.val + soname.val)
+        return self.read_str().decode()
+
+    def get_entry_offset(self, entrynum: int) -> T.Optional[int]:
+        sec = self.find_section(b'.dynstr')
+        for i in self.dynamic:
+            if i.d_tag == entrynum:
+                res = sec.sh_offset + i.val
+                assert isinstance(res, int)
+                return res
+        return None
+
+    def get_rpath(self) -> T.Optional[str]:
+        offset = self.get_entry_offset(DT_RPATH)
+        if offset is None:
+            return None
+        self.bf.seek(offset)
+        return self.read_str().decode()
+
+    def get_runpath(self) -> T.Optional[str]:
+        offset = self.get_entry_offset(DT_RUNPATH)
+        if offset is None:
+            return None
+        self.bf.seek(offset)
+        return self.read_str().decode()
+
+    @generate_list
+    def get_deps(self) -> T.Generator[str, None, None]:
+        sec = self.find_section(b'.dynstr')
+        for i in self.dynamic:
+            if i.d_tag == DT_NEEDED:
+                offset = sec.sh_offset + i.val
+                self.bf.seek(offset)
+                yield self.read_str().decode()
+
+    def fix_deps(self, prefix: bytes) -> None:
+        sec = self.find_section(b'.dynstr')
+        deps = []
+        for i in self.dynamic:
+            if i.d_tag == DT_NEEDED:
+                deps.append(i)
+        for i in deps:
+            offset = sec.sh_offset + i.val
+            self.bf.seek(offset)
+            name = self.read_str()
+            if name.startswith(prefix):
+                basename = name.rsplit(b'/', maxsplit=1)[-1]
+                padding = b'\0' * (len(name) - len(basename))
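+                # e.g. b'/tmp/pfx/libfoo.so' becomes b'libfoo.so' plus nine
+                # NUL bytes, keeping the .dynstr string table size unchanged.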
+                newname = basename + padding
+                assert len(newname) == len(name)
+                self.bf.seek(offset)
+                self.bf.write(newname)
+
+    def fix_rpath(self, fname: str, rpath_dirs_to_remove: T.Set[bytes], new_rpath: bytes) -> None:
+        # The path to search for can be either rpath or runpath.
+        # Fix both of them to be sure.
+        self.fix_rpathtype_entry(fname, rpath_dirs_to_remove, new_rpath, DT_RPATH)
+        self.fix_rpathtype_entry(fname, rpath_dirs_to_remove, new_rpath, DT_RUNPATH)
+
+    def fix_rpathtype_entry(self, fname: str, rpath_dirs_to_remove: T.Set[bytes], new_rpath: bytes, entrynum: int) -> None:
+        rp_off = self.get_entry_offset(entrynum)
+        if rp_off is None:
+            if self.verbose:
+                print(f'File {fname!r} does not have an rpath. It should be a fully static executable.')
+            return
+        self.bf.seek(rp_off)
+
+        old_rpath = self.read_str()
+        # Some rpath entries may come from multiple sources.
+        # Only add each one once.
+        new_rpaths = OrderedSet()  # type: OrderedSet[bytes]
+        if new_rpath:
+            new_rpaths.update(new_rpath.split(b':'))
+        if old_rpath:
+            # Filter out build-only rpath entries
+            # added by get_link_dep_subdirs() or
+            # specified by user with build_rpath.
+            for rpath_dir in old_rpath.split(b':'):
+                if not (rpath_dir in rpath_dirs_to_remove or
+                        rpath_dir == (b'X' * len(rpath_dir))):
+                    if rpath_dir:
+                        new_rpaths.add(rpath_dir)
+
+        # Prepend user-specified new entries while preserving the ones that came from pkgconfig etc.
+        new_rpath = b':'.join(new_rpaths)
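+        # e.g. old rpath b'/abs/build/dir:/usr/lib' with
+        # rpath_dirs_to_remove={b'/abs/build/dir'} and new_rpath=b'$ORIGIN'
+        # yields b'$ORIGIN:/usr/lib' (shorter than the old string, as required
+        # by the length check below).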
+
+        if len(old_rpath) < len(new_rpath):
+            msg = "New rpath must not be longer than the old one.\n Old: {}\n New: {}".format(old_rpath.decode('utf-8'), new_rpath.decode('utf-8'))
+            sys.exit(msg)
+        # The linker does read-only string deduplication. If there is a
+        # string that shares a suffix with the rpath, they might get
+        # deduped. This means changing the rpath string might break something
+        # completely unrelated. This has already happened once with X.org.
+        # Thus we want to keep this change as small as possible to minimize
+        # the chance of obliterating other strings. It might still happen,
+        # but our behavior is identical to what chrpath does, and chrpath has
+        # been in use for ages, so in practice this should be rare.
+        if not new_rpath:
+            self.remove_rpath_entry(entrynum)
+        else:
+            self.bf.seek(rp_off)
+            self.bf.write(new_rpath)
+            self.bf.write(b'\0')
+
+    def remove_rpath_entry(self, entrynum: int) -> None:
+        sec = self.find_section(b'.dynamic')
+        if sec is None:
+            return None
+        for (i, entry) in enumerate(self.dynamic):
+            if entry.d_tag == entrynum:
+                rpentry = self.dynamic[i]
+                rpentry.d_tag = 0
+                self.dynamic = self.dynamic[:i] + self.dynamic[i + 1:] + [rpentry]
+                break
+        # DT_MIPS_RLD_MAP_REL is relative to the offset of the tag. Adjust it accordingly.
+        for entry in self.dynamic[i:]:
+            if entry.d_tag == DT_MIPS_RLD_MAP_REL:
+                entry.val += 2 * (self.ptrsize // 8)
+                break
+        self.bf.seek(sec.sh_offset)
+        for entry in self.dynamic:
+            entry.write(self.bf)
+        return None
+
+def fix_elf(fname: str, rpath_dirs_to_remove: T.Set[bytes], new_rpath: T.Optional[bytes], verbose: bool = True) -> None:
+    if new_rpath is not None:
+        with Elf(fname, verbose) as e:
+            # note: e.get_rpath() and e.get_runpath() may be useful
+            e.fix_rpath(fname, rpath_dirs_to_remove, new_rpath)
+
+def get_darwin_rpaths_to_remove(fname: str) -> T.List[str]:
+    p, out, _ = Popen_safe(['otool', '-l', fname], stderr=subprocess.DEVNULL)
+    if p.returncode != 0:
+        raise subprocess.CalledProcessError(p.returncode, p.args, out)
+    result = []
+    current_cmd = 'FOOBAR'
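+    # otool -l prints load-command stanzas roughly like:
+    #          cmd LC_RPATH
+    #         path /usr/lib (offset 12)
+    # we collect each 'path' value seen while the current cmd is LC_RPATH.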
+    for line in out.split('\n'):
+        line = line.strip()
+        if ' ' not in line:
+            continue
+        key, value = line.strip().split(' ', 1)
+        if key == 'cmd':
+            current_cmd = value
+        if key == 'path' and current_cmd == 'LC_RPATH':
+            rp = value.split('(', 1)[0].strip()
+            result.append(rp)
+    return result
+
+def fix_darwin(fname: str, new_rpath: str, final_path: str, install_name_mappings: T.Dict[str, str]) -> None:
+    try:
+        rpaths = get_darwin_rpaths_to_remove(fname)
+    except subprocess.CalledProcessError:
+        # Otool failed, which happens when invoked on a
+        # non-executable target. Just return.
+        return
+    try:
+        args = []
+        if rpaths:
+            # TODO: fix this properly, not totally clear how
+            #
+            # removing rpaths from binaries on macOS has tons of
+            # weird edge cases. For instance, if the user provided
+            # a '-Wl,-rpath' argument in LDFLAGS that happens to
+            # coincide with an rpath generated from a dependency,
+            # this would cause installation failures, as meson would
+            # generate install_name_tool calls with two identical
+            # '-delete_rpath' arguments, which install_name_tool
+            # fails on. Because meson itself ensures that it never
+            # adds duplicate rpaths, duplicate rpaths necessarily
+            # come from user variables. The idea of using OrderedSet
+            # is to remove *at most one* duplicate RPATH entry. This
+            # is not optimal, as it only respects the user's choice
+            # partially: if they provided a non-duplicate '-Wl,-rpath'
+            # argument, it gets removed, if they provided a duplicate
+            # one, it remains in the final binary. A potentially optimal
+            # solution would split all user '-Wl,-rpath' arguments from
+            # LDFLAGS, and later add them back with '-add_rpath'.
+            for rp in OrderedSet(rpaths):
+                args += ['-delete_rpath', rp]
+            subprocess.check_call(['install_name_tool', fname] + args,
+                                  stdout=subprocess.DEVNULL,
+                                  stderr=subprocess.DEVNULL)
+        args = []
+        if new_rpath:
+            args += ['-add_rpath', new_rpath]
+        # Rewrite -install_name @rpath/libfoo.dylib to /path/to/libfoo.dylib
+        if fname.endswith('dylib'):
+            args += ['-id', final_path]
+        if install_name_mappings:
+            for old, new in install_name_mappings.items():
+                args += ['-change', old, new]
+        if args:
+            subprocess.check_call(['install_name_tool', fname] + args,
+                                  stdout=subprocess.DEVNULL,
+                                  stderr=subprocess.DEVNULL)
+    except Exception as err:
+        raise SystemExit(err)
+
+def fix_jar(fname: str) -> None:
+    subprocess.check_call(['jar', 'xf', fname, 'META-INF/MANIFEST.MF'])
+    with open('META-INF/MANIFEST.MF', 'r+', encoding='utf-8') as f:
+        lines = f.readlines()
+        f.seek(0)
+        for line in lines:
+            if not line.startswith('Class-Path:'):
+                f.write(line)
+        f.truncate()
+    # jar -um doesn't allow removing existing attributes.  Use -uM instead,
+    # which a) removes the existing manifest from the jar and b) disables
+    # special-casing for the manifest file, so we can re-add it as a normal
+    # archive member.  This puts the manifest at the end of the jar rather
+    # than the beginning, but the spec doesn't forbid that.
+    subprocess.check_call(['jar', 'ufM', fname, 'META-INF/MANIFEST.MF'])
+
+def fix_rpath(fname: str, rpath_dirs_to_remove: T.Set[bytes], new_rpath: T.Union[str, bytes], final_path: str, install_name_mappings: T.Dict[str, str], verbose: bool = True) -> None:
+    global INSTALL_NAME_TOOL  # pylint: disable=global-statement
+    # Static libraries, import libraries, debug information, headers, etc.
+    # never have rpaths.
+    # DLLs and EXEs currently do not need runtime path fixing.
+    if fname.endswith(('.a', '.lib', '.pdb', '.h', '.hpp', '.dll', '.exe')):
+        return
+    try:
+        if fname.endswith('.jar'):
+            fix_jar(fname)
+            return
+        if isinstance(new_rpath, str):
+            new_rpath = new_rpath.encode('utf8')
+        fix_elf(fname, rpath_dirs_to_remove, new_rpath, verbose)
+        return
+    except SystemExit as e:
+        if isinstance(e.code, int) and e.code == 0:
+            pass
+        else:
+            raise
+    # We don't look for this on import because it will do a useless PATH lookup
+    # on non-mac platforms. That can be expensive on some Windows machines
+    # (up to 30ms), which is significant with --only-changed. For details, see:
+    # https://github.com/mesonbuild/meson/pull/6612#discussion_r378581401
+    if INSTALL_NAME_TOOL is False:
+        INSTALL_NAME_TOOL = bool(shutil.which('install_name_tool'))
+    if INSTALL_NAME_TOOL:
+        if isinstance(new_rpath, bytes):
+            new_rpath = new_rpath.decode('utf8')
+        fix_darwin(fname, new_rpath, final_path, install_name_mappings)
diff --git a/vendored-meson/meson/mesonbuild/scripts/depscan.py b/vendored-meson/meson/mesonbuild/scripts/depscan.py
new file mode 100644
index 000000000000..3ae14c0b35f1
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/scripts/depscan.py
@@ -0,0 +1,208 @@
+# Copyright 2020 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+import json
+import os
+import pathlib
+import pickle
+import re
+import sys
+import typing as T
+
+from ..backend.ninjabackend import ninja_quote
+from ..compilers.compilers import lang_suffixes
+
+if T.TYPE_CHECKING:
+    from ..backend.ninjabackend import TargetDependencyScannerInfo
+
+CPP_IMPORT_RE = re.compile(r'\w*import ([a-zA-Z0-9]+);')
+CPP_EXPORT_RE = re.compile(r'\w*export module ([a-zA-Z0-9]+);')
+
+FORTRAN_INCLUDE_PAT = r"^\s*include\s*['\"](\w+\.\w+)['\"]"
+FORTRAN_MODULE_PAT = r"^\s*\bmodule\b\s+(\w+)\s*(?:!+.*)*$"
+FORTRAN_SUBMOD_PAT = r"^\s*\bsubmodule\b\s*\((\w+:?\w+)\)\s*(\w+)"
+FORTRAN_USE_PAT = r"^\s*use,?\s*(?:non_intrinsic)?\s*(?:::)?\s*(\w+)"
+
+FORTRAN_MODULE_RE = re.compile(FORTRAN_MODULE_PAT, re.IGNORECASE)
+FORTRAN_SUBMOD_RE = re.compile(FORTRAN_SUBMOD_PAT, re.IGNORECASE)
+FORTRAN_USE_RE = re.compile(FORTRAN_USE_PAT, re.IGNORECASE)
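+# Illustrative matches (hypothetical source lines, not exhaustive):
+#   'use m2'                     -> FORTRAN_USE_RE captures 'm2'
+#   'module m1'                  -> FORTRAN_MODULE_RE captures 'm1'
+#   'submodule (parent:sub) s2'  -> FORTRAN_SUBMOD_RE captures
+#                                   ('parent:sub', 's2')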
+
+class DependencyScanner:
+    def __init__(self, pickle_file: str, outfile: str, sources: T.List[str]):
+        with open(pickle_file, 'rb') as pf:
+            self.target_data: TargetDependencyScannerInfo = pickle.load(pf)
+        self.outfile = outfile
+        self.sources = sources
+        self.provided_by: T.Dict[str, str] = {}
+        self.exports: T.Dict[str, str] = {}
+        self.needs: T.Dict[str, T.List[str]] = {}
+        self.sources_with_exports: T.List[str] = []
+
+    def scan_file(self, fname: str) -> None:
+        suffix = os.path.splitext(fname)[1][1:]
+        if suffix != 'C':
+            suffix = suffix.lower()
+        if suffix in lang_suffixes['fortran']:
+            self.scan_fortran_file(fname)
+        elif suffix in lang_suffixes['cpp']:
+            self.scan_cpp_file(fname)
+        else:
+            sys.exit(f'Cannot scan files with suffix .{suffix}.')
+
+    def scan_fortran_file(self, fname: str) -> None:
+        fpath = pathlib.Path(fname)
+        modules_in_this_file = set()
+        for line in fpath.read_text(encoding='utf-8', errors='ignore').split('\n'):
+            import_match = FORTRAN_USE_RE.match(line)
+            export_match = FORTRAN_MODULE_RE.match(line)
+            submodule_export_match = FORTRAN_SUBMOD_RE.match(line)
+            if import_match:
+                needed = import_match.group(1).lower()
+                # In Fortran a file may also contain a use declaration for
+                # the module it defines itself. Prevent circular dependencies.
+                if needed not in modules_in_this_file:
+                    if fname in self.needs:
+                        self.needs[fname].append(needed)
+                    else:
+                        self.needs[fname] = [needed]
+            if export_match:
+                exported_module = export_match.group(1).lower()
+                assert exported_module not in modules_in_this_file
+                modules_in_this_file.add(exported_module)
+                if exported_module in self.provided_by:
+                    raise RuntimeError(f'Multiple files provide module {exported_module}.')
+                self.sources_with_exports.append(fname)
+                self.provided_by[exported_module] = fname
+                self.exports[fname] = exported_module
+            if submodule_export_match:
+                # Store submodule "Foo" "Bar" as "foo:bar".
+                # A submodule declaration can be both an import and an export declaration:
+                #
+                # submodule (a1:a2) a3
+                #  - requires a1@a2.smod
+                #  - produces a1@a3.smod
+                parent_module_name_full = submodule_export_match.group(1).lower()
+                parent_module_name = parent_module_name_full.split(':')[0]
+                submodule_name = submodule_export_match.group(2).lower()
+                concat_name = f'{parent_module_name}:{submodule_name}'
+                self.sources_with_exports.append(fname)
+                self.provided_by[concat_name] = fname
+                self.exports[fname] = concat_name
+                # Fortran requires that the immediate parent module must be built
+                # before the current one. Thus:
+                #
+                # submodule (parent) parent   <- requires parent.mod (really parent.smod, but they are created at the same time)
+                # submodule (a1:a2) a3        <- requires a1@a2.smod
+                #
+                # a3 does not depend on the a1 parent module directly, only transitively.
+                if fname in self.needs:
+                    self.needs[fname].append(parent_module_name_full)
+                else:
+                    self.needs[fname] = [parent_module_name_full]
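+    # Illustrative example: scanning a hypothetical 'a.f90' containing
+    #   module m1
+    #   use m2
+    # records provided_by['m1'] == 'a.f90', exports['a.f90'] == 'm1'
+    # and needs['a.f90'] == ['m2'].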
+
+    def scan_cpp_file(self, fname: str) -> None:
+        fpath = pathlib.Path(fname)
+        for line in fpath.read_text(encoding='utf-8', errors='ignore').split('\n'):
+            import_match = CPP_IMPORT_RE.match(line)
+            export_match = CPP_EXPORT_RE.match(line)
+            if import_match:
+                needed = import_match.group(1)
+                if fname in self.needs:
+                    self.needs[fname].append(needed)
+                else:
+                    self.needs[fname] = [needed]
+            if export_match:
+                exported_module = export_match.group(1)
+                if exported_module in self.provided_by:
+                    raise RuntimeError(f'Multiple files provide module {exported_module}.')
+                self.sources_with_exports.append(fname)
+                self.provided_by[exported_module] = fname
+                self.exports[fname] = exported_module
+
+    def objname_for(self, src: str) -> str:
+        objname = self.target_data.source2object[src]
+        assert isinstance(objname, str)
+        return objname
+
+    def module_name_for(self, src: str) -> str:
+        suffix = os.path.splitext(src)[1][1:].lower()
+        if suffix in lang_suffixes['fortran']:
+            exported = self.exports[src]
+            # Module foo:bar goes to a file name foo@bar.smod
+            # Module Foo goes to a file name foo.mod
+            namebase = exported.replace(':', '@')
+            if ':' in exported:
+                extension = 'smod'
+            else:
+                extension = 'mod'
+            return os.path.join(self.target_data.private_dir, f'{namebase}.{extension}')
+        elif suffix in lang_suffixes['cpp']:
+            return '{}.ifc'.format(self.exports[src])
+        else:
+            raise RuntimeError('Unreachable code.')
+
+    def scan(self) -> int:
+        for s in self.sources:
+            self.scan_file(s)
+        with open(self.outfile, 'w', encoding='utf-8') as ofile:
+            ofile.write('ninja_dyndep_version = 1\n')
+            for src in self.sources:
+                objfilename = self.objname_for(src)
+                mods_and_submods_needed = []
+                module_files_generated = []
+                module_files_needed = []
+                if src in self.sources_with_exports:
+                    module_files_generated.append(self.module_name_for(src))
+                if src in self.needs:
+                    for modname in self.needs[src]:
+                        if modname not in self.provided_by:
+                            # Nothing provides this module; we assume that it
+                            # comes from a dependency library somewhere and is
+                            # already built by the time this compilation starts.
+                            pass
+                        else:
+                            mods_and_submods_needed.append(modname)
+
+                for modname in mods_and_submods_needed:
+                    provider_src = self.provided_by[modname]
+                    provider_modfile = self.module_name_for(provider_src)
+                    # Prune self-dependencies
+                    if provider_src != src:
+                        module_files_needed.append(provider_modfile)
+
+                quoted_objfilename = ninja_quote(objfilename, True)
+                quoted_module_files_generated = [ninja_quote(x, True) for x in module_files_generated]
+                quoted_module_files_needed = [ninja_quote(x, True) for x in module_files_needed]
+                if quoted_module_files_generated:
+                    mod_gen = '| ' + ' '.join(quoted_module_files_generated)
+                else:
+                    mod_gen = ''
+                if quoted_module_files_needed:
+                    mod_dep = '| ' + ' '.join(quoted_module_files_needed)
+                else:
+                    mod_dep = ''
+                build_line = 'build {} {}: dyndep {}'.format(quoted_objfilename,
+                                                             mod_gen,
+                                                             mod_dep)
+                ofile.write(build_line + '\n')
+        return 0
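+    # The emitted file uses ninja's dyndep format, roughly (with a
+    # hypothetical private_dir of 'priv'):
+    #   ninja_dyndep_version = 1
+    #   build a.o | priv/m1.mod: dyndep
+    #   build b.o : dyndep | priv/m1.mod
+    # i.e. implicit outputs after the first '|' (module files this
+    # source generates), implicit inputs after the second (module
+    # files it needs).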
+
+def run(args: T.List[str]) -> int:
+    assert len(args) == 3, 'got wrong number of arguments!'
+    pickle_file, outfile, jsonfile = args
+    with open(jsonfile, encoding='utf-8') as f:
+        sources = json.load(f)
+    scanner = DependencyScanner(pickle_file, outfile, sources)
+    return scanner.scan()
diff --git a/vendored-meson/meson/mesonbuild/scripts/dirchanger.py b/vendored-meson/meson/mesonbuild/scripts/dirchanger.py
new file mode 100644
index 000000000000..60c4f120ceb6
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/scripts/dirchanger.py
@@ -0,0 +1,30 @@
+# Copyright 2015-2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+'''CD into dir given as first argument and execute
+the command given in the rest of the arguments.'''
+
+import os, subprocess, sys
+import typing as T
+
+def run(args: T.List[str]) -> int:
+    dirname = args[0]
+    command = args[1:]
+
+    os.chdir(dirname)
+    return subprocess.call(command)
+
+if __name__ == '__main__':
+    sys.exit(run(sys.argv[1:]))
diff --git a/vendored-meson/meson/mesonbuild/scripts/env2mfile.py b/vendored-meson/meson/mesonbuild/scripts/env2mfile.py
new file mode 100755
index 000000000000..27a7dd969841
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/scripts/env2mfile.py
@@ -0,0 +1,402 @@
+# Copyright 2022 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+import sys, os, subprocess, shutil
+import shlex
+import typing as T
+
+from .. import envconfig
+from .. import mlog
+from ..compilers import compilers
+from ..compilers.detect import defaults as compiler_names
+
+if T.TYPE_CHECKING:
+    import argparse
+
+def has_for_build() -> bool:
+    for cenv in envconfig.ENV_VAR_COMPILER_MAP.values():
+        if os.environ.get(cenv + '_FOR_BUILD'):
+            return True
+    return False
+
+def add_arguments(parser: 'argparse.ArgumentParser') -> None:
+    parser.add_argument('--debarch', default=None,
+                        help='The dpkg architecture to generate.')
+    parser.add_argument('--gccsuffix', default="",
+                        help='A particular gcc version suffix if necessary.')
+    parser.add_argument('-o', required=True, dest='outfile',
+                        help='The output file.')
+    parser.add_argument('--cross', default=False, action='store_true',
+                        help='Generate a cross compilation file.')
+    parser.add_argument('--native', default=False, action='store_true',
+                        help='Generate a native compilation file.')
+    parser.add_argument('--system', default=None,
+                        help='Define system for cross compilation.')
+    parser.add_argument('--subsystem', default=None,
+                        help='Define subsystem for cross compilation.')
+    parser.add_argument('--kernel', default=None,
+                        help='Define kernel for cross compilation.')
+    parser.add_argument('--cpu', default=None,
+                        help='Define cpu for cross compilation.')
+    parser.add_argument('--cpu-family', default=None,
+                        help='Define cpu family for cross compilation.')
+    parser.add_argument('--endian', default='little', choices=['big', 'little'],
+                        help='Define endianness for cross compilation.')
+
+class MachineInfo:
+    def __init__(self) -> None:
+        self.compilers: T.Dict[str, T.List[str]] = {}
+        self.binaries: T.Dict[str, T.List[str]] = {}
+        self.properties: T.Dict[str, T.Union[str, T.List[str]]] = {}
+        self.compile_args: T.Dict[str, T.List[str]] = {}
+        self.link_args: T.Dict[str, T.List[str]] = {}
+        self.cmake: T.Dict[str, T.Union[str, T.List[str]]] = {}
+
+        self.system: T.Optional[str] = None
+        self.subsystem: T.Optional[str] = None
+        self.kernel: T.Optional[str] = None
+        self.cpu: T.Optional[str] = None
+        self.cpu_family: T.Optional[str] = None
+        self.endian: T.Optional[str] = None
+
+#parser = argparse.ArgumentParser(description='''Generate cross compilation definition file for the Meson build system.
+#
+#If you do not specify the --arch argument, Meson assumes that running
+#plain 'dpkg-architecture' will return correct information for the
+#host system.
+#
+#This script must be run in an environment where CPPFLAGS et al are set to the
+#same values used in the actual compilation.
+#'''
+#)
+
+def locate_path(program: str) -> T.List[str]:
+    if os.path.isabs(program):
+        return [program]
+    for d in os.get_exec_path():
+        f = os.path.join(d, program)
+        if os.access(f, os.X_OK):
+            return [f]
+    raise ValueError("%s not found on $PATH" % program)
+
+def write_args_line(ofile: T.TextIO, name: str, args: T.Union[str, T.List[str]]) -> None:
+    if len(args) == 0:
+        return
+    if isinstance(args, str):
+        ostr = name + "= '" + args + "'\n"
+    else:
+        ostr = name + ' = ['
+        ostr += ', '.join("'" + i + "'" for i in args)
+        ostr += ']\n'
+    ofile.write(ostr)
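+# Illustrative behavior (note the quirk that the string form omits the
+# space before '='):
+#   write_args_line(f, 'c', ['gcc', '-m32'])    writes: c = ['gcc', '-m32']
+#   write_args_line(f, 'sys_root', '/sysroot')  writes: sys_root= '/sysroot'
+#   write_args_line(f, 'x', [])                 writes nothing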
+
+def get_args_from_envvars(infos: MachineInfo) -> None:
+    cppflags = shlex.split(os.environ.get('CPPFLAGS', ''))
+    cflags = shlex.split(os.environ.get('CFLAGS', ''))
+    cxxflags = shlex.split(os.environ.get('CXXFLAGS', ''))
+    objcflags = shlex.split(os.environ.get('OBJCFLAGS', ''))
+    objcxxflags = shlex.split(os.environ.get('OBJCXXFLAGS', ''))
+    ldflags = shlex.split(os.environ.get('LDFLAGS', ''))
+
+    c_args = cppflags + cflags
+    cpp_args = cppflags + cxxflags
+    c_link_args = cflags + ldflags
+    cpp_link_args = cxxflags + ldflags
+
+    objc_args = cppflags + objcflags
+    objcpp_args = cppflags + objcxxflags
+    objc_link_args = objcflags + ldflags
+    objcpp_link_args = objcxxflags + ldflags
+
+    if c_args:
+        infos.compile_args['c'] = c_args
+    if c_link_args:
+        infos.link_args['c'] = c_link_args
+    if cpp_args:
+        infos.compile_args['cpp'] = cpp_args
+    if cpp_link_args:
+        infos.link_args['cpp'] = cpp_link_args
+    if objc_args:
+        infos.compile_args['objc'] = objc_args
+    if objc_link_args:
+        infos.link_args['objc'] = objc_link_args
+    if objcpp_args:
+        infos.compile_args['objcpp'] = objcpp_args
+    if objcpp_link_args:
+        infos.link_args['objcpp'] = objcpp_link_args
+
+deb_cpu_family_map = {
+    'mips64el': 'mips64',
+    'i686': 'x86',
+    'powerpc64le': 'ppc64',
+}
+
+deb_cpu_map = {
+    'armhf': 'arm7hlf',
+    'mips64el': 'mips64',
+    'powerpc64le': 'ppc64',
+}
+
+def deb_detect_cmake(infos: MachineInfo, data: T.Dict[str, str]) -> None:
+    system_name_map = {'linux': 'Linux', 'kfreebsd': 'kFreeBSD', 'hurd': 'GNU'}
+    system_processor_map = {'arm': 'armv7l', 'mips64el': 'mips64', 'powerpc64le': 'ppc64le'}
+
+    infos.cmake["CMAKE_C_COMPILER"] = infos.compilers['c']
+    try:
+        infos.cmake["CMAKE_CXX_COMPILER"] = infos.compilers['cpp']
+    except KeyError:
+        pass
+    infos.cmake["CMAKE_SYSTEM_NAME"] = system_name_map[data['DEB_HOST_ARCH_OS']]
+    infos.cmake["CMAKE_SYSTEM_PROCESSOR"] = system_processor_map.get(data['DEB_HOST_GNU_CPU'],
+                                                                     data['DEB_HOST_GNU_CPU'])
+
+def deb_compiler_lookup(infos: MachineInfo, compilerstems: T.List[T.Tuple[str, str]], host_arch: str, gccsuffix: str) -> None:
+    for langname, stem in compilerstems:
+        compilername = f'{host_arch}-{stem}{gccsuffix}'
+        try:
+            p = locate_path(compilername)
+            infos.compilers[langname] = p
+        except ValueError:
+            pass
+
+def detect_cross_debianlike(options: T.Any) -> MachineInfo:
+    if options.debarch == 'auto':
+        cmd = ['dpkg-architecture']
+    else:
+        cmd = ['dpkg-architecture', '-a' + options.debarch]
+    output = subprocess.check_output(cmd, universal_newlines=True,
+                                     stderr=subprocess.DEVNULL)
+    data = {}
+    for line in output.split('\n'):
+        line = line.strip()
+        if line == '':
+            continue
+        k, v = line.split('=', 1)
+        data[k] = v
+    host_arch = data['DEB_HOST_GNU_TYPE']
+    host_os = data['DEB_HOST_ARCH_OS']
+    host_subsystem = host_os
+    host_kernel = 'linux'
+    host_cpu_family = deb_cpu_family_map.get(data['DEB_HOST_GNU_CPU'],
+                                             data['DEB_HOST_GNU_CPU'])
+    host_cpu = deb_cpu_map.get(data['DEB_HOST_ARCH'],
+                               data['DEB_HOST_ARCH'])
+    host_endian = data['DEB_HOST_ARCH_ENDIAN']
+
+    compilerstems = [('c', 'gcc'),
+                     ('cpp', 'g++'),
+                     ('objc', 'gobjc'),
+                     ('objcpp', 'gobjc++')]
+    infos = MachineInfo()
+    deb_compiler_lookup(infos, compilerstems, host_arch, options.gccsuffix)
+    if len(infos.compilers) == 0:
+        print('Warning: no compilers were detected.')
+    infos.binaries['ar'] = locate_path("%s-ar" % host_arch)
+    infos.binaries['strip'] = locate_path("%s-strip" % host_arch)
+    infos.binaries['objcopy'] = locate_path("%s-objcopy" % host_arch)
+    infos.binaries['ld'] = locate_path("%s-ld" % host_arch)
+    try:
+        infos.binaries['cmake'] = locate_path("cmake")
+        deb_detect_cmake(infos, data)
+    except ValueError:
+        pass
+    try:
+        infos.binaries['pkgconfig'] = locate_path("%s-pkg-config" % host_arch)
+    except ValueError:
+        pass # pkg-config is optional
+    try:
+        infos.binaries['cups-config'] = locate_path("cups-config")
+    except ValueError:
+        pass
+    infos.system = host_os
+    infos.subsystem = host_subsystem
+    infos.kernel = host_kernel
+    infos.cpu_family = host_cpu_family
+    infos.cpu = host_cpu
+    infos.endian = host_endian
+
+    get_args_from_envvars(infos)
+    return infos
+
+def write_machine_file(infos: MachineInfo, ofilename: str, write_system_info: bool) -> None:
+    tmpfilename = ofilename + '~'
+    with open(tmpfilename, 'w', encoding='utf-8') as ofile:
+        ofile.write('[binaries]\n')
+        ofile.write('# Compilers\n')
+        for langname in sorted(infos.compilers.keys()):
+            compiler = infos.compilers[langname]
+            write_args_line(ofile, langname, compiler)
+        ofile.write('\n')
+
+        ofile.write('# Other binaries\n')
+        for exename in sorted(infos.binaries.keys()):
+            exe = infos.binaries[exename]
+            write_args_line(ofile, exename, exe)
+        ofile.write('\n')
+
+        ofile.write('[properties]\n')
+        all_langs = list(set(infos.compile_args.keys()).union(set(infos.link_args.keys())))
+        all_langs.sort()
+        for lang in all_langs:
+            if lang in infos.compile_args:
+                write_args_line(ofile, lang + '_args', infos.compile_args[lang])
+            if lang in infos.link_args:
+                write_args_line(ofile, lang + '_link_args', infos.link_args[lang])
+        for k, v in infos.properties.items():
+            write_args_line(ofile, k, v)
+        ofile.write('\n')
+
+        if infos.cmake:
+            ofile.write('[cmake]\n\n')
+            for k, v in infos.cmake.items():
+                write_args_line(ofile, k, v)
+            ofile.write('\n')
+
+        if write_system_info:
+            ofile.write('[host_machine]\n')
+            ofile.write(f"cpu = '{infos.cpu}'\n")
+            ofile.write(f"cpu_family = '{infos.cpu_family}'\n")
+            ofile.write(f"endian = '{infos.endian}'\n")
+            ofile.write(f"system = '{infos.system}'\n")
+            if infos.subsystem:
+                ofile.write(f"subsystem = '{infos.subsystem}'\n")
+            if infos.kernel:
+                ofile.write(f"kernel = '{infos.kernel}'\n")
+
+    os.replace(tmpfilename, ofilename)
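+# A cross file produced by write_machine_file() looks roughly like the
+# following (values are illustrative, assuming an aarch64 Debian-style
+# toolchain was detected):
+#
+#   [binaries]
+#   # Compilers
+#   c = ['/usr/bin/aarch64-linux-gnu-gcc']
+#
+#   # Other binaries
+#   ar = ['/usr/bin/aarch64-linux-gnu-ar']
+#
+#   [properties]
+#
+#   [host_machine]
+#   cpu = 'arm64'
+#   cpu_family = 'aarch64'
+#   endian = 'little'
+#   system = 'linux'
+#   subsystem = 'linux'
+#   kernel = 'linux'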
+
+def detect_language_args_from_envvars(langname: str, envvar_suffix: str = '') -> T.Tuple[T.List[str], T.List[str]]:
+    ldflags = tuple(shlex.split(os.environ.get('LDFLAGS' + envvar_suffix, '')))
+    compile_args = []
+    if langname in compilers.CFLAGS_MAPPING:
+        compile_args = shlex.split(os.environ.get(compilers.CFLAGS_MAPPING[langname] + envvar_suffix, ''))
+    if langname in compilers.LANGUAGES_USING_CPPFLAGS:
+        cppflags = tuple(shlex.split(os.environ.get('CPPFLAGS' + envvar_suffix, '')))
+        lang_compile_args = list(cppflags) + compile_args
+    else:
+        lang_compile_args = compile_args
+    lang_link_args = list(ldflags) + compile_args
+    return (lang_compile_args, lang_link_args)
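+# Illustrative example (assuming the usual mapping where C reads CFLAGS
+# and CPPFLAGS): with CPPFLAGS='-DNDEBUG', CFLAGS='-O2' and LDFLAGS='-L/x',
+# detect_language_args_from_envvars('c') returns
+# (['-DNDEBUG', '-O2'], ['-L/x', '-O2']); note that the compile flags
+# are also appended to the link arguments.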
+
+def detect_compilers_from_envvars(envvar_suffix: str = '') -> MachineInfo:
+    infos = MachineInfo()
+    for langname, envvarname in envconfig.ENV_VAR_COMPILER_MAP.items():
+        compilerstr = os.environ.get(envvarname + envvar_suffix)
+        if not compilerstr:
+            continue
+        compiler = shlex.split(compilerstr)
+        infos.compilers[langname] = compiler
+        lang_compile_args, lang_link_args = detect_language_args_from_envvars(langname, envvar_suffix)
+        if lang_compile_args:
+            infos.compile_args[langname] = lang_compile_args
+        if lang_link_args:
+            infos.link_args[langname] = lang_link_args
+    return infos
+
+def detect_binaries_from_envvars(infos: MachineInfo, envvar_suffix: str = '') -> None:
+    for binname, envvar_base in envconfig.ENV_VAR_TOOL_MAP.items():
+        envvar = envvar_base + envvar_suffix
+        binstr = os.environ.get(envvar)
+        if binstr:
+            infos.binaries[binname] = shlex.split(binstr)
+
+def detect_properties_from_envvars(infos: MachineInfo, envvar_suffix: str = '') -> None:
+    var = os.environ.get('PKG_CONFIG_LIBDIR' + envvar_suffix)
+    if var is not None:
+        infos.properties['pkg_config_libdir'] = var
+    var = os.environ.get('PKG_CONFIG_SYSROOT_DIR' + envvar_suffix)
+    if var is not None:
+        infos.properties['sys_root'] = var
+
+def detect_cross_system(infos: MachineInfo, options: T.Any) -> None:
+    for optname in ('system', 'subsystem', 'kernel', 'cpu', 'cpu_family', 'endian'):
+        v = getattr(options, optname)
+        if not v:
+            mlog.error(f'Cross property "{optname}" missing, set it with --{optname.replace("_", "-")}.')
+            sys.exit(1)
+        setattr(infos, optname, v)
+
+def detect_cross_env(options: T.Any) -> MachineInfo:
+    if options.debarch:
+        print('Detecting cross environment via dpkg-architecture.')
+        infos = detect_cross_debianlike(options)
+    else:
+        print('Detecting cross environment via environment variables.')
+        infos = detect_compilers_from_envvars()
+        detect_cross_system(infos, options)
+    detect_binaries_from_envvars(infos)
+    detect_properties_from_envvars(infos)
+    return infos
+
+def add_compiler_if_missing(infos: MachineInfo, langname: str, exe_names: T.List[str]) -> None:
+    if langname in infos.compilers:
+        return
+    for exe_name in exe_names:
+        lookup = shutil.which(exe_name)
+        if not lookup:
+            continue
+        compflags, linkflags = detect_language_args_from_envvars(langname)
+        infos.compilers[langname] = [lookup]
+        if compflags:
+            infos.compile_args[langname] = compflags
+        if linkflags:
+            infos.link_args[langname] = linkflags
+        return
+
+def detect_missing_native_compilers(infos: MachineInfo) -> None:
+    # Any per-platform special detection should go here.
+    for langname, exes in compiler_names.items():
+        if langname not in envconfig.ENV_VAR_COMPILER_MAP:
+            continue
+        add_compiler_if_missing(infos, langname, exes)
+
+def detect_missing_native_binaries(infos: MachineInfo) -> None:
+    # Any per-platform special detection should go here.
+    for toolname in sorted(envconfig.ENV_VAR_TOOL_MAP.keys()):
+        if toolname in infos.binaries:
+            continue
+        exe = shutil.which(toolname)
+        if exe:
+            infos.binaries[toolname] = [exe]
+
+def detect_native_env(options: T.Any) -> MachineInfo:
+    use_for_build = has_for_build()
+    if use_for_build:
+        mlog.log('Using FOR_BUILD envvars for detection')
+        esuffix = '_FOR_BUILD'
+    else:
+        mlog.log('Using regular envvars for detection.')
+        esuffix = ''
+    infos = detect_compilers_from_envvars(esuffix)
+    detect_missing_native_compilers(infos)
+    detect_binaries_from_envvars(infos, esuffix)
+    detect_missing_native_binaries(infos)
+    detect_properties_from_envvars(infos, esuffix)
+    return infos
+
+def run(options: T.Any) -> None:
+    if options.cross and options.native:
+        sys.exit('You can only specify either --cross or --native, not both.')
+    if not options.cross and not options.native:
+        sys.exit('You must specify --cross or --native.')
+    mlog.notice('This functionality is experimental and subject to change.')
+    detect_cross = options.cross
+    if detect_cross:
+        infos = detect_cross_env(options)
+        write_system_info = True
+    else:
+        infos = detect_native_env(options)
+        write_system_info = False
+    write_machine_file(infos, options.outfile, write_system_info)
diff --git a/vendored-meson/meson/mesonbuild/scripts/externalproject.py b/vendored-meson/meson/mesonbuild/scripts/externalproject.py
new file mode 100644
index 000000000000..17c2251c3a45
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/scripts/externalproject.py
@@ -0,0 +1,116 @@
+# Copyright 2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+import os
+import argparse
+import multiprocessing
+import subprocess
+from pathlib import Path
+import typing as T
+
+from ..mesonlib import Popen_safe, split_args
+
+class ExternalProject:
+    def __init__(self, options: argparse.Namespace):
+        self.name = options.name
+        self.src_dir = options.srcdir
+        self.build_dir = options.builddir
+        self.install_dir = options.installdir
+        self.log_dir = options.logdir
+        self.verbose = options.verbose
+        self.stampfile = options.stampfile
+        self.depfile = options.depfile
+        self.make = split_args(options.make)
+
+    def write_depfile(self) -> None:
+        with open(self.depfile, 'w', encoding='utf-8') as f:
+            f.write(f'{self.stampfile}: \\\n')
+            for dirpath, dirnames, filenames in os.walk(self.src_dir):
+                dirnames[:] = [d for d in dirnames if not d.startswith('.')]
+                for fname in filenames:
+                    if fname.startswith('.'):
+                        continue
+                    path = Path(dirpath, fname)
+                    f.write('  {} \\\n'.format(path.as_posix().replace(' ', '\\ ')))
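+        # The depfile is plain Makefile dependency syntax, roughly
+        # (hidden files and directories are skipped above):
+        #   <stampfile>: \
+        #     <srcdir>/configure.ac \
+        #     <srcdir>/Makefile.am \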
+
+    def write_stampfile(self) -> None:
+        with open(self.stampfile, 'w', encoding='utf-8'):
+            pass
+
+    def supports_jobs_flag(self) -> bool:
+        p, o, e = Popen_safe(self.make + ['--version'])
+        if p.returncode == 0 and ('GNU Make' in o or 'waf' in o):
+            return True
+        return False
+
+    def build(self) -> int:
+        make_cmd = self.make.copy()
+        if self.supports_jobs_flag():
+            make_cmd.append(f'-j{multiprocessing.cpu_count()}')
+        rc = self._run('build', make_cmd)
+        if rc != 0:
+            return rc
+
+        install_cmd = self.make.copy()
+        install_env = {}
+        install_env['DESTDIR'] = self.install_dir
+        install_cmd.append('install')
+        rc = self._run('install', install_cmd, install_env)
+        if rc != 0:
+            return rc
+
+        self.write_depfile()
+        self.write_stampfile()
+
+        return 0
+
+    def _run(self, step: str, command: T.List[str], env: T.Optional[T.Dict[str, str]] = None) -> int:
+        m = 'Running command ' + str(command) + ' in directory ' + str(self.build_dir) + '\n'
+        log_filename = Path(self.log_dir, f'{self.name}-{step}.log')
+        output = None
+        if not self.verbose:
+            output = open(log_filename, 'w', encoding='utf-8')
+            output.write(m + '\n')
+            output.flush()
+        else:
+            print(m)
+        run_env = os.environ.copy()
+        if env:
+            run_env.update(env)
+        p, o, e = Popen_safe(command, stderr=subprocess.STDOUT, stdout=output,
+                             cwd=self.build_dir,
+                             env=run_env)
+        if p.returncode != 0:
+            m = f'{step} step returned error code {p.returncode}.'
+            if not self.verbose:
+                m += '\nSee logs: ' + str(log_filename)
+            print(m)
+        return p.returncode
+
+def run(args: T.List[str]) -> int:
+    parser = argparse.ArgumentParser()
+    parser.add_argument('--name')
+    parser.add_argument('--srcdir')
+    parser.add_argument('--builddir')
+    parser.add_argument('--installdir')
+    parser.add_argument('--logdir')
+    parser.add_argument('--make')
+    parser.add_argument('--verbose', action='store_true')
+    parser.add_argument('stampfile')
+    parser.add_argument('depfile')
+
+    options = parser.parse_args(args)
+    ep = ExternalProject(options)
+    return ep.build()
diff --git a/vendored-meson/meson/mesonbuild/scripts/gettext.py b/vendored-meson/meson/mesonbuild/scripts/gettext.py
new file mode 100644
index 000000000000..4a6bb9c8360a
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/scripts/gettext.py
@@ -0,0 +1,96 @@
+# Copyright 2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+import os
+import argparse
+import subprocess
+import typing as T
+
+parser = argparse.ArgumentParser()
+parser.add_argument('command')
+parser.add_argument('--pkgname', default='')
+parser.add_argument('--datadirs', default='')
+parser.add_argument('--langs', default='')
+parser.add_argument('--localedir', default='')
+parser.add_argument('--source-root', default='')
+parser.add_argument('--subdir', default='')
+parser.add_argument('--xgettext', default='xgettext')
+parser.add_argument('--msgmerge', default='msgmerge')
+parser.add_argument('--msginit', default='msginit')
+parser.add_argument('--extra-args', default='')
+
+def read_linguas(src_sub: str) -> T.List[str]:
+    # Syntax of this file is documented here:
+    # https://www.gnu.org/software/gettext/manual/html_node/po_002fLINGUAS.html
+    linguas = os.path.join(src_sub, 'LINGUAS')
+    try:
+        langs = []
+        with open(linguas, encoding='utf-8') as f:
+            for line in f:
+                line = line.strip()
+                if line and not line.startswith('#'):
+                    langs += line.split()
+        return langs
+    except (FileNotFoundError, PermissionError):
+        print(f'Could not find file LINGUAS in {src_sub}')
+        return []
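+# A LINGUAS file is a plain whitespace/newline-separated list of locale
+# names with '#' comments, e.g. the hypothetical contents
+#   # translated languages
+#   de fr
+#   pt_BR
+# are returned by read_linguas() as ['de', 'fr', 'pt_BR'].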
+
+def run_potgen(src_sub: str, xgettext: str, pkgname: str, datadirs: str, args: T.List[str], source_root: str) -> int:
+    listfile = os.path.join(src_sub, 'POTFILES.in')
+    if not os.path.exists(listfile):
+        listfile = os.path.join(src_sub, 'POTFILES')
+        if not os.path.exists(listfile):
+            print('Could not find file POTFILES in %s' % src_sub)
+            return 1
+
+    child_env = os.environ.copy()
+    if datadirs:
+        child_env['GETTEXTDATADIRS'] = datadirs
+
+    ofile = os.path.join(src_sub, pkgname + '.pot')
+    return subprocess.call([xgettext, '--package-name=' + pkgname, '-p', src_sub, '-f', listfile,
+                            '-D', source_root, '-k_', '-o', ofile] + args,
+                           env=child_env)
+
+def update_po(src_sub: str, msgmerge: str, msginit: str, pkgname: str, langs: T.List[str]) -> int:
+    potfile = os.path.join(src_sub, pkgname + '.pot')
+    for l in langs:
+        pofile = os.path.join(src_sub, l + '.po')
+        if os.path.exists(pofile):
+            subprocess.check_call([msgmerge, '-q', '-o', pofile, pofile, potfile])
+        else:
+            subprocess.check_call([msginit, '--input', potfile, '--output-file', pofile, '--locale', l, '--no-translator'])
+    return 0
+
+def run(args: T.List[str]) -> int:
+    options = parser.parse_args(args)
+    subcmd = options.command
+    langs = options.langs.split('@@') if options.langs else None
+    extra_args = options.extra_args.split('@@') if options.extra_args else []
+    subdir = options.subdir
+    src_sub = os.path.join(options.source_root, subdir)
+
+    if not langs:
+        langs = read_linguas(src_sub)
+
+    if subcmd == 'pot':
+        return run_potgen(src_sub, options.xgettext, options.pkgname, options.datadirs, extra_args, options.source_root)
+    elif subcmd == 'update_po':
+        if run_potgen(src_sub, options.xgettext, options.pkgname, options.datadirs, extra_args, options.source_root) != 0:
+            return 1
+        return update_po(src_sub, options.msgmerge, options.msginit, options.pkgname, langs)
+    else:
+        print('Unknown subcommand.')
+        return 1
diff --git a/vendored-meson/meson/mesonbuild/scripts/gtkdochelper.py b/vendored-meson/meson/mesonbuild/scripts/gtkdochelper.py
new file mode 100644
index 000000000000..ded952d55a5b
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/scripts/gtkdochelper.py
@@ -0,0 +1,296 @@
+# Copyright 2015-2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+import sys, os
+import subprocess
+import shutil
+import argparse
+from ..mesonlib import MesonException, Popen_safe, is_windows, is_cygwin, split_args
+from . import destdir_join
+import typing as T
+
+parser = argparse.ArgumentParser()
+
+parser.add_argument('--sourcedir', dest='sourcedir')
+parser.add_argument('--builddir', dest='builddir')
+parser.add_argument('--subdir', dest='subdir')
+parser.add_argument('--headerdirs', dest='headerdirs')
+parser.add_argument('--mainfile', dest='mainfile')
+parser.add_argument('--modulename', dest='modulename')
+parser.add_argument('--moduleversion', dest='moduleversion')
+parser.add_argument('--htmlargs', dest='htmlargs', default='')
+parser.add_argument('--scanargs', dest='scanargs', default='')
+parser.add_argument('--scanobjsargs', dest='scanobjsargs', default='')
+parser.add_argument('--gobjects-types-file', dest='gobject_typesfile', default='')
+parser.add_argument('--fixxrefargs', dest='fixxrefargs', default='')
+parser.add_argument('--mkdbargs', dest='mkdbargs', default='')
+parser.add_argument('--ld', dest='ld', default='')
+parser.add_argument('--cc', dest='cc', default='')
+parser.add_argument('--ldflags', dest='ldflags', default='')
+parser.add_argument('--cflags', dest='cflags', default='')
+parser.add_argument('--content-files', dest='content_files', default='')
+parser.add_argument('--expand-content-files', dest='expand_content_files', default='')
+parser.add_argument('--html-assets', dest='html_assets', default='')
+parser.add_argument('--ignore-headers', dest='ignore_headers', default='')
+parser.add_argument('--namespace', dest='namespace', default='')
+parser.add_argument('--mode', dest='mode', default='')
+parser.add_argument('--installdir', dest='install_dir')
+parser.add_argument('--run', dest='run', default='')
+for tool in ['scan', 'scangobj', 'mkdb', 'mkhtml', 'fixxref']:
+    program_name = 'gtkdoc-' + tool
+    parser.add_argument('--' + program_name, dest=program_name.replace('-', '_'))
+
+def gtkdoc_run_check(cmd: T.List[str], cwd: str, library_paths: T.Optional[T.List[str]] = None) -> None:
+    if library_paths is None:
+        library_paths = []
+
+    env = dict(os.environ)
+    if is_windows() or is_cygwin():
+        if 'PATH' in env:
+            library_paths.extend(env['PATH'].split(os.pathsep))
+        env['PATH'] = os.pathsep.join(library_paths)
+    else:
+        if 'LD_LIBRARY_PATH' in env:
+            library_paths.extend(env['LD_LIBRARY_PATH'].split(os.pathsep))
+        env['LD_LIBRARY_PATH'] = os.pathsep.join(library_paths)
+
+    if is_windows():
+        cmd.insert(0, sys.executable)
+
+    # Put stderr into stdout since we want to print it out anyway.
+    # This preserves the order of messages.
+    p, out = Popen_safe(cmd, cwd=cwd, env=env, stderr=subprocess.STDOUT)[0:2]
+    if p.returncode != 0:
+        err_msg = [f"{cmd!r} failed with status {p.returncode:d}"]
+        if out:
+            err_msg.append(out)
+        raise MesonException('\n'.join(err_msg))
+    elif out:
+        # Unfortunately Windows cmd.exe consoles may be using a codepage
+        # that might choke print() with a UnicodeEncodeError, so let's
+        # ignore such errors for now; this is an acceptable compromise,
+        # since we are only echoing console output here.
+        try:
+            print(out)
+        except UnicodeEncodeError:
+            pass
+
+def build_gtkdoc(source_root: str, build_root: str, doc_subdir: str, src_subdirs: T.List[str],
+                 main_file: str, module: str, module_version: str,
+                 html_args: T.List[str], scan_args: T.List[str], fixxref_args: T.List[str], mkdb_args: T.List[str],
+                 gobject_typesfile: str, scanobjs_args: T.List[str], run: str, ld: str, cc: str, ldflags: str, cflags: str,
+                 html_assets: T.List[str], content_files: T.List[str], ignore_headers: T.List[str], namespace: str,
+                 expand_content_files: T.List[str], mode: str, options: argparse.Namespace) -> None:
+    print("Building documentation for %s" % module)
+
+    src_dir_args = []
+    for src_dir in src_subdirs:
+        if not os.path.isabs(src_dir):
+            dirs = [os.path.join(source_root, src_dir),
+                    os.path.join(build_root, src_dir)]
+        else:
+            dirs = [src_dir]
+        src_dir_args += ['--source-dir=' + d for d in dirs]
+
+    doc_src = os.path.join(source_root, doc_subdir)
+    abs_out = os.path.join(build_root, doc_subdir)
+    htmldir = os.path.join(abs_out, 'html')
+
+    content_files += [main_file]
+    sections = os.path.join(doc_src, module + "-sections.txt")
+    if os.path.exists(sections):
+        content_files.append(sections)
+
+    overrides = os.path.join(doc_src, module + "-overrides.txt")
+    if os.path.exists(overrides):
+        content_files.append(overrides)
+
+    # Copy files to build directory
+    for f in content_files:
+        # FIXME: Use mesonlib.File objects so we don't need to do this
+        if not os.path.isabs(f):
+            f = os.path.join(doc_src, f)
+        elif os.path.commonpath([f, build_root]) == build_root:
+            continue
+        shutil.copyfile(f, os.path.join(abs_out, os.path.basename(f)))
+
+    shutil.rmtree(htmldir, ignore_errors=True)
+    try:
+        os.mkdir(htmldir)
+    except Exception:
+        pass
+
+    for f in html_assets:
+        f_abs = os.path.join(doc_src, f)
+        shutil.copyfile(f_abs, os.path.join(htmldir, os.path.basename(f_abs)))
+
+    scan_cmd = [options.gtkdoc_scan, '--module=' + module] + src_dir_args
+    if ignore_headers:
+        scan_cmd.append('--ignore-headers=' + ' '.join(ignore_headers))
+    # Add user-specified arguments
+    scan_cmd += scan_args
+    gtkdoc_run_check(scan_cmd, abs_out)
+
+    # Use the generated types file when available, otherwise gobject_typesfile
+    # would often be a path to source dir instead of build dir.
+    if '--rebuild-types' in scan_args:
+        gobject_typesfile = os.path.join(abs_out, module + '.types')
+
+    if gobject_typesfile:
+        scanobjs_cmd = [options.gtkdoc_scangobj] + scanobjs_args
+        scanobjs_cmd += ['--types=' + gobject_typesfile,
+                         '--module=' + module,
+                         '--run=' + run,
+                         '--cflags=' + cflags,
+                         '--ldflags=' + ldflags,
+                         '--cc=' + cc,
+                         '--ld=' + ld,
+                         '--output-dir=' + abs_out]
+
+        library_paths = []
+        for ldflag in split_args(ldflags):
+            if ldflag.startswith('-Wl,-rpath,'):
+                library_paths.append(ldflag[11:])
+
+        gtkdoc_run_check(scanobjs_cmd, build_root, library_paths)
+
+    # Make docbook files
+    if mode == 'auto':
+        # Guessing is probably a poor idea, but this keeps compat
+        # with previous behavior
+        if main_file.endswith('sgml'):
+            modeflag = '--sgml-mode'
+        else:
+            modeflag = '--xml-mode'
+    elif mode == 'xml':
+        modeflag = '--xml-mode'
+    elif mode == 'sgml':
+        modeflag = '--sgml-mode'
+    else: # none
+        modeflag = None
+
+    mkdb_cmd = [options.gtkdoc_mkdb,
+                '--module=' + module,
+                '--output-format=xml',
+                '--expand-content-files=' + ' '.join(expand_content_files),
+                ] + src_dir_args
+    if namespace:
+        mkdb_cmd.append('--name-space=' + namespace)
+    if modeflag:
+        mkdb_cmd.append(modeflag)
+    if main_file:
+        # Yes, this is the flag even if the file is in xml.
+        mkdb_cmd.append('--main-sgml-file=' + main_file)
+    # Add user-specified arguments
+    mkdb_cmd += mkdb_args
+    gtkdoc_run_check(mkdb_cmd, abs_out)
+
+    # Make HTML documentation
+    mkhtml_cmd = [options.gtkdoc_mkhtml,
+                  '--path=' + os.pathsep.join((doc_src, abs_out)),
+                  module,
+                  ] + html_args
+    if main_file:
+        mkhtml_cmd.append('../' + main_file)
+    else:
+        mkhtml_cmd.append('%s-docs.xml' % module)
+    # html gen must be run in the HTML dir
+    gtkdoc_run_check(mkhtml_cmd, htmldir)
+
+    # Fix cross-references in HTML files
+    fixref_cmd = [options.gtkdoc_fixxref,
+                  '--module=' + module,
+                  '--module-dir=html'] + fixxref_args
+    gtkdoc_run_check(fixref_cmd, abs_out)
+
+    if module_version:
+        shutil.move(os.path.join(htmldir, f'{module}.devhelp2'),
+                    os.path.join(htmldir, f'{module}-{module_version}.devhelp2'))
+
+def install_gtkdoc(build_root: str, doc_subdir: str, install_prefix: str, datadir: str, module: str) -> None:
+    source = os.path.join(build_root, doc_subdir, 'html')
+    final_destination = os.path.join(install_prefix, datadir, module)
+    shutil.rmtree(final_destination, ignore_errors=True)
+    shutil.copytree(source, final_destination)
+
+def run(args: T.List[str]) -> int:
+    options = parser.parse_args(args)
+    if options.htmlargs:
+        htmlargs = options.htmlargs.split('@@')
+    else:
+        htmlargs = []
+    if options.scanargs:
+        scanargs = options.scanargs.split('@@')
+    else:
+        scanargs = []
+    if options.scanobjsargs:
+        scanobjsargs = options.scanobjsargs.split('@@')
+    else:
+        scanobjsargs = []
+    if options.fixxrefargs:
+        fixxrefargs = options.fixxrefargs.split('@@')
+    else:
+        fixxrefargs = []
+    if options.mkdbargs:
+        mkdbargs = options.mkdbargs.split('@@')
+    else:
+        mkdbargs = []
+    build_gtkdoc(
+        options.sourcedir,
+        options.builddir,
+        options.subdir,
+        options.headerdirs.split('@@'),
+        options.mainfile,
+        options.modulename,
+        options.moduleversion,
+        htmlargs,
+        scanargs,
+        fixxrefargs,
+        mkdbargs,
+        options.gobject_typesfile,
+        scanobjsargs,
+        options.run,
+        options.ld,
+        options.cc,
+        options.ldflags,
+        options.cflags,
+        options.html_assets.split('@@') if options.html_assets else [],
+        options.content_files.split('@@') if options.content_files else [],
+        options.ignore_headers.split('@@') if options.ignore_headers else [],
+        options.namespace,
+        options.expand_content_files.split('@@') if options.expand_content_files else [],
+        options.mode,
+        options)
+
+    if 'MESON_INSTALL_PREFIX' in os.environ:
+        destdir = os.environ.get('DESTDIR', '')
+        install_prefix = destdir_join(destdir, os.environ['MESON_INSTALL_PREFIX'])
+        if options.install_dir:
+            install_dir = options.install_dir
+        else:
+            install_dir = options.modulename
+            if options.moduleversion:
+                install_dir += '-' + options.moduleversion
+        if os.path.isabs(install_dir):
+            install_dir = destdir_join(destdir, install_dir)
+        install_gtkdoc(options.builddir,
+                       options.subdir,
+                       install_prefix,
+                       'share/gtk-doc/html',
+                       install_dir)
+    return 0
+
+if __name__ == '__main__':
+    sys.exit(run(sys.argv[1:]))
diff --git a/vendored-meson/meson/mesonbuild/scripts/hotdochelper.py b/vendored-meson/meson/mesonbuild/scripts/hotdochelper.py
new file mode 100644
index 000000000000..80365a035af3
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/scripts/hotdochelper.py
@@ -0,0 +1,40 @@
+from __future__ import annotations
+
+import os
+import shutil
+import subprocess
+
+from . import destdir_join
+
+import argparse
+import typing as T
+
+parser = argparse.ArgumentParser()
+parser.add_argument('--install')
+parser.add_argument('--extra-extension-path', action="append", default=[])
+parser.add_argument('--name')
+parser.add_argument('--builddir')
+parser.add_argument('--project-version')
+parser.add_argument('--docdir')
+
+
+def run(argv: T.List[str]) -> int:
+    options, args = parser.parse_known_args(argv)
+    subenv = os.environ.copy()
+
+    val = subenv.get('PYTHONPATH')
+    paths = [val] if val else []
+    subenv['PYTHONPATH'] = os.pathsep.join(paths + options.extra_extension_path)
+
+    res = subprocess.call(args, cwd=options.builddir, env=subenv)
+    if res != 0:
+        return res
+
+    if options.install:
+        source_dir = os.path.join(options.builddir, options.install)
+        destdir = os.environ.get('DESTDIR', '')
+        installdir = destdir_join(destdir, options.docdir)
+
+        shutil.rmtree(installdir, ignore_errors=True)
+        shutil.copytree(source_dir, installdir)
+    return 0
diff --git a/vendored-meson/meson/mesonbuild/scripts/itstool.py b/vendored-meson/meson/mesonbuild/scripts/itstool.py
new file mode 100644
index 000000000000..0bfcaf9b5292
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/scripts/itstool.py
@@ -0,0 +1,86 @@
+# Copyright 2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+import os
+import argparse
+import subprocess
+import tempfile
+import shutil
+import typing as T
+
+parser = argparse.ArgumentParser()
+parser.add_argument('command')
+parser.add_argument('--build-dir', default='')
+parser.add_argument('-i', '--input', default='')
+parser.add_argument('-o', '--output', default='')
+parser.add_argument('--itstool', default='itstool')
+parser.add_argument('--its', action='append', default=[])
+parser.add_argument('mo_files', nargs='+')
+
+
+def run_join(build_dir: str, itstool: str, its_files: T.List[str], mo_files: T.List[str],
+             in_fname: str, out_fname: str) -> int:
+    if not mo_files:
+        print('No mo files specified to use for translation.')
+        return 1
+
+    with tempfile.TemporaryDirectory(prefix=os.path.basename(in_fname), dir=build_dir) as tmp_dir:
+        # copy mo files to have the right names so itstool can infer their locale
+        locale_mo_files = []
+        for mo_file in mo_files:
+            if not os.path.exists(mo_file):
+                print(f'Could not find mo file {mo_file}')
+                return 1
+            if not mo_file.endswith('.mo'):
+                print(f'File is not a mo file: {mo_file}')
+                return 1
+            # determine locale of this mo file
+            parts = mo_file.partition('LC_MESSAGES')
+            if parts[0].endswith((os.sep, '/')):
+                locale = os.path.basename(parts[0][:-1])
+            else:
+                locale = os.path.basename(parts[0])
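+            # e.g. a hypothetical 'po/fr/LC_MESSAGES/app.mo' gives
+            # parts[0] == 'po/fr/', so locale == 'fr' and the copy below
+            # is named 'fr.mo', which lets itstool infer the locale.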
+            tmp_mo_fname = os.path.join(tmp_dir, locale + '.mo')
+            shutil.copy(mo_file, tmp_mo_fname)
+            locale_mo_files.append(tmp_mo_fname)
+
+        cmd = [itstool]
+        if its_files:
+            for fname in its_files:
+                cmd.extend(['-i', fname])
+        cmd.extend(['-j', in_fname,
+                    '-o', out_fname])
+        cmd.extend(locale_mo_files)
+
+        return subprocess.call(cmd)
+
+
+def run(args: T.List[str]) -> int:
+    options = parser.parse_args(args)
+    command = options.command
+    build_dir = os.environ.get('MESON_BUILD_ROOT', os.getcwd())
+    if options.build_dir:
+        build_dir = options.build_dir
+
+    if command == 'join':
+        return run_join(build_dir,
+                        options.itstool,
+                        options.its,
+                        options.mo_files,
+                        options.input,
+                        options.output)
+    else:
+        print('Unknown subcommand.')
+        return 1
diff --git a/vendored-meson/meson/mesonbuild/scripts/meson_exe.py b/vendored-meson/meson/mesonbuild/scripts/meson_exe.py
new file mode 100644
index 000000000000..da89dd4f1a9b
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/scripts/meson_exe.py
@@ -0,0 +1,125 @@
+# Copyright 2013-2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+import os
+import sys
+import argparse
+import pickle
+import subprocess
+import typing as T
+import locale
+
+from ..utils.core import ExecutableSerialisation
+
+def buildparser() -> argparse.ArgumentParser:
+    parser = argparse.ArgumentParser(description='Custom executable wrapper for Meson. Do not run on your own, mmm\'kay?')
+    parser.add_argument('--unpickle')
+    parser.add_argument('--capture')
+    parser.add_argument('--feed')
+    return parser
+
+def run_exe(exe: ExecutableSerialisation, extra_env: T.Optional[T.Dict[str, str]] = None) -> int:
+    if exe.exe_wrapper:
+        if not exe.exe_wrapper.found():
+            raise AssertionError('BUG: Can\'t run cross-compiled exe {!r} with not-found '
+                                 'wrapper {!r}'.format(exe.cmd_args[0], exe.exe_wrapper.get_path()))
+        cmd_args = exe.exe_wrapper.get_command() + exe.cmd_args
+    else:
+        cmd_args = exe.cmd_args
+    child_env = os.environ.copy()
+    if extra_env:
+        child_env.update(extra_env)
+    if exe.env:
+        child_env = exe.env.get_env(child_env)
+    if exe.extra_paths:
+        child_env['PATH'] = (os.pathsep.join(exe.extra_paths + ['']) +
+                             child_env['PATH'])
+        if exe.exe_wrapper and any('wine' in i for i in exe.exe_wrapper.get_command()):
+            from .. import mesonlib
+            child_env['WINEPATH'] = mesonlib.get_wine_shortpath(
+                exe.exe_wrapper.get_command(),
+                ['Z:' + p for p in exe.extra_paths] + child_env.get('WINEPATH', '').split(';'),
+                exe.workdir
+            )
+
+    stdin = None
+    if exe.feed:
+        stdin = open(exe.feed, 'rb')
+
+    pipe = subprocess.PIPE
+    if exe.verbose:
+        assert not exe.capture, 'Cannot capture and print to console at the same time'
+        pipe = None
+
+    p = subprocess.Popen(cmd_args, env=child_env, cwd=exe.workdir,
+                         close_fds=False, stdin=stdin, stdout=pipe, stderr=pipe)
+    stdout, stderr = p.communicate()
+
+    if stdin is not None:
+        stdin.close()
+
+    if p.returncode == 0xc0000135:
+        # STATUS_DLL_NOT_FOUND on Windows indicating a common problem that is otherwise hard to diagnose
+        strerror = 'Failed to run due to missing DLLs, with path: ' + child_env['PATH']
+        raise FileNotFoundError(p.returncode, strerror, cmd_args)
+
+    if p.returncode != 0:
+        if exe.pickled:
+            print(f'while executing {cmd_args!r}')
+        if exe.verbose:
+            return p.returncode
+        encoding = locale.getpreferredencoding()
+        if not exe.capture:
+            print('--- stdout ---')
+            print(stdout.decode(encoding=encoding, errors='replace'))
+        print('--- stderr ---')
+        print(stderr.decode(encoding=encoding, errors='replace'))
+        return p.returncode
+
+    if exe.capture:
+        skip_write = False
+        try:
+            with open(exe.capture, 'rb') as cur:
+                skip_write = cur.read() == stdout
+        except OSError:
+            pass
+        if not skip_write:
+            with open(exe.capture, 'wb') as output:
+                output.write(stdout)
+
+    return 0
+
+def run(args: T.List[str]) -> int:
+    parser = buildparser()
+    options, cmd_args = parser.parse_known_args(args)
+    # argparse supports double dash to separate options and positional arguments,
+    # but the user has to remove it manually.
+    if cmd_args and cmd_args[0] == '--':
+        cmd_args = cmd_args[1:]
+    if not options.unpickle and not cmd_args:
+        parser.error('either --unpickle or executable and arguments are required')
+    if options.unpickle:
+        if cmd_args or options.capture or options.feed:
+            parser.error('no other arguments can be used with --unpickle')
+        with open(options.unpickle, 'rb') as f:
+            exe = pickle.load(f)
+            exe.pickled = True
+    else:
+        exe = ExecutableSerialisation(cmd_args, capture=options.capture, feed=options.feed)
+
+    return run_exe(exe)
+
+if __name__ == '__main__':
+    sys.exit(run(sys.argv[1:]))
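Editor's note: this wrapper is normally invoked by Meson itself, either with a pickled ExecutableSerialisation (`--unpickle`) or with a raw command line after `--`. A hedged sketch of the second mode, assuming a Meson checkout is importable and a POSIX `echo` binary exists:

    # A minimal sketch; the echoed command is arbitrary.
    from mesonbuild.scripts import meson_exe

    # Equivalent to running the wrapper as: meson_exe -- echo hello
    rc = meson_exe.run(['--', 'echo', 'hello'])
    print('wrapped command exited with', rc)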
diff --git a/vendored-meson/meson/mesonbuild/scripts/msgfmthelper.py b/vendored-meson/meson/mesonbuild/scripts/msgfmthelper.py
new file mode 100644
index 000000000000..28bcc8b83226
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/scripts/msgfmthelper.py
@@ -0,0 +1,39 @@
+# Copyright 2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+import argparse
+import subprocess
+import os
+import typing as T
+
+parser = argparse.ArgumentParser()
+parser.add_argument('input')
+parser.add_argument('output')
+parser.add_argument('type')
+parser.add_argument('podir')
+parser.add_argument('--msgfmt', default='msgfmt')
+parser.add_argument('--datadirs', default='')
+parser.add_argument('args', default=[], metavar='extra msgfmt argument', nargs='*')
+
+
+def run(args: T.List[str]) -> int:
+    options = parser.parse_args(args)
+    env = None
+    if options.datadirs:
+        env = os.environ.copy()
+        env.update({'GETTEXTDATADIRS': options.datadirs})
+    return subprocess.call([options.msgfmt, '--' + options.type, '-d', options.podir,
+                            '--template', options.input,  '-o', options.output] + options.args,
+                           env=env)
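Editor's note: the helper boils down to a single msgfmt invocation. A hedged sketch of the equivalent call for a hypothetical `desktop` target, i.e. what `run(['app.desktop.in', 'app.desktop', 'desktop', 'po'])` would execute (requires msgfmt on PATH):

    import subprocess

    # The command the helper assembles; file names are hypothetical.
    subprocess.call(['msgfmt', '--desktop', '-d', 'po',
                     '--template', 'app.desktop.in', '-o', 'app.desktop'])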
diff --git a/vendored-meson/meson/mesonbuild/scripts/pycompile.py b/vendored-meson/meson/mesonbuild/scripts/pycompile.py
new file mode 100644
index 000000000000..b236a1ca322e
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/scripts/pycompile.py
@@ -0,0 +1,65 @@
+# Copyright 2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# ignore all lints for this file, since it is run by python2 as well
+
+# type: ignore
+# pylint: disable=deprecated-module
+
+import json, os, subprocess, sys
+from compileall import compile_file
+
+quiet = int(os.environ.get('MESON_INSTALL_QUIET', 0))
+
+def compileall(files):
+    for f in files:
+        # f is prefixed by {py_xxxxlib}, both variants are 12 chars
+        # the key is the middle 10 chars of the prefix
+        key = f[1:11].upper()
+        f = f[12:]
+
+        ddir = None
+        fullpath = os.environ['MESON_INSTALL_DESTDIR_'+key] + f
+        f = os.environ['MESON_INSTALL_'+key] + f
+
+        if fullpath != f:
+            ddir = os.path.dirname(f)
+
+        if os.path.isdir(fullpath):
+            for root, _, files in os.walk(fullpath):
+                if ddir is not None:
+                    ddir = root.replace(fullpath, f, 1)
+                for dirf in files:
+                    if dirf.endswith('.py'):
+                        fullpath = os.path.join(root, dirf)
+                        compile_file(fullpath, ddir, force=True, quiet=quiet)
+        else:
+            compile_file(fullpath, ddir, force=True, quiet=quiet)
+
+def run(manifest):
+    data_file = os.path.join(os.path.dirname(__file__), manifest)
+    with open(data_file, 'rb') as f:
+        dat = json.load(f)
+    compileall(dat)
+
+if __name__ == '__main__':
+    manifest = sys.argv[1]
+    run(manifest)
+    if len(sys.argv) > 2:
+        optlevel = int(sys.argv[2])
+        # python2 only needs one or the other
+        if optlevel == 1 or (sys.version_info >= (3,) and optlevel > 0):
+            subprocess.check_call([sys.executable, '-O'] + sys.argv[:2])
+        if optlevel == 2:
+            subprocess.check_call([sys.executable, '-OO'] + sys.argv[:2])
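Editor's note: the prefix decoding in compileall() is terse. Each manifest entry starts with a 12-character tag such as '{py_platlib}' or '{py_purelib}'; characters 1..10 of the tag, uppercased, select the MESON_INSTALL_* environment variables. A small illustration with a hypothetical entry:

    # Hypothetical manifest entry as written by Meson's installer:
    f = '{py_platlib}/pkg/mod.py'

    key = f[1:11].upper()   # 'PY_PLATLIB' (middle 10 chars of the tag)
    path = f[12:]           # '/pkg/mod.py' (tag stripped)

    # The env vars consulted would then be MESON_INSTALL_PY_PLATLIB
    # and MESON_INSTALL_DESTDIR_PY_PLATLIB.
    print(key, path)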
diff --git a/vendored-meson/meson/mesonbuild/scripts/python_info.py b/vendored-meson/meson/mesonbuild/scripts/python_info.py
new file mode 100755
index 000000000000..9c3a0791ac90
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/scripts/python_info.py
@@ -0,0 +1,79 @@
+#!/usr/bin/env python
+
+# ignore all lints for this file, since it is run by python2 as well
+
+# type: ignore
+# pylint: disable=deprecated-module
+
+import sys
+
+# do not inject mesonbuild.scripts
+# python -P would work too, but is exclusive to >=3.11
+if sys.path[0].endswith('scripts'):
+    del sys.path[0]
+
+import json, os, sysconfig
+import distutils.command.install
+
+def get_distutils_paths(scheme=None, prefix=None):
+    import distutils.dist
+    distribution = distutils.dist.Distribution()
+    install_cmd = distribution.get_command_obj('install')
+    if prefix is not None:
+        install_cmd.prefix = prefix
+    if scheme:
+        install_cmd.select_scheme(scheme)
+    install_cmd.finalize_options()
+    return {
+        'data': install_cmd.install_data,
+        'include': os.path.dirname(install_cmd.install_headers),
+        'platlib': install_cmd.install_platlib,
+        'purelib': install_cmd.install_purelib,
+        'scripts': install_cmd.install_scripts,
+    }
+
+# On Debian derivatives, the Python interpreter shipped by the distribution uses
+# a custom install scheme, deb_system, for the system install, and changes the
+# default scheme to a custom one pointing to /usr/local and replacing
+# site-packages with dist-packages.
+# See https://github.com/mesonbuild/meson/issues/8739.
+# XXX: We should be using sysconfig, but Debian only patches distutils.
+
+if 'deb_system' in distutils.command.install.INSTALL_SCHEMES:
+    paths = get_distutils_paths(scheme='deb_system')
+    install_paths = get_distutils_paths(scheme='deb_system', prefix='')
+else:
+    paths = sysconfig.get_paths()
+    empty_vars = {'base': '', 'platbase': '', 'installed_base': ''}
+    install_paths = sysconfig.get_paths(vars=empty_vars)
+
+def links_against_libpython():
+    from distutils.core import Distribution, Extension
+    cmd = Distribution().get_command_obj('build_ext')
+    cmd.ensure_finalized()
+    return bool(cmd.get_libraries(Extension('dummy', [])))
+
+variables = sysconfig.get_config_vars()
+variables.update({'base_prefix': getattr(sys, 'base_prefix', sys.prefix)})
+
+if sys.version_info < (3, 0):
+    suffix = variables.get('SO')
+elif sys.version_info < (3, 8, 7):
+    # https://bugs.python.org/issue?@action=redirect&bpo=39825
+    from distutils.sysconfig import get_config_var
+    suffix = get_config_var('EXT_SUFFIX')
+else:
+    suffix = variables.get('EXT_SUFFIX')
+
+print(json.dumps({
+  'variables': variables,
+  'paths': paths,
+  'sysconfig_paths': sysconfig.get_paths(),
+  'install_paths': install_paths,
+  'version': sysconfig.get_python_version(),
+  'platform': sysconfig.get_platform(),
+  'is_pypy': '__pypy__' in sys.builtin_module_names,
+  'is_venv': sys.prefix != variables['base_prefix'],
+  'link_libpython': links_against_libpython(),
+  'suffix': suffix,
+}))
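Editor's note: the script prints a single JSON object on stdout, which Meson's python module parses with the target interpreter. A hedged sketch of consuming it; the script path is hypothetical:

    import json, subprocess, sys

    # Run the introspection script with the interpreter of interest
    # and decode the JSON blob it prints.
    out = subprocess.check_output([sys.executable, 'python_info.py'])
    info = json.loads(out)
    print(info['version'], info['platform'], info['suffix'])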
diff --git a/vendored-meson/meson/mesonbuild/scripts/regen_checker.py b/vendored-meson/meson/mesonbuild/scripts/regen_checker.py
new file mode 100644
index 000000000000..f3a6f3cad484
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/scripts/regen_checker.py
@@ -0,0 +1,65 @@
+# Copyright 2015-2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+import sys, os
+import pickle, subprocess
+import typing as T
+from ..coredata import CoreData
+from ..backend.backends import RegenInfo
+from ..mesonlib import OptionKey
+
+# This could also be used for XCode.
+
+def need_regen(regeninfo: RegenInfo, regen_timestamp: float) -> bool:
+    for i in regeninfo.depfiles:
+        curfile = os.path.join(regeninfo.build_dir, i)
+        curtime = os.stat(curfile).st_mtime
+        if curtime > regen_timestamp:
+            return True
+    # The timestamp file gets automatically deleted by MSBuild during a 'Clean' build.
+    # We must make sure to recreate it, even if we do not regenerate the solution.
+    # Otherwise, Visual Studio will always consider the REGEN project out of date.
+    print("Everything is up-to-date, regeneration of build files is not needed.")
+    from ..backend.vs2010backend import Vs2010Backend
+    Vs2010Backend.touch_regen_timestamp(regeninfo.build_dir)
+    return False
+
+def regen(regeninfo: RegenInfo, meson_command: T.List[str], backend: str) -> None:
+    cmd = meson_command + ['--internal',
+                           'regenerate',
+                           regeninfo.build_dir,
+                           regeninfo.source_dir,
+                           '--backend=' + backend]
+    subprocess.check_call(cmd)
+
+def run(args: T.List[str]) -> int:
+    private_dir = args[0]
+    dumpfile = os.path.join(private_dir, 'regeninfo.dump')
+    coredata_file = os.path.join(private_dir, 'coredata.dat')
+    with open(dumpfile, 'rb') as f:
+        regeninfo = pickle.load(f)
+        assert isinstance(regeninfo, RegenInfo)
+    with open(coredata_file, 'rb') as f:
+        coredata = pickle.load(f)
+        assert isinstance(coredata, CoreData)
+    backend = coredata.get_option(OptionKey('backend'))
+    assert isinstance(backend, str)
+    regen_timestamp = os.stat(dumpfile).st_mtime
+    if need_regen(regeninfo, regen_timestamp):
+        regen(regeninfo, coredata.meson_command, backend)
+    return 0
+
+if __name__ == '__main__':
+    sys.exit(run(sys.argv[1:]))
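Editor's note: the staleness test in need_regen() is a plain mtime comparison against the dump file's timestamp. A standalone sketch of the same check, with hypothetical file names:

    import os

    def is_stale(depfiles, stamp_mtime):
        # Regeneration is needed as soon as any dependency is newer
        # than the timestamp captured when the backend last ran.
        return any(os.stat(f).st_mtime > stamp_mtime for f in depfiles)

    # Hypothetical usage:
    # stamp = os.stat('build/meson-private/regeninfo.dump').st_mtime
    # if is_stale(['meson.build', 'src/meson.build'], stamp): regenerate()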
diff --git a/vendored-meson/meson/mesonbuild/scripts/run_tool.py b/vendored-meson/meson/mesonbuild/scripts/run_tool.py
new file mode 100644
index 000000000000..adf767a81779
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/scripts/run_tool.py
@@ -0,0 +1,68 @@
+# Copyright 2018 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+import itertools
+import fnmatch
+from pathlib import Path
+from concurrent.futures import ThreadPoolExecutor
+
+from ..compilers import lang_suffixes
+from ..mesonlib import quiet_git
+import typing as T
+
+if T.TYPE_CHECKING:
+    import subprocess
+
+def parse_pattern_file(fname: Path) -> T.List[str]:
+    patterns = []
+    try:
+        with fname.open(encoding='utf-8') as f:
+            for line in f:
+                pattern = line.strip()
+                if pattern and not pattern.startswith('#'):
+                    patterns.append(pattern)
+    except FileNotFoundError:
+        pass
+    return patterns
+
+def run_tool(name: str, srcdir: Path, builddir: Path, fn: T.Callable[..., subprocess.CompletedProcess], *args: T.Any) -> int:
+    patterns = parse_pattern_file(srcdir / f'.{name}-include')
+    globs: T.Union[T.List[T.List[Path]], T.List[T.Generator[Path, None, None]]]
+    if patterns:
+        globs = [srcdir.glob(p) for p in patterns]
+    else:
+        r, o = quiet_git(['ls-files'], srcdir)
+        if r:
+            globs = [[Path(srcdir, f) for f in o.splitlines()]]
+        else:
+            globs = [srcdir.glob('**/*')]
+    patterns = parse_pattern_file(srcdir / f'.{name}-ignore')
+    ignore = [str(builddir / '*')]
+    ignore.extend([str(srcdir / p) for p in patterns])
+    suffixes = set(lang_suffixes['c']).union(set(lang_suffixes['cpp']))
+    suffixes.add('h')
+    suffixes = {f'.{s}' for s in suffixes}
+    futures = []
+    returncode = 0
+    with ThreadPoolExecutor() as e:
+        for f in itertools.chain(*globs):
+            strf = str(f)
+            if f.is_dir() or f.suffix not in suffixes or \
+                    any(fnmatch.fnmatch(strf, i) for i in ignore):
+                continue
+            futures.append(e.submit(fn, f, *args))
+        if futures:
+            returncode = max(x.result().returncode for x in futures)
+    return returncode
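Editor's note: run_tool() is the engine behind targets like clang-format: a `.{name}-include` file in the source root supplies glob patterns, `.{name}-ignore` removes matches, and the callback runs over each C/C++ file in a thread pool. A hedged usage sketch, assuming a Meson checkout is importable; the tool name and callback are hypothetical:

    import subprocess
    from pathlib import Path
    from mesonbuild.scripts.run_tool import run_tool

    def check(path: Path) -> subprocess.CompletedProcess:
        # Run a hypothetical per-file checker; run_tool() takes the
        # max returncode across all submitted files.
        return subprocess.run(['echo', str(path)])

    # Would honour .mytool-include / .mytool-ignore in the source root.
    rc = run_tool('mytool', Path('.'), Path('build'), check)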
diff --git a/vendored-meson/meson/mesonbuild/scripts/scanbuild.py b/vendored-meson/meson/mesonbuild/scripts/scanbuild.py
new file mode 100644
index 000000000000..9cfc75dc388d
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/scripts/scanbuild.py
@@ -0,0 +1,66 @@
+# Copyright 2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+import subprocess
+import shutil
+import tempfile
+from ..environment import detect_ninja, detect_scanbuild
+from ..coredata import get_cmd_line_file, CmdLineFileParser
+from ..mesonlib import windows_proof_rmtree
+from pathlib import Path
+import typing as T
+from ast import literal_eval
+import os
+
+def scanbuild(exelist: T.List[str], srcdir: Path, blddir: Path, privdir: Path, logdir: Path, args: T.List[str]) -> int:
+    # In case of problems leave the temp directory around
+    # so it can be debugged.
+    scandir = tempfile.mkdtemp(dir=str(privdir))
+    meson_cmd = exelist + args
+    build_cmd = exelist + ['-o', str(logdir)] + detect_ninja() + ['-C', scandir]
+    rc = subprocess.call(meson_cmd + [str(srcdir), scandir])
+    if rc != 0:
+        return rc
+    rc = subprocess.call(build_cmd)
+    if rc == 0:
+        windows_proof_rmtree(scandir)
+    return rc
+
+def run(args: T.List[str]) -> int:
+    srcdir = Path(args[0])
+    bldpath = Path(args[1])
+    blddir = args[1]
+    meson_cmd = args[2:]
+    privdir = bldpath / 'meson-private'
+    logdir = bldpath / 'meson-logs' / 'scanbuild'
+    shutil.rmtree(str(logdir), ignore_errors=True)
+
+    # if any cross or native files are specified we should use them
+    cmd = get_cmd_line_file(blddir)
+    data = CmdLineFileParser()
+    data.read(cmd)
+
+    if 'cross_file' in data['properties']:
+        meson_cmd.extend([f'--cross-file={os.path.abspath(f)}' for f in literal_eval(data['properties']['cross_file'])])
+
+    if 'native_file' in data['properties']:
+        meson_cmd.extend([f'--native-file={os.path.abspath(f)}' for f in literal_eval(data['properties']['native_file'])])
+
+    exelist = detect_scanbuild()
+    if not exelist:
+        print('Could not execute scan-build "%s"' % ' '.join(exelist))
+        return 1
+
+    return scanbuild(exelist, srcdir, bldpath, privdir, logdir, meson_cmd)
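Editor's note: the wrapper reproduces the canonical scan-build recipe: configure a scratch build directory through scan-build, then drive ninja through it so analyzer reports land in the log directory. An illustrative-only sketch of the two calls, with hypothetical paths (requires scan-build, meson, and ninja on PATH):

    import subprocess

    subprocess.call(['scan-build', 'meson', 'setup', '.', 'scratchdir'])
    subprocess.call(['scan-build', '-o', 'build/meson-logs/scanbuild',
                     'ninja', '-C', 'scratchdir'])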
diff --git a/vendored-meson/meson/mesonbuild/scripts/symbolextractor.py b/vendored-meson/meson/mesonbuild/scripts/symbolextractor.py
new file mode 100644
index 000000000000..08d839bfdb9f
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/scripts/symbolextractor.py
@@ -0,0 +1,333 @@
+# Copyright 2013-2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This script extracts the symbols of a given shared library
+# into a file. If the symbols have not changed, the file is not
+# touched. This information is used to skip link steps if the
+# ABI has not changed.
+
+# This file is basically a reimplementation of
+# http://cgit.freedesktop.org/libreoffice/core/commit/?id=3213cd54b76bc80a6f0516aac75a48ff3b2ad67c
+from __future__ import annotations
+
+import typing as T
+import os, sys
+from .. import mesonlib
+from .. import mlog
+from ..mesonlib import Popen_safe
+import argparse
+
+parser = argparse.ArgumentParser()
+
+parser.add_argument('--cross-host', default=None, dest='cross_host',
+                    help='cross compilation host platform')
+parser.add_argument('args', nargs='+')
+
+TOOL_WARNING_FILE = None
+RELINKING_WARNING = 'Relinking will always happen on source changes.'
+
+def dummy_syms(outfilename: str) -> None:
+    """Just touch it so relinking happens always."""
+    with open(outfilename, 'w', encoding='utf-8'):
+        pass
+
+def write_if_changed(text: str, outfilename: str) -> None:
+    try:
+        with open(outfilename, encoding='utf-8') as f:
+            oldtext = f.read()
+        if text == oldtext:
+            return
+    except FileNotFoundError:
+        pass
+    with open(outfilename, 'w', encoding='utf-8') as f:
+        f.write(text)
+
+def print_tool_warning(tools: T.List[str], msg: str, stderr: T.Optional[str] = None) -> None:
+    if os.path.exists(TOOL_WARNING_FILE):
+        return
+    m = f'{tools!r} {msg}. {RELINKING_WARNING}'
+    if stderr:
+        m += '\n' + stderr
+    mlog.warning(m)
+    # Write it out so we don't warn again
+    with open(TOOL_WARNING_FILE, 'w', encoding='utf-8'):
+        pass
+
+def get_tool(name: str) -> T.List[str]:
+    evar = name.upper()
+    if evar in os.environ:
+        import shlex
+        return shlex.split(os.environ[evar])
+    return [name]
+
+def call_tool(name: str, args: T.List[str], **kwargs: T.Any) -> str:
+    tool = get_tool(name)
+    try:
+        p, output, e = Popen_safe(tool + args, **kwargs)
+    except FileNotFoundError:
+        print_tool_warning(tool, 'not found')
+        return None
+    except PermissionError:
+        print_tool_warning(tool, 'not usable')
+        return None
+    if p.returncode != 0:
+        print_tool_warning(tool, 'does not work', e)
+        return None
+    return output
+
+def call_tool_nowarn(tool: T.List[str], **kwargs: T.Any) -> T.Tuple[str, str]:
+    try:
+        p, output, e = Popen_safe(tool, **kwargs)
+    except FileNotFoundError:
+        return None, '{!r} not found\n'.format(tool[0])
+    except PermissionError:
+        return None, '{!r} not usable\n'.format(tool[0])
+    if p.returncode != 0:
+        return None, e
+    return output, None
+
+def gnu_syms(libfilename: str, outfilename: str) -> None:
+    # Get the name of the library
+    output = call_tool('readelf', ['-d', libfilename])
+    if not output:
+        dummy_syms(outfilename)
+        return
+    result = [x for x in output.split('\n') if 'SONAME' in x]
+    assert len(result) <= 1
+    # Get a list of all symbols exported
+    output = call_tool('nm', ['--dynamic', '--extern-only', '--defined-only',
+                              '--format=posix', libfilename])
+    if not output:
+        dummy_syms(outfilename)
+        return
+    for line in output.split('\n'):
+        if not line:
+            continue
+        line_split = line.split()
+        entry = line_split[0:2]
+        # Store the size of symbols pointing to data objects so we relink
+        # when those change, which is needed because of copy relocations
+        # https://github.com/mesonbuild/meson/pull/7132#issuecomment-628353702
+        if line_split[1].upper() in {'B', 'G', 'D'} and len(line_split) >= 4:
+            entry += [line_split[3]]
+        result += [' '.join(entry)]
+    write_if_changed('\n'.join(result) + '\n', outfilename)
+
+def solaris_syms(libfilename: str, outfilename: str) -> None:
+    # gnu_syms() works with GNU nm & readelf, not Solaris nm & elfdump
+    origpath = os.environ['PATH']
+    try:
+        os.environ['PATH'] = '/usr/gnu/bin:' + origpath
+        gnu_syms(libfilename, outfilename)
+    finally:
+        os.environ['PATH'] = origpath
+
+def osx_syms(libfilename: str, outfilename: str) -> None:
+    # Get the name of the library
+    output = call_tool('otool', ['-l', libfilename])
+    if not output:
+        dummy_syms(outfilename)
+        return
+    arr = output.split('\n')
+    for (i, val) in enumerate(arr):
+        if 'LC_ID_DYLIB' in val:
+            match = i
+            break
+    result = [arr[match + 2], arr[match + 5]] # Libreoffice stores all 5 lines but the others seem irrelevant.
+    # Get a list of all symbols exported
+    output = call_tool('nm', ['--extern-only', '--defined-only',
+                              '--format=posix', libfilename])
+    if not output:
+        dummy_syms(outfilename)
+        return
+    result += [' '.join(x.split()[0:2]) for x in output.split('\n')]
+    write_if_changed('\n'.join(result) + '\n', outfilename)
+
+def openbsd_syms(libfilename: str, outfilename: str) -> None:
+    # Get the name of the library
+    output = call_tool('readelf', ['-d', libfilename])
+    if not output:
+        dummy_syms(outfilename)
+        return
+    result = [x for x in output.split('\n') if 'SONAME' in x]
+    assert len(result) <= 1
+    # Get a list of all symbols exported
+    output = call_tool('nm', ['-D', '-P', '-g', libfilename])
+    if not output:
+        dummy_syms(outfilename)
+        return
+    # U = undefined (cope with the lack of --defined-only option)
+    result += [' '.join(x.split()[0:2]) for x in output.split('\n') if x and not x.endswith('U ')]
+    write_if_changed('\n'.join(result) + '\n', outfilename)
+
+def freebsd_syms(libfilename: str, outfilename: str) -> None:
+    # Get the name of the library
+    output = call_tool('readelf', ['-d', libfilename])
+    if not output:
+        dummy_syms(outfilename)
+        return
+    result = [x for x in output.split('\n') if 'SONAME' in x]
+    assert len(result) <= 1
+    # Get a list of all symbols exported
+    output = call_tool('nm', ['--dynamic', '--extern-only', '--defined-only',
+                              '--format=posix', libfilename])
+    if not output:
+        dummy_syms(outfilename)
+        return
+
+    result += [' '.join(x.split()[0:2]) for x in output.split('\n')]
+    write_if_changed('\n'.join(result) + '\n', outfilename)
+
+def cygwin_syms(impfilename: str, outfilename: str) -> None:
+    # Get the name of the library
+    output = call_tool('dlltool', ['-I', impfilename])
+    if not output:
+        dummy_syms(outfilename)
+        return
+    result = [output]
+    # Get the list of all symbols exported
+    output = call_tool('nm', ['--extern-only', '--defined-only',
+                              '--format=posix', impfilename])
+    if not output:
+        dummy_syms(outfilename)
+        return
+    for line in output.split('\n'):
+        if ' T ' not in line:
+            continue
+        result.append(line.split(maxsplit=1)[0])
+    write_if_changed('\n'.join(result) + '\n', outfilename)
+
+def _get_implib_dllname(impfilename: str) -> T.Tuple[T.List[str], str]:
+    all_stderr = ''
+    # First try lib.exe, which is provided by MSVC. Then llvm-lib.exe, by LLVM
+    # for clang-cl.
+    #
+    # We cannot call get_tool on `lib` because it will look at the `LIB` env
+    # var which is the list of library paths MSVC will search for import
+    # libraries while linking.
+    for lib in (['lib'], get_tool('llvm-lib')):
+        output, e = call_tool_nowarn(lib + ['-list', impfilename])
+        if output:
+            # The output is a list of DLLs that each symbol exported by the import
+            # library is available in. We only build import libraries that point to
+            # a single DLL, so we can pick any of these. Pick the last one for
+            # simplicity. Also skip the last line, which is empty.
+            return output.split('\n')[-2:-1], None
+        all_stderr += e
+    # Next, try dlltool.exe which is provided by MinGW
+    output, e = call_tool_nowarn(get_tool('dlltool') + ['-I', impfilename])
+    if output:
+        return [output], None
+    all_stderr += e
+    return ([], all_stderr)
+
+def _get_implib_exports(impfilename: str) -> T.Tuple[T.List[str], str]:
+    all_stderr = ''
+    # Force dumpbin.exe to use en-US so we can parse its output
+    env = os.environ.copy()
+    env['VSLANG'] = '1033'
+    output, e = call_tool_nowarn(get_tool('dumpbin') + ['-exports', impfilename], env=env)
+    if output:
+        lines = output.split('\n')
+        start = lines.index('File Type: LIBRARY')
+        end = lines.index('  Summary')
+        return lines[start:end], None
+    all_stderr += e
+    # Next, try llvm-nm.exe provided by LLVM, then nm.exe provided by MinGW
+    for nm in ('llvm-nm', 'nm'):
+        output, e = call_tool_nowarn(get_tool(nm) + ['--extern-only', '--defined-only',
+                                                     '--format=posix', impfilename])
+        if output:
+            result = []
+            for line in output.split('\n'):
+                if ' T ' not in line or line.startswith('.text'):
+                    continue
+                result.append(line.split(maxsplit=1)[0])
+            return result, None
+        all_stderr += e
+    return ([], all_stderr)
+
+def windows_syms(impfilename: str, outfilename: str) -> None:
+    # Get the name of the library
+    result, e = _get_implib_dllname(impfilename)
+    if not result:
+        print_tool_warning(['lib', 'llvm-lib', 'dlltool'], 'do not work or were not found', e)
+        dummy_syms(outfilename)
+        return
+    # Get a list of all symbols exported
+    symbols, e = _get_implib_exports(impfilename)
+    if not symbols:
+        print_tool_warning(['dumpbin', 'llvm-nm', 'nm'], 'do not work or were not found', e)
+        dummy_syms(outfilename)
+        return
+    result += symbols
+    write_if_changed('\n'.join(result) + '\n', outfilename)
+
+def gen_symbols(libfilename: str, impfilename: str, outfilename: str, cross_host: str) -> None:
+    if cross_host is not None:
+        # In case of cross builds just always relink. In theory we could
+        # determine the correct toolset, but we would need to use the correct
+        # `nm`, `readelf`, etc, from the cross info which requires refactoring.
+        dummy_syms(outfilename)
+    elif mesonlib.is_linux() or mesonlib.is_hurd():
+        gnu_syms(libfilename, outfilename)
+    elif mesonlib.is_osx():
+        osx_syms(libfilename, outfilename)
+    elif mesonlib.is_openbsd():
+        openbsd_syms(libfilename, outfilename)
+    elif mesonlib.is_freebsd():
+        freebsd_syms(libfilename, outfilename)
+    elif mesonlib.is_netbsd():
+        freebsd_syms(libfilename, outfilename)
+    elif mesonlib.is_windows():
+        if os.path.isfile(impfilename):
+            windows_syms(impfilename, outfilename)
+        else:
+            # No import library. Not sure how the DLL is being used, so just
+            # rebuild everything that links to it every time.
+            dummy_syms(outfilename)
+    elif mesonlib.is_cygwin():
+        if os.path.isfile(impfilename):
+            cygwin_syms(impfilename, outfilename)
+        else:
+            # No import library. Not sure how the DLL is being used, so just
+            # rebuild everything that links to it every time.
+            dummy_syms(outfilename)
+    elif mesonlib.is_sunos():
+        solaris_syms(libfilename, outfilename)
+    else:
+        if not os.path.exists(TOOL_WARNING_FILE):
+            mlog.warning('Symbol extracting has not been implemented for this '
+                         'platform. ' + RELINKING_WARNING)
+            # Write it out so we don't warn again
+            with open(TOOL_WARNING_FILE, 'w', encoding='utf-8'):
+                pass
+        dummy_syms(outfilename)
+
+def run(args: T.List[str]) -> int:
+    global TOOL_WARNING_FILE  # pylint: disable=global-statement
+    options = parser.parse_args(args)
+    if len(options.args) != 4:
+        print('symbolextractor.py <shared library file> <import library> <output file>')
+        sys.exit(1)
+    privdir = os.path.join(options.args[0], 'meson-private')
+    TOOL_WARNING_FILE = os.path.join(privdir, 'symbolextractor_tool_warning_printed')
+    libfile = options.args[1]
+    impfile = options.args[2] # Only used on Windows
+    outfile = options.args[3]
+    gen_symbols(libfile, impfile, outfile, options.cross_host)
+    return 0
+
+if __name__ == '__main__':
+    sys.exit(run(sys.argv[1:]))
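Editor's note: the whole mechanism rests on write_if_changed(): the symbol file's mtime only advances when the exported ABI actually changes, so the build system can skip dependent link steps. A toy demonstration of the idempotent write (temp paths are hypothetical):

    import os, tempfile

    def write_if_changed(text, outfilename):
        # Rewrite only when content differs, preserving mtime otherwise
        # (same logic as above).
        try:
            with open(outfilename, encoding='utf-8') as f:
                if f.read() == text:
                    return
        except FileNotFoundError:
            pass
        with open(outfilename, 'w', encoding='utf-8') as f:
            f.write(text)

    fname = os.path.join(tempfile.mkdtemp(), 'app.symbols')
    write_if_changed('SONAME libapp.so.1\nfoo T\n', fname)
    before = os.stat(fname).st_mtime_ns
    write_if_changed('SONAME libapp.so.1\nfoo T\n', fname)  # no-op
    assert os.stat(fname).st_mtime_ns == before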
diff --git a/vendored-meson/meson/mesonbuild/scripts/tags.py b/vendored-meson/meson/mesonbuild/scripts/tags.py
new file mode 100644
index 000000000000..c85680705d0f
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/scripts/tags.py
@@ -0,0 +1,54 @@
+# Copyright 2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+import os
+import subprocess
+from pathlib import Path
+import typing as T
+
+def ls_as_bytestream() -> bytes:
+    if os.path.exists('.git'):
+        return subprocess.run(['git', 'ls-tree', '-r', '--name-only', 'HEAD'],
+                              stdout=subprocess.PIPE).stdout
+
+    files = [str(p) for p in Path('.').glob('**/*')
+             if not p.is_dir() and
+             not next((x for x in p.parts if x.startswith('.')), None)]
+    return '\n'.join(files).encode()
+
+
+def cscope() -> int:
+    ls = b'\n'.join([b'"%s"' % f for f in ls_as_bytestream().split()])
+    return subprocess.run(['cscope', '-v', '-b', '-i-'], input=ls).returncode
+
+
+def ctags() -> int:
+    ls = ls_as_bytestream()
+    return subprocess.run(['ctags', '-L-'], input=ls).returncode
+
+
+def etags() -> int:
+    ls = ls_as_bytestream()
+    return subprocess.run(['etags', '-'], input=ls).returncode
+
+
+def run(args: T.List[str]) -> int:
+    tool_name = args[0]
+    srcdir_name = args[1]
+    os.chdir(srcdir_name)
+    assert tool_name in {'cscope', 'ctags', 'etags'}
+    res = globals()[tool_name]()
+    assert isinstance(res, int)
+    return res
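Editor's note: each backend simply feeds the file list to the tagging tool on stdin (e.g. `ctags -L-` reads file names from standard input). A hedged equivalent of the ctags path for a git checkout, assuming git and ctags are installed:

    import subprocess

    files = subprocess.run(['git', 'ls-tree', '-r', '--name-only', 'HEAD'],
                           stdout=subprocess.PIPE).stdout
    subprocess.run(['ctags', '-L-'], input=files)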
diff --git a/vendored-meson/meson/mesonbuild/scripts/test_loaded_modules.py b/vendored-meson/meson/mesonbuild/scripts/test_loaded_modules.py
new file mode 100644
index 000000000000..b3547beafe9d
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/scripts/test_loaded_modules.py
@@ -0,0 +1,11 @@
+import sys
+import json
+import typing as T
+from . import meson_exe
+
+# This script is used by run_unittests.py to verify we don't load too many
+# modules when executing a wrapped command.
+def run(args: T.List[str]) -> int:
+    meson_exe.run(args)
+    print(json.dumps(list(sys.modules.keys())))
+    return 0
diff --git a/vendored-meson/meson/mesonbuild/scripts/uninstall.py b/vendored-meson/meson/mesonbuild/scripts/uninstall.py
new file mode 100644
index 000000000000..8548766150a1
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/scripts/uninstall.py
@@ -0,0 +1,51 @@
+# Copyright 2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+import os
+import typing as T
+
+logfile = 'meson-logs/install-log.txt'
+
+def do_uninstall(log: str) -> None:
+    failures = 0
+    successes = 0
+    for line in open(log, encoding='utf-8'):
+        if line.startswith('#'):
+            continue
+        fname = line.strip()
+        try:
+            if os.path.isdir(fname) and not os.path.islink(fname):
+                os.rmdir(fname)
+            else:
+                os.unlink(fname)
+            print('Deleted:', fname)
+            successes += 1
+        except Exception as e:
+            print(f'Could not delete {fname}: {e}.')
+            failures += 1
+    print('\nUninstall finished.\n')
+    print('Deleted:', successes)
+    print('Failed:', failures)
+    print('\nRemember that files created by custom scripts have not been removed.')
+
+def run(args: T.List[str]) -> int:
+    if args:
+        print('Weird error.')
+        return 1
+    if not os.path.exists(logfile):
+        print('Log file does not exist, no installation has been done.')
+        return 0
+    do_uninstall(logfile)
+    return 0
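Editor's note: the uninstaller is driven entirely by the install log, one path per line with '#' comment lines skipped. A minimal self-contained sketch of the removal loop, using a hypothetical two-line log:

    import os, tempfile

    victim = os.path.join(tempfile.mkdtemp(), 'installed.txt')
    open(victim, 'w').close()
    log = os.path.join(tempfile.mkdtemp(), 'install-log.txt')
    with open(log, 'w', encoding='utf-8') as f:
        f.write('# List of files installed by Meson\n%s\n' % victim)

    for line in open(log, encoding='utf-8'):
        if line.startswith('#'):
            continue
        os.unlink(line.strip())  # do_uninstall() also handles dirs/links

    assert not os.path.exists(victim)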
diff --git a/vendored-meson/meson/mesonbuild/scripts/vcstagger.py b/vendored-meson/meson/mesonbuild/scripts/vcstagger.py
new file mode 100644
index 000000000000..c484ee111859
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/scripts/vcstagger.py
@@ -0,0 +1,45 @@
+# Copyright 2015-2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+import sys, os, subprocess, re
+import typing as T
+
+def config_vcs_tag(infile: str, outfile: str, fallback: str, source_dir: str, replace_string: str, regex_selector: str, cmd: T.List[str]) -> None:
+    try:
+        output = subprocess.check_output(cmd, cwd=source_dir)
+        new_string = re.search(regex_selector, output.decode()).group(1).strip()
+    except Exception:
+        new_string = fallback
+
+    with open(infile, encoding='utf-8') as f:
+        new_data = f.read().replace(replace_string, new_string)
+    if os.path.exists(outfile):
+        with open(outfile, encoding='utf-8') as f:
+            needs_update = f.read() != new_data
+    else:
+        needs_update = True
+    if needs_update:
+        with open(outfile, 'w', encoding='utf-8') as f:
+            f.write(new_data)
+
+
+def run(args: T.List[str]) -> int:
+    infile, outfile, fallback, source_dir, replace_string, regex_selector = args[0:6]
+    command = args[6:]
+    config_vcs_tag(infile, outfile, fallback, source_dir, replace_string, regex_selector, command)
+    return 0
+
+if __name__ == '__main__':
+    sys.exit(run(sys.argv[1:]))
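Editor's note: config_vcs_tag() is a tiny configure_file-style substitution: run the VCS command, pull the version out with a regex, splice it into the template, and fall back to a fixed string on any failure. A hedged sketch of the regex/fallback dance with `git describe` as a hypothetical VCS command:

    import re, subprocess

    try:
        out = subprocess.check_output(['git', 'describe'], cwd='.')
        version = re.search(r'(.*)', out.decode()).group(1).strip()
    except Exception:
        version = '1.0.0-fallback'  # the `fallback` argument
    print('would replace @VCS_TAG@ with', version)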
diff --git a/vendored-meson/meson/mesonbuild/scripts/yasm.py b/vendored-meson/meson/mesonbuild/scripts/yasm.py
new file mode 100644
index 000000000000..730ff3e1657c
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/scripts/yasm.py
@@ -0,0 +1,22 @@
+import argparse
+import subprocess
+import typing as T
+
+def run(args: T.List[str]) -> int:
+    parser = argparse.ArgumentParser()
+    parser.add_argument('--depfile')
+    options, yasm_cmd = parser.parse_known_args(args)
+
+    # Compile
+    returncode = subprocess.call(yasm_cmd)
+    if returncode != 0:
+        return returncode
+
+    # Capture and write depfile
+    ret = subprocess.run(yasm_cmd + ['-M'], capture_output=True)
+    if ret.returncode != 0:
+        return ret.returncode
+    with open(options.depfile, 'wb') as f:
+        f.write(ret.stdout)
+
+    return 0
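Editor's note: yasm cannot emit an object file and a depfile in one invocation, so the wrapper runs the assembler twice: once to compile, once with `-M` to capture the Makefile-style dependency text. An equivalent sketch with hypothetical file names (requires yasm on PATH):

    import subprocess

    yasm_cmd = ['yasm', '-f', 'elf64', 'foo.asm', '-o', 'foo.o']
    subprocess.call(yasm_cmd)                     # pass 1: object file
    deps = subprocess.run(yasm_cmd + ['-M'],      # pass 2: depfile text
                          capture_output=True).stdout
    open('foo.o.d', 'wb').write(deps)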
diff --git a/vendored-meson/meson/mesonbuild/templates/__init__.py b/vendored-meson/meson/mesonbuild/templates/__init__.py
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/vendored-meson/meson/mesonbuild/templates/cpptemplates.py b/vendored-meson/meson/mesonbuild/templates/cpptemplates.py
new file mode 100644
index 000000000000..6e9776180597
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/templates/cpptemplates.py
@@ -0,0 +1,187 @@
+# Copyright 2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+from mesonbuild.templates.sampleimpl import SampleImpl
+import re
+
+
+hello_cpp_template = '''#include <iostream>
+
+#define PROJECT_NAME "{project_name}"
+
+int main(int argc, char **argv) {{
+    if(argc != 1) {{
+        std::cout << argv[0] << " takes no arguments.\\n";
+        return 1;
+    }}
+    std::cout << "This is project " << PROJECT_NAME << ".\\n";
+    return 0;
+}}
+'''
+
+hello_cpp_meson_template = '''project('{project_name}', 'cpp',
+  version : '{version}',
+  default_options : ['warning_level=3',
+                     'cpp_std=c++14'])
+
+exe = executable('{exe_name}', '{source_name}',
+  install : true)
+
+test('basic', exe)
+'''
+
+lib_hpp_template = '''#pragma once
+#if defined _WIN32 || defined __CYGWIN__
+  #ifdef BUILDING_{utoken}
+    #define {utoken}_PUBLIC __declspec(dllexport)
+  #else
+    #define {utoken}_PUBLIC __declspec(dllimport)
+  #endif
+#else
+  #ifdef BUILDING_{utoken}
+      #define {utoken}_PUBLIC __attribute__ ((visibility ("default")))
+  #else
+      #define {utoken}_PUBLIC
+  #endif
+#endif
+
+namespace {namespace} {{
+
+class {utoken}_PUBLIC {class_name} {{
+
+public:
+  {class_name}();
+  int get_number() const;
+
+private:
+
+  int number;
+
+}};
+
+}}
+
+'''
+
+lib_cpp_template = '''#include <{header_file}>
+
+namespace {namespace} {{
+
+{class_name}::{class_name}() {{
+    number = 6;
+}}
+
+int {class_name}::get_number() const {{
+  return number;
+}}
+
+}}
+'''
+
+lib_cpp_test_template = '''#include <{header_file}>
+#include <iostream>
+
+int main(int argc, char **argv) {{
+    if(argc != 1) {{
+        std::cout << argv[0] << " takes no arguments.\\n";
+        return 1;
+    }}
+    {namespace}::{class_name} c;
+    return c.get_number() != 6;
+}}
+'''
+
+lib_cpp_meson_template = '''project('{project_name}', 'cpp',
+  version : '{version}',
+  default_options : ['warning_level=3', 'cpp_std=c++14'])
+
+# These arguments are only used to build the shared library
+# not the executables that use the library.
+lib_args = ['-DBUILDING_{utoken}']
+
+shlib = shared_library('{lib_name}', '{source_file}',
+  install : true,
+  cpp_args : lib_args,
+  gnu_symbol_visibility : 'hidden',
+)
+
+test_exe = executable('{test_exe_name}', '{test_source_file}',
+  link_with : shlib)
+test('{test_name}', test_exe)
+
+# Make this library usable as a Meson subproject.
+{ltoken}_dep = declare_dependency(
+  include_directories: include_directories('.'),
+  link_with : shlib)
+
+# Make this library usable from the system's
+# package manager.
+install_headers('{header_file}', subdir : '{header_dir}')
+
+pkg_mod = import('pkgconfig')
+pkg_mod.generate(
+  name : '{project_name}',
+  filebase : '{ltoken}',
+  description : 'Meson sample project.',
+  subdirs : '{header_dir}',
+  libraries : shlib,
+  version : '{version}',
+)
+'''
+
+
+class CppProject(SampleImpl):
+    def __init__(self, options):
+        super().__init__()
+        self.name = options.name
+        self.version = options.version
+
+    def create_executable(self) -> None:
+        lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
+        source_name = lowercase_token + '.cpp'
+        open(source_name, 'w', encoding='utf-8').write(hello_cpp_template.format(project_name=self.name))
+        open('meson.build', 'w', encoding='utf-8').write(
+            hello_cpp_meson_template.format(project_name=self.name,
+                                            exe_name=lowercase_token,
+                                            source_name=source_name,
+                                            version=self.version))
+
+    def create_library(self) -> None:
+        lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
+        uppercase_token = lowercase_token.upper()
+        class_name = uppercase_token[0] + lowercase_token[1:]
+        test_exe_name = lowercase_token + '_test'
+        namespace = lowercase_token
+        lib_hpp_name = lowercase_token + '.hpp'
+        lib_cpp_name = lowercase_token + '.cpp'
+        test_cpp_name = lowercase_token + '_test.cpp'
+        kwargs = {'utoken': uppercase_token,
+                  'ltoken': lowercase_token,
+                  'header_dir': lowercase_token,
+                  'class_name': class_name,
+                  'namespace': namespace,
+                  'header_file': lib_hpp_name,
+                  'source_file': lib_cpp_name,
+                  'test_source_file': test_cpp_name,
+                  'test_exe_name': test_exe_name,
+                  'project_name': self.name,
+                  'lib_name': lowercase_token,
+                  'test_name': lowercase_token,
+                  'version': self.version,
+                  }
+        open(lib_hpp_name, 'w', encoding='utf-8').write(lib_hpp_template.format(**kwargs))
+        open(lib_cpp_name, 'w', encoding='utf-8').write(lib_cpp_template.format(**kwargs))
+        open(test_cpp_name, 'w', encoding='utf-8').write(lib_cpp_test_template.format(**kwargs))
+        open('meson.build', 'w', encoding='utf-8').write(lib_cpp_meson_template.format(**kwargs))
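Editor's note: all of the template classes derive their identifiers the same way: lowercase the project name, replace anything outside [a-z0-9] with underscores, and build the upper-case and class-name variants from that token. A quick illustration with a hypothetical project name:

    import re

    name = 'My C++ Lib'  # hypothetical `meson init` project name
    lowercase_token = re.sub(r'[^a-z0-9]', '_', name.lower())
    uppercase_token = lowercase_token.upper()
    class_name = uppercase_token[0] + lowercase_token[1:]

    print(lowercase_token)  # my_c___lib   (file names, {ltoken})
    print(uppercase_token)  # MY_C___LIB   (the {utoken} export macro)
    print(class_name)       # My_c___lib   ({class_name})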
diff --git a/vendored-meson/meson/mesonbuild/templates/cstemplates.py b/vendored-meson/meson/mesonbuild/templates/cstemplates.py
new file mode 100644
index 000000000000..df09f61fd305
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/templates/cstemplates.py
@@ -0,0 +1,136 @@
+# Copyright 2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+from mesonbuild.templates.sampleimpl import SampleImpl
+import re
+
+
+hello_cs_template = '''using System;
+
+public class {class_name} {{
+    const String PROJECT_NAME = "{project_name}";
+
+    static int Main(String[] args) {{
+      if (args.Length > 0) {{
+          System.Console.WriteLine(String.Format("{project_name} takes no arguments."));
+          return 1;
+      }}
+      Console.WriteLine(String.Format("This is project {{0}}.", PROJECT_NAME));
+      return 0;
+    }}
+}}
+
+'''
+
+hello_cs_meson_template = '''project('{project_name}', 'cs',
+  version : '{version}',
+  default_options : ['warning_level=3'])
+
+exe = executable('{exe_name}', '{source_name}',
+  install : true)
+
+test('basic', exe)
+'''
+
+lib_cs_template = '''
+public class {class_name} {{
+    private const int number = 6;
+
+    public int get_number() {{
+      return number;
+    }}
+}}
+
+'''
+
+lib_cs_test_template = '''using System;
+
+public class {class_test} {{
+    static int Main(String[] args) {{
+      if (args.Length > 0) {{
+          System.Console.WriteLine("{project_name} takes no arguments.");
+          return 1;
+      }}
+      {class_name} c = new {class_name}();
+      Boolean result = true;
+      return result.CompareTo(c.get_number() != 6);
+    }}
+}}
+
+'''
+
+lib_cs_meson_template = '''project('{project_name}', 'cs',
+  version : '{version}',
+  default_options : ['warning_level=3'])
+
+stlib = shared_library('{lib_name}', '{source_file}',
+  install : true,
+)
+
+test_exe = executable('{test_exe_name}', '{test_source_file}',
+  link_with : stlib)
+test('{test_name}', test_exe)
+
+# Make this library usable as a Meson subproject.
+{ltoken}_dep = declare_dependency(
+  include_directories: include_directories('.'),
+  link_with : stlib)
+
+'''
+
+
+class CSharpProject(SampleImpl):
+    def __init__(self, options):
+        super().__init__()
+        self.name = options.name
+        self.version = options.version
+
+    def create_executable(self) -> None:
+        lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
+        uppercase_token = lowercase_token.upper()
+        class_name = uppercase_token[0] + lowercase_token[1:]
+        source_name = uppercase_token[0] + lowercase_token[1:] + '.cs'
+        open(source_name, 'w', encoding='utf-8').write(
+            hello_cs_template.format(project_name=self.name,
+                                     class_name=class_name))
+        open('meson.build', 'w', encoding='utf-8').write(
+          hello_cs_meson_template.format(project_name=self.name,
+                                         exe_name=self.name,
+                                         source_name=source_name,
+                                         version=self.version))
+
+    def create_library(self) -> None:
+        lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
+        uppercase_token = lowercase_token.upper()
+        class_name = uppercase_token[0] + lowercase_token[1:]
+        class_test = uppercase_token[0] + lowercase_token[1:] + '_test'
+        project_test = lowercase_token + '_test'
+        lib_cs_name = uppercase_token[0] + lowercase_token[1:] + '.cs'
+        test_cs_name = uppercase_token[0] + lowercase_token[1:] + '_test.cs'
+        kwargs = {'utoken': uppercase_token,
+                  'ltoken': lowercase_token,
+                  'class_test': class_test,
+                  'class_name': class_name,
+                  'source_file': lib_cs_name,
+                  'test_source_file': test_cs_name,
+                  'test_exe_name': project_test,
+                  'project_name': self.name,
+                  'lib_name': lowercase_token,
+                  'test_name': lowercase_token,
+                  'version': self.version,
+                  }
+        open(lib_cs_name, 'w', encoding='utf-8').write(lib_cs_template.format(**kwargs))
+        open(test_cs_name, 'w', encoding='utf-8').write(lib_cs_test_template.format(**kwargs))
+        open('meson.build', 'w', encoding='utf-8').write(lib_cs_meson_template.format(**kwargs))
diff --git a/vendored-meson/meson/mesonbuild/templates/ctemplates.py b/vendored-meson/meson/mesonbuild/templates/ctemplates.py
new file mode 100644
index 000000000000..0c7141a626f3
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/templates/ctemplates.py
@@ -0,0 +1,168 @@
+# Copyright 2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+from mesonbuild.templates.sampleimpl import SampleImpl
+import re
+
+
+lib_h_template = '''#pragma once
+#if defined _WIN32 || defined __CYGWIN__
+  #ifdef BUILDING_{utoken}
+    #define {utoken}_PUBLIC __declspec(dllexport)
+  #else
+    #define {utoken}_PUBLIC __declspec(dllimport)
+  #endif
+#else
+  #ifdef BUILDING_{utoken}
+      #define {utoken}_PUBLIC __attribute__ ((visibility ("default")))
+  #else
+      #define {utoken}_PUBLIC
+  #endif
+#endif
+
+int {utoken}_PUBLIC {function_name}();
+
+'''
+
+lib_c_template = '''#include <{header_file}>
+
+/* This function will not be exported and is not
+ * directly callable by users of this library.
+ */
+int internal_function() {{
+    return 0;
+}}
+
+int {function_name}() {{
+    return internal_function();
+}}
+'''
+
+lib_c_test_template = '''#include <{header_file}>
+#include <stdio.h>
+
+int main(int argc, char **argv) {{
+    if(argc != 1) {{
+        printf("%s takes no arguments.\\n", argv[0]);
+        return 1;
+    }}
+    return {function_name}();
+}}
+'''
+
+lib_c_meson_template = '''project('{project_name}', 'c',
+  version : '{version}',
+  default_options : ['warning_level=3'])
+
+# These arguments are only used to build the shared library
+# not the executables that use the library.
+lib_args = ['-DBUILDING_{utoken}']
+
+shlib = shared_library('{lib_name}', '{source_file}',
+  install : true,
+  c_args : lib_args,
+  gnu_symbol_visibility : 'hidden',
+)
+
+test_exe = executable('{test_exe_name}', '{test_source_file}',
+  link_with : shlib)
+test('{test_name}', test_exe)
+
+# Make this library usable as a Meson subproject.
+{ltoken}_dep = declare_dependency(
+  include_directories: include_directories('.'),
+  link_with : shlib)
+
+# Make this library usable from the system's
+# package manager.
+install_headers('{header_file}', subdir : '{header_dir}')
+
+pkg_mod = import('pkgconfig')
+pkg_mod.generate(
+  name : '{project_name}',
+  filebase : '{ltoken}',
+  description : 'Meson sample project.',
+  subdirs : '{header_dir}',
+  libraries : shlib,
+  version : '{version}',
+)
+'''
+
+hello_c_template = '''#include <stdio.h>
+
+#define PROJECT_NAME "{project_name}"
+
+int main(int argc, char **argv) {{
+    if(argc != 1) {{
+        printf("%s takes no arguments.\\n", argv[0]);
+        return 1;
+    }}
+    printf("This is project %s.\\n", PROJECT_NAME);
+    return 0;
+}}
+'''
+
+hello_c_meson_template = '''project('{project_name}', 'c',
+  version : '{version}',
+  default_options : ['warning_level=3'])
+
+exe = executable('{exe_name}', '{source_name}',
+  install : true)
+
+test('basic', exe)
+'''
+
+
+class CProject(SampleImpl):
+    def __init__(self, options):
+        super().__init__()
+        self.name = options.name
+        self.version = options.version
+
+    def create_executable(self) -> None:
+        lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
+        source_name = lowercase_token + '.c'
+        open(source_name, 'w', encoding='utf-8').write(hello_c_template.format(project_name=self.name))
+        open('meson.build', 'w', encoding='utf-8').write(
+            hello_c_meson_template.format(project_name=self.name,
+                                          exe_name=lowercase_token,
+                                          source_name=source_name,
+                                          version=self.version))
+
+    def create_library(self) -> None:
+        lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
+        uppercase_token = lowercase_token.upper()
+        function_name = lowercase_token[0:3] + '_func'
+        test_exe_name = lowercase_token + '_test'
+        lib_h_name = lowercase_token + '.h'
+        lib_c_name = lowercase_token + '.c'
+        test_c_name = lowercase_token + '_test.c'
+        kwargs = {'utoken': uppercase_token,
+                  'ltoken': lowercase_token,
+                  'header_dir': lowercase_token,
+                  'function_name': function_name,
+                  'header_file': lib_h_name,
+                  'source_file': lib_c_name,
+                  'test_source_file': test_c_name,
+                  'test_exe_name': test_exe_name,
+                  'project_name': self.name,
+                  'lib_name': lowercase_token,
+                  'test_name': lowercase_token,
+                  'version': self.version,
+                  }
+        open(lib_h_name, 'w', encoding='utf-8').write(lib_h_template.format(**kwargs))
+        open(lib_c_name, 'w', encoding='utf-8').write(lib_c_template.format(**kwargs))
+        open(test_c_name, 'w', encoding='utf-8').write(lib_c_test_template.format(**kwargs))
+        open('meson.build', 'w', encoding='utf-8').write(lib_c_meson_template.format(**kwargs))
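+
+
+# A minimal usage sketch (the Namespace values are illustrative, not part of
+# this diff): for a project named "Foo Bar", create_library() derives the
+# token 'foo_bar' and writes foo_bar.h, foo_bar.c, foo_bar_test.c plus a
+# matching meson.build into the current directory:
+#
+#     import argparse
+#     CProject(argparse.Namespace(name='Foo Bar', version='0.1')).create_library()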
diff --git a/vendored-meson/meson/mesonbuild/templates/cudatemplates.py b/vendored-meson/meson/mesonbuild/templates/cudatemplates.py
new file mode 100644
index 000000000000..63abd2be8751
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/templates/cudatemplates.py
@@ -0,0 +1,187 @@
+# Copyright 2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+from mesonbuild.templates.sampleimpl import SampleImpl
+import re
+
+
+hello_cuda_template = '''#include <iostream>
+
+#define PROJECT_NAME "{project_name}"
+
+int main(int argc, char **argv) {{
+    if(argc != 1) {{
+        std::cout << argv[0] << " takes no arguments.\\n";
+        return 1;
+    }}
+    std::cout << "This is project " << PROJECT_NAME << ".\\n";
+    return 0;
+}}
+'''
+
+hello_cuda_meson_template = '''project('{project_name}', ['cuda', 'cpp'],
+  version : '{version}',
+  default_options : ['warning_level=3',
+                     'cpp_std=c++14'])
+
+exe = executable('{exe_name}', '{source_name}',
+  install : true)
+
+test('basic', exe)
+'''
+
+lib_h_template = '''#pragma once
+#if defined _WIN32 || defined __CYGWIN__
+  #ifdef BUILDING_{utoken}
+    #define {utoken}_PUBLIC __declspec(dllexport)
+  #else
+    #define {utoken}_PUBLIC __declspec(dllimport)
+  #endif
+#else
+  #ifdef BUILDING_{utoken}
+      #define {utoken}_PUBLIC __attribute__ ((visibility ("default")))
+  #else
+      #define {utoken}_PUBLIC
+  #endif
+#endif
+
+namespace {namespace} {{
+
+class {utoken}_PUBLIC {class_name} {{
+
+public:
+  {class_name}();
+  int get_number() const;
+
+private:
+
+  int number;
+
+}};
+
+}}
+
+'''
+
+lib_cuda_template = '''#include <{header_file}>
+
+namespace {namespace} {{
+
+{class_name}::{class_name}() {{
+    number = 6;
+}}
+
+int {class_name}::get_number() const {{
+  return number;
+}}
+
+}}
+'''
+
+lib_cuda_test_template = '''#include <{header_file}>
+#include <iostream>
+
+int main(int argc, char **argv) {{
+    if(argc != 1) {{
+        std::cout << argv[0] << " takes no arguments.\\n";
+        return 1;
+    }}
+    {namespace}::{class_name} c;
+    return c.get_number() != 6;
+}}
+'''
+
+lib_cuda_meson_template = '''project('{project_name}', ['cuda', 'cpp'],
+  version : '{version}',
+  default_options : ['warning_level=3'])
+
+# These arguments are only used to build the shared library
+# not the executables that use the library.
+lib_args = ['-DBUILDING_{utoken}']
+
+shlib = shared_library('{lib_name}', '{source_file}',
+  install : true,
+  cpp_args : lib_args,
+  gnu_symbol_visibility : 'hidden',
+)
+
+test_exe = executable('{test_exe_name}', '{test_source_file}',
+  link_with : shlib)
+test('{test_name}', test_exe)
+
+# Make this library usable as a Meson subproject.
+{ltoken}_dep = declare_dependency(
+  include_directories: include_directories('.'),
+  link_with : shlib)
+
+# Make this library usable from the system's
+# package manager.
+install_headers('{header_file}', subdir : '{header_dir}')
+
+pkg_mod = import('pkgconfig')
+pkg_mod.generate(
+  name : '{project_name}',
+  filebase : '{ltoken}',
+  description : 'Meson sample project.',
+  subdirs : '{header_dir}',
+  libraries : shlib,
+  version : '{version}',
+)
+'''
+
+
+class CudaProject(SampleImpl):
+    def __init__(self, options):
+        super().__init__()
+        self.name = options.name
+        self.version = options.version
+
+    def create_executable(self) -> None:
+        lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
+        source_name = lowercase_token + '.cu'
+        open(source_name, 'w', encoding='utf-8').write(hello_cuda_template.format(project_name=self.name))
+        open('meson.build', 'w', encoding='utf-8').write(
+            hello_cuda_meson_template.format(project_name=self.name,
+                                             exe_name=lowercase_token,
+                                             source_name=source_name,
+                                             version=self.version))
+
+    def create_library(self) -> None:
+        lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
+        uppercase_token = lowercase_token.upper()
+        class_name = uppercase_token[0] + lowercase_token[1:]
+        test_exe_name = lowercase_token + '_test'
+        namespace = lowercase_token
+        lib_h_name = lowercase_token + '.h'
+        lib_cuda_name = lowercase_token + '.cu'
+        test_cuda_name = lowercase_token + '_test.cu'
+        kwargs = {'utoken': uppercase_token,
+                  'ltoken': lowercase_token,
+                  'header_dir': lowercase_token,
+                  'class_name': class_name,
+                  'namespace': namespace,
+                  'header_file': lib_h_name,
+                  'source_file': lib_cuda_name,
+                  'test_source_file': test_cuda_name,
+                  'test_exe_name': test_exe_name,
+                  'project_name': self.name,
+                  'lib_name': lowercase_token,
+                  'test_name': lowercase_token,
+                  'version': self.version,
+                  }
+        open(lib_h_name, 'w', encoding='utf-8').write(lib_h_template.format(**kwargs))
+        open(lib_cuda_name, 'w', encoding='utf-8').write(lib_cuda_template.format(**kwargs))
+        open(test_cuda_name, 'w', encoding='utf-8').write(lib_cuda_test_template.format(**kwargs))
+        open('meson.build', 'w', encoding='utf-8').write(lib_cuda_meson_template.format(**kwargs))
diff --git a/vendored-meson/meson/mesonbuild/templates/dlangtemplates.py b/vendored-meson/meson/mesonbuild/templates/dlangtemplates.py
new file mode 100644
index 000000000000..81840fe111a0
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/templates/dlangtemplates.py
@@ -0,0 +1,145 @@
+# Copyright 2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+from mesonbuild.templates.sampleimpl import SampleImpl
+import re
+
+
+hello_d_template = '''module main;
+import std.stdio;
+
+enum PROJECT_NAME = "{project_name}";
+
+int main(string[] args) {{
+    if (args.length != 1){{
+        writefln("%s takes no arguments.\\n", args[0]);
+        return 1;
+    }}
+    writefln("This is project %s.\\n", PROJECT_NAME);
+    return 0;
+}}
+'''
+
+hello_d_meson_template = '''project('{project_name}', 'd',
+    version : '{version}',
+    default_options: ['warning_level=3'])
+
+exe = executable('{exe_name}', '{source_name}',
+  install : true)
+
+test('basic', exe)
+'''
+
+lib_d_template = '''module {module_file};
+
+/* This function will not be exported and is not
+ * directly callable by users of this library.
+ */
+int internal_function() {{
+    return 0;
+}}
+
+int {function_name}() {{
+    return internal_function();
+}}
+'''
+
+lib_d_test_template = '''module {module_file}_test;
+import std.stdio;
+import {module_file};
+
+
+int main(string[] args) {{
+    if (args.length != 1){{
+        writefln("%s takes no arguments.\\n", args[0]);
+        return 1;
+    }}
+    return {function_name}();
+}}
+'''
+
+lib_d_meson_template = '''project('{project_name}', 'd',
+  version : '{version}',
+  default_options : ['warning_level=3'])
+
+stlib = static_library('{lib_name}', '{source_file}',
+  install : true,
+  gnu_symbol_visibility : 'hidden',
+)
+
+test_exe = executable('{test_exe_name}', '{test_source_file}',
+  link_with : stlib)
+test('{test_name}', test_exe)
+
+# Make this library usable as a Meson subproject.
+{ltoken}_dep = declare_dependency(
+  include_directories: include_directories('.'),
+  link_with : stlib)
+
+# Make this library usable from the Dlang
+# build system.
+dlang_mod = import('dlang')
+if find_program('dub', required: false).found()
+  dlang_mod.generate_dub_file(meson.project_name().to_lower(), meson.source_root(),
+    name : meson.project_name(),
+    license: meson.project_license(),
+    sourceFiles : '{source_file}',
+    description : 'Meson sample project.',
+    version : '{version}',
+  )
+endif
+'''
+
+
+class DlangProject(SampleImpl):
+    def __init__(self, options):
+        super().__init__()
+        self.name = options.name
+        self.version = options.version
+
+    def create_executable(self) -> None:
+        lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
+        source_name = lowercase_token + '.d'
+        open(source_name, 'w', encoding='utf-8').write(hello_d_template.format(project_name=self.name))
+        open('meson.build', 'w', encoding='utf-8').write(
+            hello_d_meson_template.format(project_name=self.name,
+                                          exe_name=lowercase_token,
+                                          source_name=source_name,
+                                          version=self.version))
+
+    def create_library(self) -> None:
+        lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
+        uppercase_token = lowercase_token.upper()
+        function_name = lowercase_token[0:3] + '_func'
+        test_exe_name = lowercase_token + '_test'
+        lib_m_name = lowercase_token
+        lib_d_name = lowercase_token + '.d'
+        test_d_name = lowercase_token + '_test.d'
+        kwargs = {'utoken': uppercase_token,
+                  'ltoken': lowercase_token,
+                  'header_dir': lowercase_token,
+                  'function_name': function_name,
+                  'module_file': lib_m_name,
+                  'source_file': lib_d_name,
+                  'test_source_file': test_d_name,
+                  'test_exe_name': test_exe_name,
+                  'project_name': self.name,
+                  'lib_name': lowercase_token,
+                  'test_name': lowercase_token,
+                  'version': self.version,
+                  }
+        open(lib_d_name, 'w', encoding='utf-8').write(lib_d_template.format(**kwargs))
+        open(test_d_name, 'w', encoding='utf-8').write(lib_d_test_template.format(**kwargs))
+        open('meson.build', 'w', encoding='utf-8').write(lib_d_meson_template.format(**kwargs))
diff --git a/vendored-meson/meson/mesonbuild/templates/fortrantemplates.py b/vendored-meson/meson/mesonbuild/templates/fortrantemplates.py
new file mode 100644
index 000000000000..00cd509702e9
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/templates/fortrantemplates.py
@@ -0,0 +1,142 @@
+# Copyright 2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+from mesonbuild.templates.sampleimpl import SampleImpl
+import re
+
+lib_fortran_template = '''
+! This procedure will not be exported and is not
+! directly callable by users of this library.
+
+module modfoo
+
+implicit none
+private
+public :: {function_name}
+
+contains
+
+integer function internal_function()
+    internal_function = 0
+end function internal_function
+
+integer function {function_name}()
+    {function_name} = internal_function()
+end function {function_name}
+
+end module modfoo
+'''
+
+lib_fortran_test_template = '''
+use modfoo
+
+print *,{function_name}()
+
+end program
+'''
+
+lib_fortran_meson_template = '''project('{project_name}', 'fortran',
+  version : '{version}',
+  default_options : ['warning_level=3'])
+
+# These arguments are only used to build the shared library
+# not the executables that use the library.
+lib_args = ['-DBUILDING_{utoken}']
+
+shlib = shared_library('{lib_name}', '{source_file}',
+  install : true,
+  fortran_args : lib_args,
+  gnu_symbol_visibility : 'hidden',
+)
+
+test_exe = executable('{test_exe_name}', '{test_source_file}',
+  link_with : shlib)
+test('{test_name}', test_exe)
+
+# Make this library usable as a Meson subproject.
+{ltoken}_dep = declare_dependency(
+  include_directories: include_directories('.'),
+  link_with : shlib)
+
+pkg_mod = import('pkgconfig')
+pkg_mod.generate(
+  name : '{project_name}',
+  filebase : '{ltoken}',
+  description : 'Meson sample project.',
+  subdirs : '{header_dir}',
+  libraries : shlib,
+  version : '{version}',
+)
+'''
+
+hello_fortran_template = '''
+implicit none
+
+character(len=*), parameter :: PROJECT_NAME = "{project_name}"
+
+print *,"This is project ", PROJECT_NAME
+
+end program
+'''
+
+hello_fortran_meson_template = '''project('{project_name}', 'fortran',
+  version : '{version}',
+  default_options : ['warning_level=3'])
+
+exe = executable('{exe_name}', '{source_name}',
+  install : true)
+
+test('basic', exe)
+'''
+
+
+class FortranProject(SampleImpl):
+    def __init__(self, options):
+        super().__init__()
+        self.name = options.name
+        self.version = options.version
+
+    def create_executable(self) -> None:
+        lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
+        source_name = lowercase_token + '.f90'
+        open(source_name, 'w', encoding='utf-8').write(hello_fortran_template.format(project_name=self.name))
+        open('meson.build', 'w', encoding='utf-8').write(
+            hello_fortran_meson_template.format(project_name=self.name,
+                                                exe_name=lowercase_token,
+                                                source_name=source_name,
+                                                version=self.version))
+
+    def create_library(self) -> None:
+        lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
+        uppercase_token = lowercase_token.upper()
+        function_name = lowercase_token[0:3] + '_func'
+        test_exe_name = lowercase_token + '_test'
+        lib_fortran_name = lowercase_token + '.f90'
+        test_fortran_name = lowercase_token + '_test.f90'
+        kwargs = {'utoken': uppercase_token,
+                  'ltoken': lowercase_token,
+                  'header_dir': lowercase_token,
+                  'function_name': function_name,
+                  'source_file': lib_fortran_name,
+                  'test_source_file': test_fortran_name,
+                  'test_exe_name': test_exe_name,
+                  'project_name': self.name,
+                  'lib_name': lowercase_token,
+                  'test_name': lowercase_token,
+                  'version': self.version,
+                  }
+        open(lib_fortran_name, 'w', encoding='utf-8').write(lib_fortran_template.format(**kwargs))
+        open(test_fortran_name, 'w', encoding='utf-8').write(lib_fortran_test_template.format(**kwargs))
+        open('meson.build', 'w', encoding='utf-8').write(lib_fortran_meson_template.format(**kwargs))
diff --git a/vendored-meson/meson/mesonbuild/templates/javatemplates.py b/vendored-meson/meson/mesonbuild/templates/javatemplates.py
new file mode 100644
index 000000000000..58d48bac1d20
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/templates/javatemplates.py
@@ -0,0 +1,138 @@
+# Copyright 2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+from mesonbuild.templates.sampleimpl import SampleImpl
+import re
+
+
+hello_java_template = '''
+
+public class {class_name} {{
+    final static String PROJECT_NAME = "{project_name}";
+
+    public static void main (String args[]) {{
+        if(args.length != 0) {{
+            System.out.println(args + " takes no arguments.");
+            System.exit(1);
+        }}
+        System.out.println("This is project " + PROJECT_NAME + ".");
+        System.exit(0);
+    }}
+}}
+
+'''
+
+hello_java_meson_template = '''project('{project_name}', 'java',
+  version : '{version}',
+  default_options : ['warning_level=3'])
+
+exe = jar('{exe_name}', '{source_name}',
+  main_class : '{exe_name}',
+  install : true)
+
+test('basic', exe)
+'''
+
+lib_java_template = '''
+
+public class {class_name} {{
+    final static int number = 6;
+
+    public final int get_number() {{
+      return number;
+    }}
+}}
+
+'''
+
+lib_java_test_template = '''
+
+public class {class_test} {{
+    public static void main (String args[]) {{
+        if(args.length != 0) {{
+            System.out.println(args + " takes no arguments.");
+            System.exit(1);
+        }}
+
+        {class_name} c = new {class_name}();
+        Boolean result = true;
+        System.exit(result.compareTo(c.get_number() != 6));
+    }}
+}}
+
+'''
+
+lib_java_meson_template = '''project('{project_name}', 'java',
+  version : '{version}',
+  default_options : ['warning_level=3'])
+
+jarlib = jar('{class_name}', '{source_file}',
+  main_class : '{class_name}',
+  install : true,
+)
+
+test_jar = jar('{class_test}', '{test_source_file}',
+  main_class : '{class_test}',
+  link_with : jarlib)
+test('{test_name}', test_jar)
+
+# Make this library usable as a Meson subproject.
+{ltoken}_dep = declare_dependency(
+  include_directories: include_directories('.'),
+  link_with : jarlib)
+'''
+
+
+class JavaProject(SampleImpl):
+    def __init__(self, options):
+        super().__init__()
+        self.name = options.name
+        self.version = options.version
+
+    def create_executable(self) -> None:
+        lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
+        uppercase_token = lowercase_token.upper()
+        class_name = uppercase_token[0] + lowercase_token[1:]
+        source_name = uppercase_token[0] + lowercase_token[1:] + '.java'
+        open(source_name, 'w', encoding='utf-8').write(
+            hello_java_template.format(project_name=self.name,
+                                       class_name=class_name))
+        open('meson.build', 'w', encoding='utf-8').write(
+            hello_java_meson_template.format(project_name=self.name,
+                                             exe_name=class_name,
+                                             source_name=source_name,
+                                             version=self.version))
+
+    def create_library(self) -> None:
+        lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
+        uppercase_token = lowercase_token.upper()
+        class_name = uppercase_token[0] + lowercase_token[1:]
+        class_test = uppercase_token[0] + lowercase_token[1:] + '_test'
+        lib_java_name = uppercase_token[0] + lowercase_token[1:] + '.java'
+        test_java_name = uppercase_token[0] + lowercase_token[1:] + '_test.java'
+        kwargs = {'utoken': uppercase_token,
+                  'ltoken': lowercase_token,
+                  'class_test': class_test,
+                  'class_name': class_name,
+                  'source_file': lib_java_name,
+                  'test_source_file': test_java_name,
+                  'project_name': self.name,
+                  'lib_name': lowercase_token,
+                  'test_name': lowercase_token,
+                  'version': self.version,
+                  }
+        open(lib_java_name, 'w', encoding='utf-8').write(lib_java_template.format(**kwargs))
+        open(test_java_name, 'w', encoding='utf-8').write(lib_java_test_template.format(**kwargs))
+        open('meson.build', 'w', encoding='utf-8').write(lib_java_meson_template.format(**kwargs))
diff --git a/vendored-meson/meson/mesonbuild/templates/mesontemplates.py b/vendored-meson/meson/mesonbuild/templates/mesontemplates.py
new file mode 100644
index 000000000000..2868f7b53ec0
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/templates/mesontemplates.py
@@ -0,0 +1,77 @@
+# Copyright 2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+import argparse
+
+meson_executable_template = '''project('{project_name}', {language},
+  version : '{version}',
+  default_options : [{default_options}])
+
+executable('{executable}',
+           {sourcespec},{depspec}
+           install : true)
+'''
+
+
+meson_jar_template = '''project('{project_name}', '{language}',
+  version : '{version}',
+  default_options : [{default_options}])
+
+jar('{executable}',
+    {sourcespec},{depspec}
+    main_class: '{main_class}',
+    install : true)
+'''
+
+
+def create_meson_build(options: argparse.Namespace) -> None:
+    if options.type != 'executable':
+        raise SystemExit('\nGenerating a meson.build file from existing sources is\n'
+                         'supported only for project type "executable".\n'
+                         'Run meson init in an empty directory to create a sample project.')
+    default_options = ['warning_level=3']
+    if options.language == 'cpp':
+        # This shows how to set this very common option.
+        default_options += ['cpp_std=c++14']
+    # If we get a meson.build autoformatter one day, this code could
+    # be simplified quite a bit.
+    formatted_default_options = ', '.join(f"'{x}'" for x in default_options)
+    sourcespec = ',\n           '.join(f"'{x}'" for x in options.srcfiles)
+    depspec = ''
+    if options.deps:
+        depspec = '\n           dependencies : [\n              '
+        depspec += ',\n              '.join(f"dependency('{x}')"
+                                            for x in options.deps.split(','))
+        depspec += '],'
+    if options.language != 'java':
+        language = f"'{options.language}'" if options.language != 'vala' else ['c', 'vala']
+        content = meson_executable_template.format(project_name=options.name,
+                                                   language=language,
+                                                   version=options.version,
+                                                   executable=options.executable,
+                                                   sourcespec=sourcespec,
+                                                   depspec=depspec,
+                                                   default_options=formatted_default_options)
+    else:
+        content = meson_jar_template.format(project_name=options.name,
+                                            language=options.language,
+                                            version=options.version,
+                                            executable=options.executable,
+                                            main_class=options.name,
+                                            sourcespec=sourcespec,
+                                            depspec=depspec,
+                                            default_options=formatted_default_options)
+    open('meson.build', 'w', encoding='utf-8').write(content)
+    print('Generated meson.build file:\n\n' + content)
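+
+
+# A hedged sketch of the generated file (all option values illustrative):
+# with name='demo', language='c', version='0.1', executable='demo',
+# srcfiles=['main.c'] and deps='zlib', create_meson_build() emits roughly:
+#
+#     project('demo', 'c',
+#       version : '0.1',
+#       default_options : ['warning_level=3'])
+#
+#     executable('demo',
+#                'main.c',
+#                dependencies : [
+#                   dependency('zlib')],
+#                install : true)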
diff --git a/vendored-meson/meson/mesonbuild/templates/objcpptemplates.py b/vendored-meson/meson/mesonbuild/templates/objcpptemplates.py
new file mode 100644
index 000000000000..450f2b03035c
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/templates/objcpptemplates.py
@@ -0,0 +1,168 @@
+# Copyright 2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+from mesonbuild.templates.sampleimpl import SampleImpl
+import re
+
+
+lib_h_template = '''#pragma once
+#if defined _WIN32 || defined __CYGWIN__
+  #ifdef BUILDING_{utoken}
+    #define {utoken}_PUBLIC __declspec(dllexport)
+  #else
+    #define {utoken}_PUBLIC __declspec(dllimport)
+  #endif
+#else
+  #ifdef BUILDING_{utoken}
+      #define {utoken}_PUBLIC __attribute__ ((visibility ("default")))
+  #else
+      #define {utoken}_PUBLIC
+  #endif
+#endif
+
+int {utoken}_PUBLIC {function_name}();
+
+'''
+
+lib_objcpp_template = '''#import <{header_file}>
+
+/* This function will not be exported and is not
+ * directly callable by users of this library.
+ */
+int internal_function() {{
+    return 0;
+}}
+
+int {function_name}() {{
+    return internal_function();
+}}
+'''
+
+lib_objcpp_test_template = '''#import <{header_file}>
+#import <iostream>
+
+int main(int argc, char **argv) {{
+    if(argc != 1) {{
+        std::cout << argv[0] << " takes no arguments." << std::endl;
+        return 1;
+    }}
+    return {function_name}();
+}}
+'''
+
+lib_objcpp_meson_template = '''project('{project_name}', 'objcpp',
+  version : '{version}',
+  default_options : ['warning_level=3'])
+
+# These arguments are only used to build the shared library
+# not the executables that use the library.
+lib_args = ['-DBUILDING_{utoken}']
+
+shlib = shared_library('{lib_name}', '{source_file}',
+  install : true,
+  objcpp_args : lib_args,
+  gnu_symbol_visibility : 'hidden',
+)
+
+test_exe = executable('{test_exe_name}', '{test_source_file}',
+  link_with : shlib)
+test('{test_name}', test_exe)
+
+# Make this library usable as a Meson subproject.
+{ltoken}_dep = declare_dependency(
+  include_directories: include_directories('.'),
+  link_with : shlib)
+
+# Make this library usable from the system's
+# package manager.
+install_headers('{header_file}', subdir : '{header_dir}')
+
+pkg_mod = import('pkgconfig')
+pkg_mod.generate(
+  name : '{project_name}',
+  filebase : '{ltoken}',
+  description : 'Meson sample project.',
+  subdirs : '{header_dir}',
+  libraries : shlib,
+  version : '{version}',
+)
+'''
+
+hello_objcpp_template = '''#import <iostream>
+
+#define PROJECT_NAME "{project_name}"
+
+int main(int argc, char **argv) {{
+    if(argc != 1) {{
+        std::cout << argv[0] << " takes no arguments." << std::endl;
+        return 1;
+    }}
+    std::cout << "This is project " << PROJECT_NAME << "." << std::endl;
+    return 0;
+}}
+'''
+
+hello_objcpp_meson_template = '''project('{project_name}', 'objcpp',
+  version : '{version}',
+  default_options : ['warning_level=3'])
+
+exe = executable('{exe_name}', '{source_name}',
+  install : true)
+
+test('basic', exe)
+'''
+
+
+class ObjCppProject(SampleImpl):
+    def __init__(self, options):
+        super().__init__()
+        self.name = options.name
+        self.version = options.version
+
+    def create_executable(self) -> None:
+        lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
+        source_name = lowercase_token + '.mm'
+        open(source_name, 'w', encoding='utf-8').write(hello_objcpp_template.format(project_name=self.name))
+        open('meson.build', 'w', encoding='utf-8').write(
+            hello_objcpp_meson_template.format(project_name=self.name,
+                                               exe_name=lowercase_token,
+                                               source_name=source_name,
+                                               version=self.version))
+
+    def create_library(self) -> None:
+        lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
+        uppercase_token = lowercase_token.upper()
+        function_name = lowercase_token[0:3] + '_func'
+        test_exe_name = lowercase_token + '_test'
+        lib_h_name = lowercase_token + '.h'
+        lib_objcpp_name = lowercase_token + '.mm'
+        test_objcpp_name = lowercase_token + '_test.mm'
+        kwargs = {'utoken': uppercase_token,
+                  'ltoken': lowercase_token,
+                  'header_dir': lowercase_token,
+                  'function_name': function_name,
+                  'header_file': lib_h_name,
+                  'source_file': lib_objcpp_name,
+                  'test_source_file': test_objcpp_name,
+                  'test_exe_name': test_exe_name,
+                  'project_name': self.name,
+                  'lib_name': lowercase_token,
+                  'test_name': lowercase_token,
+                  'version': self.version,
+                  }
+        open(lib_h_name, 'w', encoding='utf-8').write(lib_h_template.format(**kwargs))
+        open(lib_objcpp_name, 'w', encoding='utf-8').write(lib_objcpp_template.format(**kwargs))
+        open(test_objcpp_name, 'w', encoding='utf-8').write(lib_objcpp_test_template.format(**kwargs))
+        open('meson.build', 'w', encoding='utf-8').write(lib_objcpp_meson_template.format(**kwargs))
diff --git a/vendored-meson/meson/mesonbuild/templates/objctemplates.py b/vendored-meson/meson/mesonbuild/templates/objctemplates.py
new file mode 100644
index 000000000000..2e035269f65c
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/templates/objctemplates.py
@@ -0,0 +1,168 @@
+# Copyright 2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+from mesonbuild.templates.sampleimpl import SampleImpl
+import re
+
+
+lib_h_template = '''#pragma once
+#if defined _WIN32 || defined __CYGWIN__
+  #ifdef BUILDING_{utoken}
+    #define {utoken}_PUBLIC __declspec(dllexport)
+  #else
+    #define {utoken}_PUBLIC __declspec(dllimport)
+  #endif
+#else
+  #ifdef BUILDING_{utoken}
+      #define {utoken}_PUBLIC __attribute__ ((visibility ("default")))
+  #else
+      #define {utoken}_PUBLIC
+  #endif
+#endif
+
+int {utoken}_PUBLIC {function_name}();
+
+'''
+
+lib_objc_template = '''#import <{header_file}>
+
+/* This function will not be exported and is not
+ * directly callable by users of this library.
+ */
+int internal_function() {{
+    return 0;
+}}
+
+int {function_name}() {{
+    return internal_function();
+}}
+'''
+
+lib_objc_test_template = '''#import <{header_file}>
+#import <stdio.h>
+
+int main(int argc, char **argv) {{
+    if(argc != 1) {{
+        printf("%s takes no arguments.\\n", argv[0]);
+        return 1;
+    }}
+    return {function_name}();
+}}
+'''
+
+lib_objc_meson_template = '''project('{project_name}', 'objc',
+  version : '{version}',
+  default_options : ['warning_level=3'])
+
+# These arguments are only used to build the shared library
+# not the executables that use the library.
+lib_args = ['-DBUILDING_{utoken}']
+
+shlib = shared_library('{lib_name}', '{source_file}',
+  install : true,
+  objc_args : lib_args,
+  gnu_symbol_visibility : 'hidden',
+)
+
+test_exe = executable('{test_exe_name}', '{test_source_file}',
+  link_with : shlib)
+test('{test_name}', test_exe)
+
+# Make this library usable as a Meson subproject.
+{ltoken}_dep = declare_dependency(
+  include_directories: include_directories('.'),
+  link_with : shlib)
+
+# Make this library usable from the system's
+# package manager.
+install_headers('{header_file}', subdir : '{header_dir}')
+
+pkg_mod = import('pkgconfig')
+pkg_mod.generate(
+  name : '{project_name}',
+  filebase : '{ltoken}',
+  description : 'Meson sample project.',
+  subdirs : '{header_dir}',
+  libraries : shlib,
+  version : '{version}',
+)
+'''
+
+hello_objc_template = '''#import <stdio.h>
+
+#define PROJECT_NAME "{project_name}"
+
+int main(int argc, char **argv) {{
+    if(argc != 1) {{
+        printf("%s takes no arguments.\\n", argv[0]);
+        return 1;
+    }}
+    printf("This is project %s.\\n", PROJECT_NAME);
+    return 0;
+}}
+'''
+
+hello_objc_meson_template = '''project('{project_name}', 'objc',
+  version : '{version}',
+  default_options : ['warning_level=3'])
+
+exe = executable('{exe_name}', '{source_name}',
+  install : true)
+
+test('basic', exe)
+'''
+
+
+class ObjCProject(SampleImpl):
+    def __init__(self, options):
+        super().__init__()
+        self.name = options.name
+        self.version = options.version
+
+    def create_executable(self) -> None:
+        lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
+        source_name = lowercase_token + '.m'
+        open(source_name, 'w', encoding='utf-8').write(hello_objc_template.format(project_name=self.name))
+        open('meson.build', 'w', encoding='utf-8').write(
+            hello_objc_meson_template.format(project_name=self.name,
+                                             exe_name=lowercase_token,
+                                             source_name=source_name,
+                                             version=self.version))
+
+    def create_library(self) -> None:
+        lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
+        uppercase_token = lowercase_token.upper()
+        function_name = lowercase_token[0:3] + '_func'
+        test_exe_name = lowercase_token + '_test'
+        lib_h_name = lowercase_token + '.h'
+        lib_objc_name = lowercase_token + '.m'
+        test_objc_name = lowercase_token + '_test.m'
+        kwargs = {'utoken': uppercase_token,
+                  'ltoken': lowercase_token,
+                  'header_dir': lowercase_token,
+                  'function_name': function_name,
+                  'header_file': lib_h_name,
+                  'source_file': lib_objc_name,
+                  'test_source_file': test_objc_name,
+                  'test_exe_name': test_exe_name,
+                  'project_name': self.name,
+                  'lib_name': lowercase_token,
+                  'test_name': lowercase_token,
+                  'version': self.version,
+                  }
+        open(lib_h_name, 'w', encoding='utf-8').write(lib_h_template.format(**kwargs))
+        open(lib_objc_name, 'w', encoding='utf-8').write(lib_objc_template.format(**kwargs))
+        open(test_objc_name, 'w', encoding='utf-8').write(lib_objc_test_template.format(**kwargs))
+        open('meson.build', 'w', encoding='utf-8').write(lib_objc_meson_template.format(**kwargs))
diff --git a/vendored-meson/meson/mesonbuild/templates/rusttemplates.py b/vendored-meson/meson/mesonbuild/templates/rusttemplates.py
new file mode 100644
index 000000000000..0dde5474ce99
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/templates/rusttemplates.py
@@ -0,0 +1,115 @@
+# Copyright 2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+from mesonbuild.templates.sampleimpl import SampleImpl
+import re
+
+
+lib_rust_template = '''#![crate_name = "{crate_file}"]
+
+/* This function will not be exported and is not
+ * directly callable by users of this library.
+ */
+fn internal_function() -> i32 {{
+    return 0;
+}}
+
+pub fn {function_name}() -> i32 {{
+    return internal_function();
+}}
+'''
+
+lib_rust_test_template = '''extern crate {crate_file};
+
+fn main() {{
+    println!("printing: {{}}", {crate_file}::{function_name}());
+}}
+'''
+
+
+lib_rust_meson_template = '''project('{project_name}', 'rust',
+  version : '{version}',
+  default_options : ['warning_level=3'])
+
+shlib = static_library('{lib_name}', '{source_file}', install : true)
+
+test_exe = executable('{test_exe_name}', '{test_source_file}',
+  link_with : shlib)
+test('{test_name}', test_exe)
+
+# Make this library usable as a Meson subproject.
+{ltoken}_dep = declare_dependency(
+  include_directories: include_directories('.'),
+  link_with : shlib)
+'''
+
+hello_rust_template = '''
+fn main() {{
+    let project_name = "{project_name}";
+    println!("This is project {{}}.\\n", project_name);
+}}
+'''
+
+hello_rust_meson_template = '''project('{project_name}', 'rust',
+  version : '{version}',
+  default_options : ['warning_level=3'])
+
+exe = executable('{exe_name}', '{source_name}',
+  install : true)
+
+test('basic', exe)
+'''
+
+
+class RustProject(SampleImpl):
+    def __init__(self, options):
+        super().__init__()
+        self.name = options.name
+        self.version = options.version
+
+    def create_executable(self) -> None:
+        lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
+        source_name = lowercase_token + '.rs'
+        open(source_name, 'w', encoding='utf-8').write(hello_rust_template.format(project_name=self.name))
+        open('meson.build', 'w', encoding='utf-8').write(
+            hello_rust_meson_template.format(project_name=self.name,
+                                             exe_name=lowercase_token,
+                                             source_name=source_name,
+                                             version=self.version))
+
+    def create_library(self) -> None:
+        lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
+        uppercase_token = lowercase_token.upper()
+        function_name = lowercase_token[0:3] + '_func'
+        test_exe_name = lowercase_token + '_test'
+        lib_crate_name = lowercase_token
+        lib_rs_name = lowercase_token + '.rs'
+        test_rs_name = lowercase_token + '_test.rs'
+        kwargs = {'utoken': uppercase_token,
+                  'ltoken': lowercase_token,
+                  'header_dir': lowercase_token,
+                  'function_name': function_name,
+                  'crate_file': lib_crate_name,
+                  'source_file': lib_rs_name,
+                  'test_source_file': test_rs_name,
+                  'test_exe_name': test_exe_name,
+                  'project_name': self.name,
+                  'lib_name': lowercase_token,
+                  'test_name': lowercase_token,
+                  'version': self.version,
+                  }
+        open(lib_rs_name, 'w', encoding='utf-8').write(lib_rust_template.format(**kwargs))
+        open(test_rs_name, 'w', encoding='utf-8').write(lib_rust_test_template.format(**kwargs))
+        open('meson.build', 'w', encoding='utf-8').write(lib_rust_meson_template.format(**kwargs))
diff --git a/vendored-meson/meson/mesonbuild/templates/samplefactory.py b/vendored-meson/meson/mesonbuild/templates/samplefactory.py
new file mode 100644
index 000000000000..195083764f91
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/templates/samplefactory.py
@@ -0,0 +1,44 @@
+# Copyright 2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+from mesonbuild.templates.valatemplates import ValaProject
+from mesonbuild.templates.fortrantemplates import FortranProject
+from mesonbuild.templates.objcpptemplates import ObjCppProject
+from mesonbuild.templates.dlangtemplates import DlangProject
+from mesonbuild.templates.rusttemplates import RustProject
+from mesonbuild.templates.javatemplates import JavaProject
+from mesonbuild.templates.cudatemplates import CudaProject
+from mesonbuild.templates.objctemplates import ObjCProject
+from mesonbuild.templates.cpptemplates import CppProject
+from mesonbuild.templates.cstemplates import CSharpProject
+from mesonbuild.templates.ctemplates import CProject
+from mesonbuild.templates.sampleimpl import SampleImpl
+
+import argparse
+
+def sameple_generator(options: argparse.Namespace) -> SampleImpl:
+    return {
+        'c': CProject,
+        'cpp': CppProject,
+        'cs': CSharpProject,
+        'cuda': CudaProject,
+        'objc': ObjCProject,
+        'objcpp': ObjCppProject,
+        'java': JavaProject,
+        'd': DlangProject,
+        'rust': RustProject,
+        'fortran': FortranProject,
+        'vala': ValaProject
+    }[options.language](options)
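+
+
+# Usage sketch (the Namespace fields are illustrative): the factory maps a
+# language name onto its SampleImpl subclass and instantiates it, e.g.
+#
+#     import argparse
+#     impl = sameple_generator(argparse.Namespace(language='c', name='demo',
+#                                                 version='0.1'))
+#     impl.create_executable()  # writes demo.c and a meson.build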
diff --git a/vendored-meson/meson/mesonbuild/templates/sampleimpl.py b/vendored-meson/meson/mesonbuild/templates/sampleimpl.py
new file mode 100644
index 000000000000..9702ae884b8e
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/templates/sampleimpl.py
@@ -0,0 +1,22 @@
+# Copyright 2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+
+class SampleImpl:
+    def create_executable(self) -> None:
+        raise NotImplementedError('Sample implementation for "executable" not implemented!')
+
+    def create_library(self) -> None:
+        raise NotImplementedError('Sample implementation for "library" not implemented!')
diff --git a/vendored-meson/meson/mesonbuild/templates/valatemplates.py b/vendored-meson/meson/mesonbuild/templates/valatemplates.py
new file mode 100644
index 000000000000..ef9794dc2b17
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/templates/valatemplates.py
@@ -0,0 +1,125 @@
+# Copyright 2019 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+from mesonbuild.templates.sampleimpl import SampleImpl
+import re
+
+
+hello_vala_template = '''void main (string[] args) {{
+    stdout.printf ("Hello {project_name}!\\n");
+}}
+'''
+
+hello_vala_meson_template = '''project('{project_name}', ['c', 'vala'],
+  version : '{version}')
+
+dependencies = [
+    dependency('glib-2.0'),
+    dependency('gobject-2.0'),
+]
+
+exe = executable('{exe_name}', '{source_name}', dependencies : dependencies,
+  install : true)
+
+test('basic', exe)
+'''
+
+
+lib_vala_template = '''namespace {namespace} {{
+    public int sum(int a, int b) {{
+        return(a + b);
+    }}
+
+    public int square(int a) {{
+        return(a * a);
+    }}
+}}
+'''
+
+lib_vala_test_template = '''using {namespace};
+
+public void main() {{
+    stdout.printf("\nTesting shlib");
+    stdout.printf("\n\t2 + 3 is %d", sum(2, 3));
+    stdout.printf("\n\t8 squared is %d\\n", square(8));
+}}
+'''
+
+lib_vala_meson_template = '''project('{project_name}', ['c', 'vala'],
+  version : '{version}')
+
+dependencies = [
+    dependency('glib-2.0'),
+    dependency('gobject-2.0'),
+]
+
+# These arguments are only used to build the shared library
+# not the executables that use the library.
+shlib = shared_library('foo', '{source_file}',
+               dependencies: dependencies,
+               install: true,
+               install_dir: [true, true, true])
+
+test_exe = executable('{test_exe_name}', '{test_source_file}', dependencies : dependencies,
+  link_with : shlib)
+test('{test_name}', test_exe)
+
+# Make this library usable as a Meson subproject.
+{ltoken}_dep = declare_dependency(
+  include_directories: include_directories('.'),
+  link_with : shlib)
+'''
+
+
+class ValaProject(SampleImpl):
+    def __init__(self, options):
+        super().__init__()
+        self.name = options.name
+        self.version = options.version
+
+    def create_executable(self) -> None:
+        lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
+        source_name = lowercase_token + '.vala'
+        open(source_name, 'w', encoding='utf-8').write(hello_vala_template.format(project_name=self.name))
+        open('meson.build', 'w', encoding='utf-8').write(
+            hello_vala_meson_template.format(project_name=self.name,
+                                             exe_name=lowercase_token,
+                                             source_name=source_name,
+                                             version=self.version))
+
+    def create_library(self) -> None:
+        lowercase_token = re.sub(r'[^a-z0-9]', '_', self.name.lower())
+        uppercase_token = lowercase_token.upper()
+        class_name = uppercase_token[0] + lowercase_token[1:]
+        test_exe_name = lowercase_token + '_test'
+        namespace = lowercase_token
+        lib_vala_name = lowercase_token + '.vala'
+        test_vala_name = lowercase_token + '_test.vala'
+        kwargs = {'utoken': uppercase_token,
+                  'ltoken': lowercase_token,
+                  'header_dir': lowercase_token,
+                  'class_name': class_name,
+                  'namespace': namespace,
+                  'source_file': lib_vala_name,
+                  'test_source_file': test_vala_name,
+                  'test_exe_name': test_exe_name,
+                  'project_name': self.name,
+                  'lib_name': lowercase_token,
+                  'test_name': lowercase_token,
+                  'version': self.version,
+                  }
+        open(lib_vala_name, 'w', encoding='utf-8').write(lib_vala_template.format(**kwargs))
+        open(test_vala_name, 'w', encoding='utf-8').write(lib_vala_test_template.format(**kwargs))
+        open('meson.build', 'w', encoding='utf-8').write(lib_vala_meson_template.format(**kwargs))
diff --git a/vendored-meson/meson/mesonbuild/utils/__init__.py b/vendored-meson/meson/mesonbuild/utils/__init__.py
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/vendored-meson/meson/mesonbuild/utils/core.py b/vendored-meson/meson/mesonbuild/utils/core.py
new file mode 100644
index 000000000000..eee88b94d9aa
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/utils/core.py
@@ -0,0 +1,161 @@
+# Copyright 2012-2022 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+Contains the strict minimum to run scripts.
+
+When the backend needs to call back into Meson during compilation for running
+scripts or wrapping commands, it is important to load as little python modules
+as possible for performance reasons.
+"""
+
+from __future__ import annotations
+from dataclasses import dataclass
+import os
+import abc
+import typing as T
+
+if T.TYPE_CHECKING:
+    from hashlib import _Hash
+    from typing_extensions import Literal
+    from ..mparser import BaseNode
+    from .. import programs
+
+    EnvironOrDict = T.Union[T.Dict[str, str], os._Environ[str]]
+
+    EnvInitValueType = T.Dict[str, T.Union[str, T.List[str]]]
+
+
+class MesonException(Exception):
+    '''Exceptions thrown by Meson'''
+
+    def __init__(self, *args: object, file: T.Optional[str] = None,
+                 lineno: T.Optional[int] = None, colno: T.Optional[int] = None):
+        super().__init__(*args)
+        self.file = file
+        self.lineno = lineno
+        self.colno = colno
+
+    @classmethod
+    def from_node(cls, *args: object, node: BaseNode) -> MesonException:
+        """Create a MesonException with location data from a BaseNode
+
+        :param node: A BaseNode to set location data from
+        :return: A Meson Exception instance
+        """
+        return cls(*args, file=node.filename, lineno=node.lineno, colno=node.colno)
+
+class MesonBugException(MesonException):
+    '''Exceptions thrown when there is a clear Meson bug that should be reported'''
+
+    def __init__(self, msg: str, file: T.Optional[str] = None,
+                 lineno: T.Optional[int] = None, colno: T.Optional[int] = None):
+        super().__init__(msg + '\n\n    This is a Meson bug and should be reported!',
+                         file=file, lineno=lineno, colno=colno)
+
+class HoldableObject(metaclass=abc.ABCMeta):
+    ''' Dummy base class for all objects that can be
+        held by an interpreter.baseobjects.ObjectHolder '''
+
+class EnvironmentVariables(HoldableObject):
+    def __init__(self, values: T.Optional[EnvInitValueType] = None,
+                 init_method: Literal['set', 'prepend', 'append'] = 'set', separator: str = os.pathsep) -> None:
+        self.envvars: T.List[T.Tuple[T.Callable[[T.Dict[str, str], str, T.List[str], str, T.Optional[str]], str], str, T.List[str], str]] = []
+        # The set of all env vars we have operations for. Only used for self.has_name()
+        self.varnames: T.Set[str] = set()
+
+        if values:
+            init_func = getattr(self, init_method)
+            for name, value in values.items():
+                v = value if isinstance(value, list) else [value]
+                init_func(name, v, separator)
+
+    def __repr__(self) -> str:
+        repr_str = "<{0}: {1}>"
+        return repr_str.format(self.__class__.__name__, self.envvars)
+
+    def hash(self, hasher: _Hash) -> None:
+        myenv = self.get_env({})
+        for key in sorted(myenv.keys()):
+            hasher.update(bytes(key, encoding='utf-8'))
+            hasher.update(b',')
+            hasher.update(bytes(myenv[key], encoding='utf-8'))
+            hasher.update(b';')
+
+    def has_name(self, name: str) -> bool:
+        return name in self.varnames
+
+    def get_names(self) -> T.Set[str]:
+        return self.varnames
+
+    def merge(self, other: EnvironmentVariables) -> None:
+        for method, name, values, separator in other.envvars:
+            self.varnames.add(name)
+            self.envvars.append((method, name, values, separator))
+
+    def set(self, name: str, values: T.List[str], separator: str = os.pathsep) -> None:
+        self.varnames.add(name)
+        self.envvars.append((self._set, name, values, separator))
+
+    def append(self, name: str, values: T.List[str], separator: str = os.pathsep) -> None:
+        self.varnames.add(name)
+        self.envvars.append((self._append, name, values, separator))
+
+    def prepend(self, name: str, values: T.List[str], separator: str = os.pathsep) -> None:
+        self.varnames.add(name)
+        self.envvars.append((self._prepend, name, values, separator))
+
+    @staticmethod
+    def _set(env: T.Dict[str, str], name: str, values: T.List[str], separator: str, default_value: T.Optional[str]) -> str:
+        return separator.join(values)
+
+    @staticmethod
+    def _append(env: T.Dict[str, str], name: str, values: T.List[str], separator: str, default_value: T.Optional[str]) -> str:
+        curr = env.get(name, default_value)
+        return separator.join(values if curr is None else [curr] + values)
+
+    @staticmethod
+    def _prepend(env: T.Dict[str, str], name: str, values: T.List[str], separator: str, default_value: T.Optional[str]) -> str:
+        curr = env.get(name, default_value)
+        return separator.join(values if curr is None else values + [curr])
+
+    def get_env(self, full_env: EnvironOrDict, default_fmt: T.Optional[str] = None) -> T.Dict[str, str]:
+        env = full_env.copy()
+        for method, name, values, separator in self.envvars:
+            default_value = default_fmt.format(name) if default_fmt else None
+            env[name] = method(env, name, values, separator, default_value)
+        return env
+
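+# A minimal replay sketch (names and values illustrative): set/append/prepend
+# only record operations; they are applied when get_env() walks them over a
+# base environment:
+#
+#     env = EnvironmentVariables()
+#     env.set('FOO', ['a'])
+#     env.append('FOO', ['b'])
+#     env.get_env({})  # -> {'FOO': 'a' + os.pathsep + 'b'}
+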
+
+@dataclass(eq=False)
+class ExecutableSerialisation:
+
+    # XXX: should capture and feed default to False, instead of None?
+
+    cmd_args: T.List[str]
+    env: T.Optional[EnvironmentVariables] = None
+    exe_wrapper: T.Optional['programs.ExternalProgram'] = None
+    workdir: T.Optional[str] = None
+    extra_paths: T.Optional[T.List] = None
+    capture: T.Optional[bool] = None
+    feed: T.Optional[bool] = None
+    tag: T.Optional[str] = None
+    verbose: bool = False
+    installdir_map: T.Optional[T.Dict[str, str]] = None
+
+    def __post_init__(self) -> None:
+        self.pickled = False
+        self.skip_if_destdir = False
+        self.subproject = ''
+        self.dry_run = False
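+
+
+# Construction sketch (arguments illustrative): only cmd_args is required;
+# __post_init__ then fills in the mutable bookkeeping fields:
+#
+#     es = ExecutableSerialisation(cmd_args=['ninja', '-C', 'build'])
+#     es.pickled  # -> False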
diff --git a/vendored-meson/meson/mesonbuild/utils/platform.py b/vendored-meson/meson/mesonbuild/utils/platform.py
new file mode 100644
index 000000000000..4a3927ddf733
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/utils/platform.py
@@ -0,0 +1,38 @@
+# SPDX-license-identifier: Apache-2.0
+# Copyright 2012-2021 The Meson development team
+# Copyright © 2021 Intel Corporation
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+"""base classes providing no-op functionality.."""
+
+import os
+import typing as T
+
+from .. import mlog
+
+__all__ = ['BuildDirLock']
+
+# This needs to be inherited by the specific implementations to make type
+# checking happy
+class BuildDirLock:
+
+    def __init__(self, builddir: str) -> None:
+        self.lockfilename = os.path.join(builddir, 'meson-private/meson.lock')
+
+    def __enter__(self) -> None:
+        mlog.debug('Calling the no-op version of BuildDirLock')
+
+    def __exit__(self, *args: T.Any) -> None:
+        pass
diff --git a/vendored-meson/meson/mesonbuild/utils/posix.py b/vendored-meson/meson/mesonbuild/utils/posix.py
new file mode 100644
index 000000000000..51c3cd06048a
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/utils/posix.py
@@ -0,0 +1,43 @@
+# SPDX-license-identifier: Apache-2.0
+# Copyright 2012-2021 The Meson development team
+# Copyright © 2021 Intel Corporation
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+"""Posix specific implementations of mesonlib functionality."""
+
+import fcntl
+import typing as T
+
+from .universal import MesonException
+from .platform import BuildDirLock as BuildDirLockBase
+
+__all__ = ['BuildDirLock']
+
+class BuildDirLock(BuildDirLockBase):
+
+    def __enter__(self) -> None:
+        self.lockfile = open(self.lockfilename, 'w', encoding='utf-8')
+        try:
+            fcntl.flock(self.lockfile, fcntl.LOCK_EX | fcntl.LOCK_NB)
+        except (BlockingIOError, PermissionError):
+            self.lockfile.close()
+            raise MesonException('Some other Meson process is already using this build directory. Exiting.')
+        except OSError as e:
+            self.lockfile.close()
+            raise MesonException(f'Failed to lock the build directory: {e.strerror}')
+
+    def __exit__(self, *args: T.Any) -> None:
+        fcntl.flock(self.lockfile, fcntl.LOCK_UN)
+        self.lockfile.close()
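+
+# Usage sketch (illustrative): the lock is acquired non-blocking in
+# __enter__, so a second Meson process fails fast with a MesonException
+# instead of racing on the same build directory:
+#
+#     with BuildDirLock(builddir):
+#         ...  # builddir state may be mutated safely here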
diff --git a/vendored-meson/meson/mesonbuild/utils/universal.py b/vendored-meson/meson/mesonbuild/utils/universal.py
new file mode 100644
index 000000000000..d78fadd438c6
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/utils/universal.py
@@ -0,0 +1,2431 @@
+# Copyright 2012-2020 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""A library of random helper functionality."""
+
+from __future__ import annotations
+from pathlib import Path
+import argparse
+import enum
+import sys
+import stat
+import time
+import abc
+import platform, subprocess, operator, os, shlex, shutil, re
+import collections
+from functools import lru_cache, wraps, total_ordering
+from itertools import tee
+from tempfile import TemporaryDirectory, NamedTemporaryFile
+import typing as T
+import textwrap
+import copy
+import pickle
+import errno
+
+from mesonbuild import mlog
+from .core import MesonException, HoldableObject
+
+if T.TYPE_CHECKING:
+    from typing_extensions import Literal, Protocol
+
+    from .._typing import ImmutableListProtocol
+    from ..build import ConfigurationData
+    from ..coredata import KeyedOptionDictType, UserOption, StrOrBytesPath
+    from ..environment import Environment
+    from ..compilers.compilers import Compiler
+    from ..interpreterbase.baseobjects import SubProject
+
+    class _EnvPickleLoadable(Protocol):
+
+        environment: Environment
+
+    class _VerPickleLoadable(Protocol):
+
+        version: str
+
+    # A generic type for pickle_load. This allows any type that has either a
+    # .version or a .environment to be passed.
+    _PL = T.TypeVar('_PL', bound=T.Union[_EnvPickleLoadable, _VerPickleLoadable])
+
+FileOrString = T.Union['File', str]
+
+_T = T.TypeVar('_T')
+_U = T.TypeVar('_U')
+
+__all__ = [
+    'GIT',
+    'python_command',
+    'project_meson_versions',
+    'SecondLevelHolder',
+    'File',
+    'FileMode',
+    'GitException',
+    'LibType',
+    'MachineChoice',
+    'EnvironmentException',
+    'FileOrString',
+    'OptionKey',
+    'dump_conf_header',
+    'OptionOverrideProxy',
+    'OptionType',
+    'OrderedSet',
+    'PerMachine',
+    'PerMachineDefaultable',
+    'PerThreeMachine',
+    'PerThreeMachineDefaultable',
+    'ProgressBar',
+    'RealPathAction',
+    'TemporaryDirectoryWinProof',
+    'Version',
+    'check_direntry_issues',
+    'classify_unity_sources',
+    'current_vs_supports_modules',
+    'darwin_get_object_archs',
+    'default_libdir',
+    'default_libexecdir',
+    'default_prefix',
+    'default_datadir',
+    'default_includedir',
+    'default_infodir',
+    'default_localedir',
+    'default_mandir',
+    'default_sbindir',
+    'default_sysconfdir',
+    'detect_subprojects',
+    'detect_vcs',
+    'do_conf_file',
+    'do_conf_str',
+    'do_replacement',
+    'exe_exists',
+    'expand_arguments',
+    'extract_as_list',
+    'first',
+    'generate_list',
+    'get_compiler_for_source',
+    'get_filenames_templates_dict',
+    'get_variable_regex',
+    'get_wine_shortpath',
+    'git',
+    'has_path_sep',
+    'is_aix',
+    'is_android',
+    'is_ascii_string',
+    'is_cygwin',
+    'is_debianlike',
+    'is_dragonflybsd',
+    'is_freebsd',
+    'is_haiku',
+    'is_hurd',
+    'is_irix',
+    'is_linux',
+    'is_netbsd',
+    'is_openbsd',
+    'is_osx',
+    'is_qnx',
+    'is_sunos',
+    'is_windows',
+    'is_wsl',
+    'iter_regexin_iter',
+    'join_args',
+    'listify',
+    'partition',
+    'path_is_in_root',
+    'pickle_load',
+    'Popen_safe',
+    'Popen_safe_logged',
+    'quiet_git',
+    'quote_arg',
+    'relative_to_if_possible',
+    'relpath',
+    'replace_if_different',
+    'run_once',
+    'get_meson_command',
+    'set_meson_command',
+    'split_args',
+    'stringlistify',
+    'substitute_values',
+    'substring_is_in_list',
+    'typeslistify',
+    'verbose_git',
+    'version_compare',
+    'version_compare_condition_with_min',
+    'version_compare_many',
+    'search_version',
+    'windows_detect_native_arch',
+    'windows_proof_rm',
+    'windows_proof_rmtree',
+]
+
+
+# TODO: this is such a hack, this really should be either in coredata or in the
+# interpreter
+# {subproject: project_meson_version}
+project_meson_versions: T.DefaultDict[str, str] = collections.defaultdict(str)
+
+
+from glob import glob
+
+if getattr(sys, 'frozen', False) and hasattr(sys, '_MEIPASS'):
+    # using a PyInstaller bundle, e.g. the MSI installed executable
+    python_command = [sys.executable, 'runpython']
+else:
+    python_command = [sys.executable]
+_meson_command: T.Optional['ImmutableListProtocol[str]'] = None
+
+
+class EnvironmentException(MesonException):
+    '''Exceptions thrown while processing and creating the build environment'''
+
+class GitException(MesonException):
+    def __init__(self, msg: str, output: T.Optional[str] = None):
+        super().__init__(msg)
+        self.output = output.strip() if output else ''
+
+GIT = shutil.which('git')
+def git(cmd: T.List[str], workingdir: StrOrBytesPath, check: bool = False, **kwargs: T.Any) -> T.Tuple[subprocess.Popen[str], str, str]:
+    assert GIT is not None, 'Callers should make sure it exists'
+    cmd = [GIT, *cmd]
+    p, o, e = Popen_safe(cmd, cwd=workingdir, **kwargs)
+    if check and p.returncode != 0:
+        raise GitException('Git command failed: ' + str(cmd), e)
+    return p, o, e
+
+def quiet_git(cmd: T.List[str], workingdir: StrOrBytesPath, check: bool = False) -> T.Tuple[bool, str]:
+    if not GIT:
+        m = 'Git program not found.'
+        if check:
+            raise GitException(m)
+        return False, m
+    p, o, e = git(cmd, workingdir, check)
+    if p.returncode != 0:
+        return False, e
+    return True, o
+
+def verbose_git(cmd: T.List[str], workingdir: StrOrBytesPath, check: bool = False) -> bool:
+    if not GIT:
+        m = 'Git program not found.'
+        if check:
+            raise GitException(m)
+        return False
+    p, _, _ = git(cmd, workingdir, check, stdout=None, stderr=None)
+    return p.returncode == 0
+
+def set_meson_command(mainfile: str) -> None:
+    global _meson_command  # pylint: disable=global-statement
+    # On UNIX-like systems `meson` is a Python script
+    # On Windows `meson` and `meson.exe` are wrapper exes
+    if not mainfile.endswith('.py'):
+        _meson_command = [mainfile]
+    elif os.path.isabs(mainfile) and mainfile.endswith('mesonmain.py'):
+        # Can't actually run meson with an absolute path to mesonmain.py, it must be run as -m mesonbuild.mesonmain
+        _meson_command = python_command + ['-m', 'mesonbuild.mesonmain']
+    else:
+        # Either run uninstalled, or full path to meson-script.py
+        _meson_command = python_command + [mainfile]
+    # We print this value for unit tests.
+    if 'MESON_COMMAND_TESTS' in os.environ:
+        mlog.log(f'meson_command is {_meson_command!r}')
+
+
+def get_meson_command() -> T.Optional['ImmutableListProtocol[str]']:
+    return _meson_command
+
+
+def is_ascii_string(astring: T.Union[str, bytes]) -> bool:
+    try:
+        if isinstance(astring, str):
+            astring.encode('ascii')
+        elif isinstance(astring, bytes):
+            astring.decode('ascii')
+    except UnicodeError:
+        # str.encode raises UnicodeEncodeError and bytes.decode raises
+        # UnicodeDecodeError; UnicodeError covers both.
+        return False
+    return True
+
+
+def check_direntry_issues(direntry_array: T.Union[T.Iterable[T.Union[str, bytes]], str, bytes]) -> None:
+    import locale
+    # Warn if the locale is not UTF-8. This can cause various unfixable issues
+    # such as os.stat not being able to decode filenames with unicode in them.
+    # There is no way to reset both the preferred encoding and the filesystem
+    # encoding, so we can just warn about it.
+    e = locale.getpreferredencoding()
+    if e.upper() != 'UTF-8' and not is_windows():
+        if isinstance(direntry_array, (str, bytes)):
+            direntry_array = [direntry_array]
+        for de in direntry_array:
+            if is_ascii_string(de):
+                continue
+            mlog.warning(textwrap.dedent(f'''
+                You are using {e!r} which is not a Unicode-compatible
+                locale but you are trying to access a file system entry called {de!r} which is
+                not pure ASCII. This may cause problems.
+                '''))
+
+class SecondLevelHolder(HoldableObject, metaclass=abc.ABCMeta):
+    ''' A second level object holder. The primary purpose
+        of such objects is to hold multiple objects with one
+        default option. '''
+
+    @abc.abstractmethod
+    def get_default_object(self) -> HoldableObject: ...
+
+class FileMode:
+    # The first triad is for owner permissions, the second for group permissions,
+    # and the third for others (everyone else).
+    # For the 1st character:
+    #  'r' means can read
+    #  '-' means not allowed
+    # For the 2nd character:
+    #  'w' means can write
+    #  '-' means not allowed
+    # For the 3rd character:
+    #  'x' means can execute
+    #  's' means can execute and setuid/setgid is set (owner/group triads only)
+    #  'S' means cannot execute and setuid/setgid is set (owner/group triads only)
+    #  't' means can execute and sticky bit is set ("others" triads only)
+    #  'T' means cannot execute and sticky bit is set ("others" triads only)
+    #  '-' means none of these are allowed
+    #
+    # The meanings of 'rwx' perms is not obvious for directories; see:
+    # https://www.hackinglinuxexposed.com/articles/20030424.html
+    #
+    # For information on this notation such as setuid/setgid/sticky bits, see:
+    # https://en.wikipedia.org/wiki/File_system_permissions#Symbolic_notation
+    symbolic_perms_regex = re.compile('[r-][w-][xsS-]' # Owner perms
+                                      '[r-][w-][xsS-]' # Group perms
+                                      '[r-][w-][xtT-]') # Others perms
+
+    def __init__(self, perms: T.Optional[str] = None, owner: T.Union[str, int, None] = None,
+                 group: T.Union[str, int, None] = None):
+        self.perms_s = perms
+        self.perms = self.perms_s_to_bits(perms)
+        self.owner = owner
+        self.group = group
+
+    def __repr__(self) -> str:
+        ret = '<FileMode: {!r} owner={} group={}>'
+        return ret.format(self.perms_s, self.owner, self.group)
+
+    @classmethod
+    def perms_s_to_bits(cls, perms_s: T.Optional[str]) -> int:
+        '''
+        Does the opposite of stat.filemode(), converts strings of the form
+        'rwxr-xr-x' to st_mode enums which can be passed to os.chmod()
+        '''
+        if perms_s is None:
+            # No perms specified, we will not touch the permissions
+            return -1
+        eg = 'rwxr-xr-x'
+        if not isinstance(perms_s, str):
+            raise MesonException(f'Install perms must be a string. For example, {eg!r}')
+        if len(perms_s) != 9 or not cls.symbolic_perms_regex.match(perms_s):
+            raise MesonException(f'File perms {perms_s!r} must be exactly 9 chars. For example, {eg!r}')
+        perms = 0
+        # Owner perms
+        if perms_s[0] == 'r':
+            perms |= stat.S_IRUSR
+        if perms_s[1] == 'w':
+            perms |= stat.S_IWUSR
+        if perms_s[2] == 'x':
+            perms |= stat.S_IXUSR
+        elif perms_s[2] == 'S':
+            perms |= stat.S_ISUID
+        elif perms_s[2] == 's':
+            perms |= stat.S_IXUSR
+            perms |= stat.S_ISUID
+        # Group perms
+        if perms_s[3] == 'r':
+            perms |= stat.S_IRGRP
+        if perms_s[4] == 'w':
+            perms |= stat.S_IWGRP
+        if perms_s[5] == 'x':
+            perms |= stat.S_IXGRP
+        elif perms_s[5] == 'S':
+            perms |= stat.S_ISGID
+        elif perms_s[5] == 's':
+            perms |= stat.S_IXGRP
+            perms |= stat.S_ISGID
+        # Others perms
+        if perms_s[6] == 'r':
+            perms |= stat.S_IROTH
+        if perms_s[7] == 'w':
+            perms |= stat.S_IWOTH
+        if perms_s[8] == 'x':
+            perms |= stat.S_IXOTH
+        elif perms_s[8] == 'T':
+            perms |= stat.S_ISVTX
+        elif perms_s[8] == 't':
+            perms |= stat.S_IXOTH
+            perms |= stat.S_ISVTX
+        return perms
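+
+    # Worked example (illustrative): 'rw-r--r--' maps to the familiar octal
+    # mode 0o644, i.e.
+    #     FileMode.perms_s_to_bits('rw-r--r--') == 0o644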
+
+dot_C_dot_H_warning = """You are using .C or .H files in your project. This is deprecated.
+         Currently, Meson treats these as C++ code, but they
+            used to be treated as C code.
+         Note that the situation is a bit more complex if you are using the
+         Visual Studio compiler, as it treats .C files as C code, unless you add
+         the /TP compiler flag, but this is unreliable.
+         See https://github.com/mesonbuild/meson/pull/8747 for the discussions."""
+class File(HoldableObject):
+    def __init__(self, is_built: bool, subdir: str, fname: str):
+        if fname.endswith(".C") or fname.endswith(".H"):
+            mlog.warning(dot_C_dot_H_warning, once=True)
+        self.is_built = is_built
+        self.subdir = subdir
+        self.fname = fname
+        self.hash = hash((is_built, subdir, fname))
+
+    def __str__(self) -> str:
+        return self.relative_name()
+
+    def __repr__(self) -> str:
+        ret = '<File: {0}'
+        if not self.is_built:
+            ret += ' (not built)'
+        ret += '>'
+        return ret.format(self.relative_name())
+
+    @staticmethod
+    @lru_cache(maxsize=None)
+    def from_source_file(source_root: str, subdir: str, fname: str) -> 'File':
+        if not os.path.isfile(os.path.join(source_root, subdir, fname)):
+            raise MesonException(f'File {fname} does not exist.')
+        return File(False, subdir, fname)
+
+    @staticmethod
+    def from_built_file(subdir: str, fname: str) -> 'File':
+        return File(True, subdir, fname)
+
+    @staticmethod
+    def from_built_relative(relative: str) -> 'File':
+        dirpart, fnamepart = os.path.split(relative)
+        return File(True, dirpart, fnamepart)
+
+    @staticmethod
+    def from_absolute_file(fname: str) -> 'File':
+        return File(False, '', fname)
+
+    @lru_cache(maxsize=None)
+    def rel_to_builddir(self, build_to_src: str) -> str:
+        if self.is_built:
+            return self.relative_name()
+        else:
+            return os.path.join(build_to_src, self.subdir, self.fname)
+
+    @lru_cache(maxsize=None)
+    def absolute_path(self, srcdir: str, builddir: str) -> str:
+        absdir = srcdir
+        if self.is_built:
+            absdir = builddir
+        return os.path.join(absdir, self.relative_name())
+
+    @property
+    def suffix(self) -> str:
+        return os.path.splitext(self.fname)[1][1:].lower()
+
+    def endswith(self, ending: T.Union[str, T.Tuple[str, ...]]) -> bool:
+        return self.fname.endswith(ending)
+
+    def split(self, s: str, maxsplit: int = -1) -> T.List[str]:
+        return self.fname.split(s, maxsplit=maxsplit)
+
+    def rsplit(self, s: str, maxsplit: int = -1) -> T.List[str]:
+        return self.fname.rsplit(s, maxsplit=maxsplit)
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, File):
+            return NotImplemented
+        if self.hash != other.hash:
+            return False
+        return (self.fname, self.subdir, self.is_built) == (other.fname, other.subdir, other.is_built)
+
+    def __hash__(self) -> int:
+        return self.hash
+
+    @lru_cache(maxsize=None)
+    def relative_name(self) -> str:
+        return os.path.join(self.subdir, self.fname)
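+
+    # Illustration (POSIX-style separators assumed): source files resolve
+    # relative to the source tree, built files relative to the build tree:
+    #
+    #     File(False, 'src', 'a.c').rel_to_builddir('../..')  # '../../src/a.c'
+    #     File(True, 'src', 'a.c').rel_to_builddir('../..')   # 'src/a.c'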
+
+
+def get_compiler_for_source(compilers: T.Iterable['Compiler'], src: 'FileOrString') -> 'Compiler':
+    """Given a set of compilers and a source, find the compiler for that source type."""
+    for comp in compilers:
+        if comp.can_compile(src):
+            return comp
+    raise MesonException(f'No specified compiler can handle file {src!s}')
+
+
+def classify_unity_sources(compilers: T.Iterable['Compiler'], sources: T.Sequence['FileOrString']) -> T.Dict['Compiler', T.List['FileOrString']]:
+    compsrclist: T.Dict['Compiler', T.List['FileOrString']] = {}
+    for src in sources:
+        comp = get_compiler_for_source(compilers, src)
+        if comp not in compsrclist:
+            compsrclist[comp] = [src]
+        else:
+            compsrclist[comp].append(src)
+    return compsrclist
+
+
+class MachineChoice(enum.IntEnum):
+
+    """Enum class representing one of the two abstract machine names used in
+    most places: the build, and host, machines.
+    """
+
+    BUILD = 0
+    HOST = 1
+
+    def get_lower_case_name(self) -> str:
+        return PerMachine('build', 'host')[self]
+
+    def get_prefix(self) -> str:
+        return PerMachine('build.', '')[self]
+
+
+class PerMachine(T.Generic[_T]):
+    def __init__(self, build: _T, host: _T) -> None:
+        self.build = build
+        self.host = host
+
+    def __getitem__(self, machine: MachineChoice) -> _T:
+        return {
+            MachineChoice.BUILD:  self.build,
+            MachineChoice.HOST:   self.host,
+        }[machine]
+
+    def __setitem__(self, machine: MachineChoice, val: _T) -> None:
+        setattr(self, machine.get_lower_case_name(), val)
+
+    def miss_defaulting(self) -> "PerMachineDefaultable[T.Optional[_T]]":
+        """Unset definition duplicated from their previous to None
+
+        This is the inverse of ''default_missing''. By removing defaulted
+        machines, we can elaborate the original and then redefault them and thus
+        avoid repeating the elaboration explicitly.
+        """
+        unfreeze: PerMachineDefaultable[T.Optional[_T]] = PerMachineDefaultable()
+        unfreeze.build = self.build
+        unfreeze.host = self.host
+        if unfreeze.host == unfreeze.build:
+            unfreeze.host = None
+        return unfreeze
+
+    def __repr__(self) -> str:
+        return f'PerMachine({self.build!r}, {self.host!r})'
+
+
+class PerThreeMachine(PerMachine[_T]):
+    """Like `PerMachine` but includes `target` too.
+
+    It turns out the target machine is the only additional thing we need to
+    track. There's no need to compute the `target` field, so we don't bother
+    overriding the `__getitem__`/`__setitem__` methods.
+    """
+    def __init__(self, build: _T, host: _T, target: _T) -> None:
+        super().__init__(build, host)
+        self.target = target
+
+    def miss_defaulting(self) -> "PerThreeMachineDefaultable[T.Optional[_T]]":
+        """Unset definition duplicated from their previous to None
+
+        This is the inverse of ''default_missing''. By removing defaulted
+        machines, we can elaborate the original and then redefault them and thus
+        avoid repeating the elaboration explicitly.
+        """
+        unfreeze: PerThreeMachineDefaultable[T.Optional[_T]] = PerThreeMachineDefaultable()
+        unfreeze.build = self.build
+        unfreeze.host = self.host
+        unfreeze.target = self.target
+        if unfreeze.target == unfreeze.host:
+            unfreeze.target = None
+        if unfreeze.host == unfreeze.build:
+            unfreeze.host = None
+        return unfreeze
+
+    def matches_build_machine(self, machine: MachineChoice) -> bool:
+        return self.build == self[machine]
+
+    def __repr__(self) -> str:
+        return f'PerThreeMachine({self.build!r}, {self.host!r}, {self.target!r})'
+
+
+class PerMachineDefaultable(PerMachine[T.Optional[_T]]):
+    """Extends `PerMachine` with the ability to default from `None`s.
+    """
+    def __init__(self, build: T.Optional[_T] = None, host: T.Optional[_T] = None) -> None:
+        super().__init__(build, host)
+
+    def default_missing(self) -> "PerMachine[_T]":
+        """Default host to build
+
+        This allows just specifying nothing in the native case, and just host in the
+        cross non-compiler case.
+        """
+        freeze = PerMachine(self.build, self.host)
+        if freeze.host is None:
+            freeze.host = freeze.build
+        return freeze
+
+    def __repr__(self) -> str:
+        return f'PerMachineDefaultable({self.build!r}, {self.host!r})'
+
+    @classmethod
+    def default(cls, is_cross: bool, build: _T, host: _T) -> PerMachine[_T]:
+        """Easy way to get a defaulted value
+
+        This allows simplifying the case where you can control whether host and
+        build are separate or not with a boolean. If the is_cross value is set
+        to true then the optional host value will be used, otherwise the host
+        will be set to the build value.
+        """
+        m = cls(build)
+        if is_cross:
+            m.host = host
+        return m.default_missing()
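+
+    # Illustration: in the native case host defaults to build, e.g.
+    #     PerMachineDefaultable.default(is_cross=False, build=b, host=h)
+    # yields PerMachine(b, b), while is_cross=True yields PerMachine(b, h).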
+
+
+class PerThreeMachineDefaultable(PerMachineDefaultable[T.Optional[_T]], PerThreeMachine[T.Optional[_T]]):
+    """Extends `PerThreeMachine` with the ability to default from `None`s.
+    """
+    def __init__(self) -> None:
+        PerThreeMachine.__init__(self, None, None, None)
+
+    def default_missing(self) -> "PerThreeMachine[T.Optional[_T]]":
+        """Default host to build and target to host.
+
+        This allows just specifying nothing in the native case, just host in the
+        cross non-compiler case, and just target in the native-built
+        cross-compiler case.
+        """
+        freeze = PerThreeMachine(self.build, self.host, self.target)
+        if freeze.host is None:
+            freeze.host = freeze.build
+        if freeze.target is None:
+            freeze.target = freeze.host
+        return freeze
+
+    def __repr__(self) -> str:
+        return f'PerThreeMachineDefaultable({self.build!r}, {self.host!r}, {self.target!r})'
+
+
+def is_sunos() -> bool:
+    return platform.system().lower() == 'sunos'
+
+
+def is_osx() -> bool:
+    return platform.system().lower() == 'darwin'
+
+
+def is_linux() -> bool:
+    return platform.system().lower() == 'linux'
+
+
+def is_android() -> bool:
+    return platform.system().lower() == 'android'
+
+
+def is_haiku() -> bool:
+    return platform.system().lower() == 'haiku'
+
+
+def is_openbsd() -> bool:
+    return platform.system().lower() == 'openbsd'
+
+
+def is_windows() -> bool:
+    platname = platform.system().lower()
+    return platname == 'windows'
+
+def is_wsl() -> bool:
+    return is_linux() and 'microsoft' in platform.release().lower()
+
+def is_cygwin() -> bool:
+    return sys.platform == 'cygwin'
+
+
+def is_debianlike() -> bool:
+    return os.path.isfile('/etc/debian_version')
+
+
+def is_dragonflybsd() -> bool:
+    return platform.system().lower() == 'dragonfly'
+
+
+def is_netbsd() -> bool:
+    return platform.system().lower() == 'netbsd'
+
+
+def is_freebsd() -> bool:
+    return platform.system().lower() == 'freebsd'
+
+def is_irix() -> bool:
+    return platform.system().startswith('irix')
+
+def is_hurd() -> bool:
+    return platform.system().lower() == 'gnu'
+
+def is_qnx() -> bool:
+    return platform.system().lower() == 'qnx'
+
+def is_aix() -> bool:
+    return platform.system().lower() == 'aix'
+
+def exe_exists(arglist: T.List[str]) -> bool:
+    try:
+        if subprocess.run(arglist, timeout=10).returncode == 0:
+            return True
+    except (FileNotFoundError, subprocess.TimeoutExpired):
+        pass
+    return False
+
+
+@lru_cache(maxsize=None)
+def darwin_get_object_archs(objpath: str) -> T.Optional['ImmutableListProtocol[str]']:
+    '''
+    For a specific object (executable, static library, dylib, etc), run `lipo`
+    to fetch the list of archs supported by it. Supports both thin objects and
+    'fat' objects.
+    '''
+    _, stdo, stderr = Popen_safe(['lipo', '-info', objpath])
+    if not stdo:
+        mlog.debug(f'lipo {objpath}: {stderr}')
+        return None
+    stdo = stdo.rsplit(': ', 1)[1]
+
+    # Convert from lipo-style archs to meson-style CPUs
+    map_arch = {
+        'i386': 'x86',
+        'arm64': 'aarch64',
+        'arm64e': 'aarch64',
+        'ppc7400': 'ppc',
+        'ppc970': 'ppc',
+    }
+    lipo_archs = stdo.split()
+    meson_archs = [map_arch.get(lipo_arch, lipo_arch) for lipo_arch in lipo_archs]
+
+    # Add generic name for armv7 and armv7s
+    if 'armv7' in stdo:
+        meson_archs.append('arm')
+
+    return meson_archs
+
+def windows_detect_native_arch() -> str:
+    """
+    The architecture of Windows itself: x86, amd64 or arm64
+    """
+    if sys.platform != 'win32':
+        return ''
+    try:
+        import ctypes
+        process_arch = ctypes.c_ushort()
+        native_arch = ctypes.c_ushort()
+        kernel32 = ctypes.windll.kernel32
+        process = ctypes.c_void_p(kernel32.GetCurrentProcess())
+        # This is the only reliable way to detect an arm system if we are an x86/x64 process being emulated
+        if kernel32.IsWow64Process2(process, ctypes.byref(process_arch), ctypes.byref(native_arch)):
+            # https://docs.microsoft.com/en-us/windows/win32/sysinfo/image-file-machine-constants
+            if native_arch.value == 0x8664:
+                return 'amd64'
+            elif native_arch.value == 0x014C:
+                return 'x86'
+            elif native_arch.value == 0xAA64:
+                return 'arm64'
+            elif native_arch.value == 0x01C4:
+                return 'arm'
+    except (OSError, AttributeError):
+        pass
+    # These env variables are always available. See:
+    # https://msdn.microsoft.com/en-us/library/aa384274(VS.85).aspx
+    # https://blogs.msdn.microsoft.com/david.wang/2006/03/27/howto-detect-process-bitness/
+    arch = os.environ.get('PROCESSOR_ARCHITEW6432', '').lower()
+    if not arch:
+        try:
+            # If this doesn't exist, something is messing with the environment
+            arch = os.environ['PROCESSOR_ARCHITECTURE'].lower()
+        except KeyError:
+            raise EnvironmentException('Unable to detect native OS architecture')
+    return arch
+
+def detect_vcs(source_dir: T.Union[str, Path]) -> T.Optional[T.Dict[str, str]]:
+    vcs_systems = [
+        {
+            'name': 'git',
+            'cmd': 'git',
+            'repo_dir': '.git',
+            'get_rev': 'git describe --dirty=+ --always',
+            'rev_regex': '(.*)',
+            'dep': '.git/logs/HEAD'
+        },
+        {
+            'name': 'mercurial',
+            'cmd': 'hg',
+            'repo_dir': '.hg',
+            'get_rev': 'hg id -i',
+            'rev_regex': '(.*)',
+            'dep': '.hg/dirstate'
+        },
+        {
+            'name': 'subversion',
+            'cmd': 'svn',
+            'repo_dir': '.svn',
+            'get_rev': 'svn info',
+            'rev_regex': 'Revision: (.*)',
+            'dep': '.svn/wc.db'
+        },
+        {
+            'name': 'bazaar',
+            'cmd': 'bzr',
+            'repo_dir': '.bzr',
+            'get_rev': 'bzr revno',
+            'rev_regex': '(.*)',
+            'dep': '.bzr'
+        },
+    ]
+    if isinstance(source_dir, str):
+        source_dir = Path(source_dir)
+
+    parent_paths_and_self = collections.deque(source_dir.parents)
+    # Prepend the source directory to the front so we can check it;
+    # source_dir.parents doesn't include source_dir
+    parent_paths_and_self.appendleft(source_dir)
+    for curdir in parent_paths_and_self:
+        for vcs in vcs_systems:
+            if Path.is_dir(curdir.joinpath(vcs['repo_dir'])) and shutil.which(vcs['cmd']):
+                vcs['wc_dir'] = str(curdir)
+                return vcs
+    return None
+
+def current_vs_supports_modules() -> bool:
+    vsver = os.environ.get('VSCMD_VER', '')
+    if not vsver:
+        return False
+    nums = vsver.split('.', 2)
+    major = int(nums[0])
+    if major >= 17:
+        return True
+    if major == 16 and int(nums[1]) >= 10:
+        return True
+    return vsver.startswith('16.9.0') and '-pre.' in vsver
+
+# a helper class which implements the same version ordering as RPM
+class Version:
+    def __init__(self, s: str) -> None:
+        self._s = s
+
+        # split into numeric, alphabetic and non-alphanumeric sequences
+        sequences1 = re.finditer(r'(\d+|[a-zA-Z]+|[^a-zA-Z\d]+)', s)
+
+        # non-alphanumeric separators are discarded
+        sequences2 = [m for m in sequences1 if not re.match(r'[^a-zA-Z\d]+', m.group(1))]
+
+        # numeric sequences are converted from strings to ints
+        sequences3 = [int(m.group(1)) if m.group(1).isdigit() else m.group(1) for m in sequences2]
+
+        self._v = sequences3
+
+    def __str__(self) -> str:
+        return '{} (V={})'.format(self._s, str(self._v))
+
+    def __repr__(self) -> str:
+        return f'<Version: {self._s}>'
+
+    def __lt__(self, other: object) -> bool:
+        if isinstance(other, Version):
+            return self.__cmp(other, operator.lt)
+        return NotImplemented
+
+    def __gt__(self, other: object) -> bool:
+        if isinstance(other, Version):
+            return self.__cmp(other, operator.gt)
+        return NotImplemented
+
+    def __le__(self, other: object) -> bool:
+        if isinstance(other, Version):
+            return self.__cmp(other, operator.le)
+        return NotImplemented
+
+    def __ge__(self, other: object) -> bool:
+        if isinstance(other, Version):
+            return self.__cmp(other, operator.ge)
+        return NotImplemented
+
+    def __eq__(self, other: object) -> bool:
+        if isinstance(other, Version):
+            return self._v == other._v
+        return NotImplemented
+
+    def __ne__(self, other: object) -> bool:
+        if isinstance(other, Version):
+            return self._v != other._v
+        return NotImplemented
+
+    def __cmp(self, other: 'Version', comparator: T.Callable[[T.Any, T.Any], bool]) -> bool:
+        # compare each sequence in order
+        for ours, theirs in zip(self._v, other._v):
+            # sort a non-digit sequence before a digit sequence
+            ours_is_int = isinstance(ours, int)
+            theirs_is_int = isinstance(theirs, int)
+            if ours_is_int != theirs_is_int:
+                return comparator(ours_is_int, theirs_is_int)
+
+            if ours != theirs:
+                return comparator(ours, theirs)
+
+        # if equal length, all components have matched, so equal
+        # otherwise, the version with a suffix remaining is greater
+        return comparator(len(self._v), len(other._v))
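+
+    # Illustration of the ordering (RPM-style):
+    #
+    #     >>> Version('1.2.3') < Version('1.2.10')
+    #     True
+    #     >>> Version('1.2') < Version('1.2.0')   # trailing suffix sorts greater
+    #     True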
+
+
+def _version_extract_cmpop(vstr2: str) -> T.Tuple[T.Callable[[T.Any, T.Any], bool], str]:
+    if vstr2.startswith('>='):
+        cmpop = operator.ge
+        vstr2 = vstr2[2:]
+    elif vstr2.startswith('<='):
+        cmpop = operator.le
+        vstr2 = vstr2[2:]
+    elif vstr2.startswith('!='):
+        cmpop = operator.ne
+        vstr2 = vstr2[2:]
+    elif vstr2.startswith('=='):
+        cmpop = operator.eq
+        vstr2 = vstr2[2:]
+    elif vstr2.startswith('='):
+        cmpop = operator.eq
+        vstr2 = vstr2[1:]
+    elif vstr2.startswith('>'):
+        cmpop = operator.gt
+        vstr2 = vstr2[1:]
+    elif vstr2.startswith('<'):
+        cmpop = operator.lt
+        vstr2 = vstr2[1:]
+    else:
+        cmpop = operator.eq
+
+    return (cmpop, vstr2)
+
+
+def version_compare(vstr1: str, vstr2: str) -> bool:
+    (cmpop, vstr2) = _version_extract_cmpop(vstr2)
+    return cmpop(Version(vstr1), Version(vstr2))
+
+
+def version_compare_many(vstr1: str, conditions: T.Union[str, T.Iterable[str]]) -> T.Tuple[bool, T.List[str], T.List[str]]:
+    if isinstance(conditions, str):
+        conditions = [conditions]
+    found: T.List[str] = []
+    not_found: T.List[str] = []
+    for req in conditions:
+        if not version_compare(vstr1, req):
+            not_found.append(req)
+        else:
+            found.append(req)
+    return not not_found, not_found, found
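+
+# Illustration (hypothetical inputs):
+#
+#     >>> version_compare('1.8.0', '>=1.4')
+#     True
+#     >>> version_compare_many('1.8.0', ['>=1.4', '<1.6'])
+#     (False, ['<1.6'], ['>=1.4'])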
+
+
+# determine if the minimum version satisfying the condition |condition| exceeds
+# the minimum version for a feature |minimum|
+def version_compare_condition_with_min(condition: str, minimum: str) -> bool:
+    if condition.startswith('>='):
+        cmpop = operator.le
+        condition = condition[2:]
+    elif condition.startswith('<='):
+        return False
+    elif condition.startswith('!='):
+        return False
+    elif condition.startswith('=='):
+        cmpop = operator.le
+        condition = condition[2:]
+    elif condition.startswith('='):
+        cmpop = operator.le
+        condition = condition[1:]
+    elif condition.startswith('>'):
+        cmpop = operator.lt
+        condition = condition[1:]
+    elif condition.startswith('<'):
+        return False
+    else:
+        cmpop = operator.le
+
+    # Declaring a project(meson_version: '>=0.46') and then using features in
+    # 0.46.0 is valid, because (knowing the meson versioning scheme) '0.46.0' is
+    # the lowest version which satisfies the constraint '>=0.46'.
+    #
+    # But this will fail here, because the minimum version required by the
+    # version constraint ('0.46') is strictly less (in our version comparison)
+    # than the minimum version needed for the feature ('0.46.0').
+    #
+    # Map versions in the constraint of the form '0.46' to '0.46.0', to embed
+    # this knowledge of the meson versioning scheme.
+    condition = condition.strip()
+    if re.match(r'^\d+\.\d+$', condition):
+        condition += '.0'
+
+    return T.cast('bool', cmpop(Version(minimum), Version(condition)))
+
+def search_version(text: str) -> str:
+    # Usually of the type 4.1.4 but compiler output may contain
+    # stuff like this:
+    # (Sourcery CodeBench Lite 2014.05-29) 4.8.3 20140320 (prerelease)
+    # Limiting major version number to two digits seems to work
+    # thus far. When we get to GCC 100, this will break, but
+    # if we are still relevant when that happens, it can be
+    # considered an achievement in itself.
+    #
+    # This regex is reaching magic levels. If it ever needs
+    # to be updated, do not complexify but convert to something
+    # saner instead.
+    # We'll demystify it a bit with a verbose definition.
+    version_regex = re.compile(r"""
+    (?<!                # Zero-width negative lookbehind assertion
+        (
+            \d          # One digit
+            | \.        # Or one period
+        )               # One occurrence
+    )
+    # Following pattern must not follow a digit or period
+    (
+        \d{1,2}         # One or two digits
+        (
+            \.\d+       # Period and one or more digits
+        )+              # One or more occurrences
+        (
+            -rc\d+      # Hyphen, rc and one or more digits
+        )?              # Zero or one occurrence
+    )                   # One occurrence
+    """, re.VERBOSE)
+    match = version_regex.search(text)
+    if match:
+        return match.group(0)
+
+    # try a simpler regex that handles e.g. "blah 2020.01.100 foo" or "blah 2020.01 foo"
+    version_regex = re.compile(r"""
+    \d{1,4}\.\d{1,4}\.?\d{0,4}
+    """, re.VERBOSE)
+    match = version_regex.search(text)
+    if match:
+        return match.group(0)
+
+    return 'unknown version'
+
+
+def default_libdir() -> str:
+    if is_debianlike():
+        try:
+            pc = subprocess.Popen(['dpkg-architecture', '-qDEB_HOST_MULTIARCH'],
+                                  stdout=subprocess.PIPE,
+                                  stderr=subprocess.DEVNULL)
+            (stdo, _) = pc.communicate()
+            if pc.returncode == 0:
+                archpath = stdo.decode().strip()
+                return 'lib/' + archpath
+        except Exception:
+            pass
+    if is_freebsd() or is_irix():
+        return 'lib'
+    if os.path.isdir('/usr/lib64') and not os.path.islink('/usr/lib64'):
+        return 'lib64'
+    return 'lib'
+
+
+def default_libexecdir() -> str:
+    if is_haiku():
+        return 'lib'
+    # There is no way to auto-detect this, so it must be set at build time
+    return 'libexec'
+
+
+def default_prefix() -> str:
+    if is_windows():
+        return 'c:/'
+    if is_haiku():
+        return '/boot/system/non-packaged'
+    return '/usr/local'
+
+
+def default_datadir() -> str:
+    if is_haiku():
+        return 'data'
+    return 'share'
+
+
+def default_includedir() -> str:
+    if is_haiku():
+        return 'develop/headers'
+    return 'include'
+
+
+def default_infodir() -> str:
+    if is_haiku():
+        return 'documentation/info'
+    return 'share/info'
+
+
+def default_localedir() -> str:
+    if is_haiku():
+        return 'data/locale'
+    return 'share/locale'
+
+
+def default_mandir() -> str:
+    if is_haiku():
+        return 'documentation/man'
+    return 'share/man'
+
+
+def default_sbindir() -> str:
+    if is_haiku():
+        return 'bin'
+    return 'sbin'
+
+
+def default_sysconfdir() -> str:
+    if is_haiku():
+        return 'settings'
+    return 'etc'
+
+
+def has_path_sep(name: str, sep: str = '/\\') -> bool:
+    'Checks if any of the specified @sep path separators are in @name'
+    for each in sep:
+        if each in name:
+            return True
+    return False
+
+
+if is_windows():
+    # shlex.split is not suitable for splitting command lines on Windows (https://bugs.python.org/issue1724822);
+    # shlex.quote is similarly problematic. Below are "proper" implementations of these functions according to
+    # https://docs.microsoft.com/en-us/cpp/c-language/parsing-c-command-line-arguments and
+    # https://blogs.msdn.microsoft.com/twistylittlepassagesallalike/2011/04/23/everyone-quotes-command-line-arguments-the-wrong-way/
+
+    _whitespace = ' \t\n\r'
+    _find_unsafe_char = re.compile(fr'[{_whitespace}"]').search
+
+    def quote_arg(arg: str) -> str:
+        if arg and not _find_unsafe_char(arg):
+            return arg
+
+        result = '"'
+        num_backslashes = 0
+        for c in arg:
+            if c == '\\':
+                num_backslashes += 1
+            else:
+                if c == '"':
+                    # Escape all backslashes and the following double quotation mark
+                    num_backslashes = num_backslashes * 2 + 1
+
+                result += num_backslashes * '\\' + c
+                num_backslashes = 0
+
+        # Escape all backslashes, but let the terminating double quotation
+        # mark we add below be interpreted as a metacharacter
+        result += (num_backslashes * 2) * '\\' + '"'
+        return result
+
+    def split_args(cmd: str) -> T.List[str]:
+        result: T.List[str] = []
+        arg = ''
+        num_backslashes = 0
+        num_quotes = 0
+        in_quotes = False
+        for c in cmd:
+            if c == '\\':
+                num_backslashes += 1
+            else:
+                if c == '"' and not num_backslashes % 2:
+                    # unescaped quote, eat it
+                    arg += (num_backslashes // 2) * '\\'
+                    num_quotes += 1
+                    in_quotes = not in_quotes
+                elif c in _whitespace and not in_quotes:
+                    if arg or num_quotes:
+                        # reached the end of the argument
+                        result.append(arg)
+                        arg = ''
+                        num_quotes = 0
+                else:
+                    if c == '"':
+                        # escaped quote
+                        num_backslashes = (num_backslashes - 1) // 2
+
+                    arg += num_backslashes * '\\' + c
+
+                num_backslashes = 0
+
+        if arg or num_quotes:
+            result.append(arg)
+
+        return result
+else:
+    def quote_arg(arg: str) -> str:
+        return shlex.quote(arg)
+
+    def split_args(cmd: str) -> T.List[str]:
+        return shlex.split(cmd)
+
+
+def join_args(args: T.Iterable[str]) -> str:
+    return ' '.join([quote_arg(x) for x in args])
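+
+# Round-trip sketch (illustrative): quoting then splitting preserves the
+# original argument vector with either implementation above:
+#
+#     >>> split_args(join_args(['a b', 'c']))
+#     ['a b', 'c']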
+
+
+def do_replacement(regex: T.Pattern[str], line: str,
+                   variable_format: Literal['meson', 'cmake', 'cmake@'],
+                   confdata: T.Union[T.Dict[str, T.Tuple[str, T.Optional[str]]], 'ConfigurationData']) -> T.Tuple[str, T.Set[str]]:
+    missing_variables: T.Set[str] = set()
+    if variable_format == 'cmake':
+        start_tag = '${'
+        backslash_tag = '\\${'
+    else:
+        start_tag = '@'
+        backslash_tag = '\\@'
+
+    def variable_replace(match: T.Match[str]) -> str:
+        # Pairs of escape characters before '@' or '\@'
+        if match.group(0).endswith('\\'):
+            num_escapes = match.end(0) - match.start(0)
+            return '\\' * (num_escapes // 2)
+        # Single escape character and '@'
+        elif match.group(0) == backslash_tag:
+            return start_tag
+        # Template variable to be replaced
+        else:
+            varname = match.group(1)
+            var_str = ''
+            if varname in confdata:
+                var, _ = confdata.get(varname)
+                if isinstance(var, str):
+                    var_str = var
+                elif isinstance(var, int):
+                    var_str = str(var)
+                else:
+                    msg = f'Tried to replace variable {varname!r} value with ' \
+                          f'something other than a string or int: {var!r}'
+                    raise MesonException(msg)
+            else:
+                missing_variables.add(varname)
+            return var_str
+    return re.sub(regex, variable_replace, line), missing_variables
+
+def do_define(regex: T.Pattern[str], line: str, confdata: 'ConfigurationData',
+              variable_format: Literal['meson', 'cmake', 'cmake@'], subproject: T.Optional[SubProject] = None) -> str:
+    def get_cmake_define(line: str, confdata: 'ConfigurationData') -> str:
+        arr = line.split()
+        define_value: T.List[str] = []
+        for token in arr[2:]:
+            try:
+                v, _ = confdata.get(token)
+                define_value += [str(v)]
+            except KeyError:
+                define_value += [token]
+        return ' '.join(define_value)
+
+    arr = line.split()
+    if len(arr) != 2:
+        if variable_format == 'meson':
+            raise MesonException('#mesondefine does not contain exactly two tokens: %s' % line.strip())
+        elif subproject is not None:
+            from ..interpreterbase.decorators import FeatureNew
+            FeatureNew.single_use('cmakedefine without exactly two tokens', '0.54.1', subproject)
+
+    varname = arr[1]
+    try:
+        v, _ = confdata.get(varname)
+    except KeyError:
+        return '/* #undef %s */\n' % varname
+    if isinstance(v, bool):
+        if v:
+            return '#define %s\n' % varname
+        else:
+            return '#undef %s\n' % varname
+    elif isinstance(v, int):
+        return '#define %s %d\n' % (varname, v)
+    elif isinstance(v, str):
+        if variable_format == 'meson':
+            result = v
+        else:
+            result = get_cmake_define(line, confdata)
+        result = f'#define {varname} {result}\n'
+        result, _ = do_replacement(regex, result, variable_format, confdata)
+        return result
+    else:
+        raise MesonException('#mesondefine argument "%s" is of unknown type.' % varname)
+
+def get_variable_regex(variable_format: Literal['meson', 'cmake', 'cmake@'] = 'meson') -> T.Pattern[str]:
+    # Only allow (a-z, A-Z, 0-9, _, -) as valid characters for a define
+    # Also allow escaping '@' with '\@'
+    if variable_format in {'meson', 'cmake@'}:
+        regex = re.compile(r'(?:\\\\)+(?=\\?@)|\\@|@([-a-zA-Z0-9_]+)@')
+    else:
+        regex = re.compile(r'(?:\\\\)+(?=\\?\$)|\\\${|\${([-a-zA-Z0-9_]+)}')
+    return regex
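+
+# Illustration (hypothetical confdata): substituting a meson-style template
+# in a single line, where confdata maps names to (value, description) pairs:
+#
+#     >>> do_replacement(get_variable_regex('meson'), 'x = @FOO@\n',
+#     ...                'meson', {'FOO': ('bar', None)})
+#     ('x = bar\n', set())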
+
+def do_conf_str(src: str, data: T.List[str], confdata: 'ConfigurationData',
+                variable_format: Literal['meson', 'cmake', 'cmake@'],
+                subproject: T.Optional[SubProject] = None) -> T.Tuple[T.List[str], T.Set[str], bool]:
+    def line_is_valid(line: str, variable_format: str) -> bool:
+        if variable_format == 'meson':
+            if '#cmakedefine' in line:
+                return False
+        else: # cmake format
+            if '#mesondefine' in line:
+                return False
+        return True
+
+    regex = get_variable_regex(variable_format)
+
+    search_token = '#mesondefine'
+    if variable_format != 'meson':
+        search_token = '#cmakedefine'
+
+    result: T.List[str] = []
+    missing_variables: T.Set[str] = set()
+    # Detect when the configuration data is empty and no tokens were found
+    # during substitution so we can warn the user to use the `copy:` kwarg.
+    confdata_useless = not confdata.keys()
+    for line in data:
+        if line.startswith(search_token):
+            confdata_useless = False
+            line = do_define(regex, line, confdata, variable_format, subproject)
+        else:
+            if not line_is_valid(line, variable_format):
+                raise MesonException(f'Format error in {src}: saw "{line.strip()}" when format set to "{variable_format}"')
+            line, missing = do_replacement(regex, line, variable_format, confdata)
+            missing_variables.update(missing)
+            if missing:
+                confdata_useless = False
+        result.append(line)
+
+    return result, missing_variables, confdata_useless
+
+def do_conf_file(src: str, dst: str, confdata: 'ConfigurationData',
+                 variable_format: Literal['meson', 'cmake', 'cmake@'],
+                 encoding: str = 'utf-8', subproject: T.Optional[SubProject] = None) -> T.Tuple[T.Set[str], bool]:
+    try:
+        with open(src, encoding=encoding, newline='') as f:
+            data = f.readlines()
+    except Exception as e:
+        raise MesonException(f'Could not read input file {src}: {e!s}')
+
+    (result, missing_variables, confdata_useless) = do_conf_str(src, data, confdata, variable_format, subproject)
+    dst_tmp = dst + '~'
+    try:
+        with open(dst_tmp, 'w', encoding=encoding, newline='') as f:
+            f.writelines(result)
+    except Exception as e:
+        raise MesonException(f'Could not write output file {dst}: {e!s}')
+    shutil.copymode(src, dst_tmp)
+    replace_if_different(dst, dst_tmp)
+    return missing_variables, confdata_useless
+
+CONF_C_PRELUDE = '''/*
+ * Autogenerated by the Meson build system.
+ * Do not edit, your changes will be lost.
+ */
+
+#pragma once
+
+'''
+
+CONF_NASM_PRELUDE = '''; Autogenerated by the Meson build system.
+; Do not edit, your changes will be lost.
+
+'''
+
+def dump_conf_header(ofilename: str, cdata: 'ConfigurationData', output_format: T.Literal['c', 'nasm']) -> None:
+    if output_format == 'c':
+        prelude = CONF_C_PRELUDE
+        prefix = '#'
+    else:
+        prelude = CONF_NASM_PRELUDE
+        prefix = '%'
+
+    ofilename_tmp = ofilename + '~'
+    with open(ofilename_tmp, 'w', encoding='utf-8') as ofile:
+        ofile.write(prelude)
+        for k in sorted(cdata.keys()):
+            (v, desc) = cdata.get(k)
+            if desc:
+                if output_format == 'c':
+                    ofile.write('/* %s */\n' % desc)
+                elif output_format == 'nasm':
+                    for line in desc.split('\n'):
+                        ofile.write('; %s\n' % line)
+            if isinstance(v, bool):
+                if v:
+                    ofile.write(f'{prefix}define {k}\n\n')
+                else:
+                    ofile.write(f'{prefix}undef {k}\n\n')
+            elif isinstance(v, (int, str)):
+                ofile.write(f'{prefix}define {k} {v}\n\n')
+            else:
+                raise MesonException('Unknown data type in configuration file entry: ' + k)
+    replace_if_different(ofilename, ofilename_tmp)
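+
+# Illustration (hypothetical cdata entry): {'HAVE_FOO': (True, 'foo check')}
+# produces, in 'c' output format:
+#
+#     /* foo check */
+#     #define HAVE_FOO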
+
+
+def replace_if_different(dst: str, dst_tmp: str) -> None:
+    # If contents are identical, don't touch the file to prevent
+    # unnecessary rebuilds.
+    different = True
+    try:
+        with open(dst, 'rb') as f1, open(dst_tmp, 'rb') as f2:
+            if f1.read() == f2.read():
+                different = False
+    except FileNotFoundError:
+        pass
+    if different:
+        os.replace(dst_tmp, dst)
+    else:
+        os.unlink(dst_tmp)
+
+
+def listify(item: T.Any, flatten: bool = True) -> T.List[T.Any]:
+    '''
+    Returns a list with all args embedded in a list if they are not a list.
+    This function preserves order.
+    @flatten: Convert lists of lists to a flat list
+    '''
+    if not isinstance(item, list):
+        return [item]
+    result: T.List[T.Any] = []
+    for i in item:
+        if flatten and isinstance(i, list):
+            result += listify(i, flatten=True)
+        else:
+            result.append(i)
+    return result
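+
+# Illustration: nested lists are flattened recursively while order is kept:
+#     listify([1, [2, [3]], 4]) == [1, 2, 3, 4]
+#     listify([1, [2]], flatten=False) == [1, [2]]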
+
+
+def extract_as_list(dict_object: T.Dict[_T, _U], key: _T, pop: bool = False) -> T.List[_U]:
+    '''
+    Extracts all values from given dict_object and listifies them.
+    '''
+    fetch: T.Callable[[_T], _U] = dict_object.get
+    if pop:
+        fetch = dict_object.pop
+    # A missing or otherwise falsy value yields an empty list rather than [None]
+    return listify(fetch(key) or [], flatten=True)
+
+
+def typeslistify(item: 'T.Union[_T, T.Sequence[_T]]',
+                 types: 'T.Union[T.Type[_T], T.Tuple[T.Type[_T]]]') -> T.List[_T]:
+    '''
+    Ensure that type(@item) is one of @types or a
+    list of items all of which are of type @types
+    '''
+    if isinstance(item, types):
+        item = T.cast('T.List[_T]', [item])
+    if not isinstance(item, list):
+        raise MesonException('Item must be a list or one of {!r}, not {!r}'.format(types, type(item)))
+    for i in item:
+        if i is not None and not isinstance(i, types):
+            raise MesonException('List item must be one of {!r}, not {!r}'.format(types, type(i)))
+    return item
+
+
+def stringlistify(item: T.Union[T.Any, T.Sequence[T.Any]]) -> T.List[str]:
+    return typeslistify(item, str)
+
+
+def expand_arguments(args: T.Iterable[str]) -> T.Optional[T.List[str]]:
+    expanded_args: T.List[str] = []
+    for arg in args:
+        if not arg.startswith('@'):
+            expanded_args.append(arg)
+            continue
+
+        args_file = arg[1:]
+        try:
+            with open(args_file, encoding='utf-8') as f:
+                extended_args = f.read().split()
+            expanded_args += extended_args
+        except Exception as e:
+            mlog.error('Expanding command line arguments:', args_file, 'not found')
+            mlog.exception(e)
+            return None
+    return expanded_args
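+
+# Illustration (hypothetical file): if 'resp.txt' contains "-O2 -g", then
+#     expand_arguments(['gcc', '@resp.txt']) == ['gcc', '-O2', '-g']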
+
+
+def partition(pred: T.Callable[[_T], object], iterable: T.Iterable[_T]) -> T.Tuple[T.Iterator[_T], T.Iterator[_T]]:
+    """Use a predicate to partition entries into false entries and true
+    entries.
+
+    >>> x, y = partition(is_odd, range(10))
+    >>> (list(x), list(y))
+    ([0, 2, 4, 6, 8], [1, 3, 5, 7, 9])
+    """
+    t1, t2 = tee(iterable)
+    return (t for t in t1 if not pred(t)), (t for t in t2 if pred(t))
+
+
+def Popen_safe(args: T.List[str], write: T.Optional[str] = None,
+               stdin: T.Union[T.TextIO, T.BinaryIO, int] = subprocess.DEVNULL,
+               stdout: T.Union[T.TextIO, T.BinaryIO, int] = subprocess.PIPE,
+               stderr: T.Union[T.TextIO, T.BinaryIO, int] = subprocess.PIPE,
+               **kwargs: T.Any) -> T.Tuple['subprocess.Popen[str]', str, str]:
+    import locale
+    encoding = locale.getpreferredencoding()
+    # Stdin defaults to DEVNULL otherwise the command run by us here might mess
+    # up the console and ANSI colors will stop working on Windows.
+    # If write is not None, set stdin to PIPE so data can be sent.
+    if write is not None:
+        stdin = subprocess.PIPE
+
+    try:
+        if not sys.stdout.encoding or encoding.upper() != 'UTF-8':
+            p, o, e = Popen_safe_legacy(args, write=write, stdin=stdin, stdout=stdout, stderr=stderr, **kwargs)
+        else:
+            p = subprocess.Popen(args, universal_newlines=True, encoding=encoding, close_fds=False,
+                                 stdin=stdin, stdout=stdout, stderr=stderr, **kwargs)
+            o, e = p.communicate(write)
+    except OSError as oserr:
+        if oserr.errno == errno.ENOEXEC:
+            raise MesonException(f'Failed running {args[0]!r}, binary or interpreter not executable.\n'
+                                 'Possibly wrong architecture or the executable bit is not set.')
+        raise
+    # Sometimes the command that we run will call another command which will be
+    # without the above stdin workaround, so set the console mode again just in
+    # case.
+    mlog.setup_console()
+    return p, o, e
+
+
+def Popen_safe_legacy(args: T.List[str], write: T.Optional[str] = None,
+                      stdin: T.Union[T.TextIO, T.BinaryIO, int] = subprocess.DEVNULL,
+                      stdout: T.Union[T.TextIO, T.BinaryIO, int] = subprocess.PIPE,
+                      stderr: T.Union[T.TextIO, T.BinaryIO, int] = subprocess.PIPE,
+                      **kwargs: T.Any) -> T.Tuple['subprocess.Popen[str]', str, str]:
+    p = subprocess.Popen(args, universal_newlines=False, close_fds=False,
+                         stdin=stdin, stdout=stdout, stderr=stderr, **kwargs)
+    input_ = None  # type: T.Optional[bytes]
+    if write is not None:
+        input_ = write.encode('utf-8')
+    o, e = p.communicate(input_)
+    if o is not None:
+        if sys.stdout.encoding is not None:
+            o = o.decode(encoding=sys.stdout.encoding, errors='replace').replace('\r\n', '\n')
+        else:
+            o = o.decode(errors='replace').replace('\r\n', '\n')
+    if e is not None:
+        if sys.stderr is not None and sys.stderr.encoding:
+            e = e.decode(encoding=sys.stderr.encoding, errors='replace').replace('\r\n', '\n')
+        else:
+            e = e.decode(errors='replace').replace('\r\n', '\n')
+    return p, o, e
+
+
+def Popen_safe_logged(args: T.List[str], msg: str = 'Called', **kwargs: T.Any) -> T.Tuple['subprocess.Popen[str]', str, str]:
+    '''
+    Wrapper around Popen_safe that assumes standard piped o/e and logs this to the meson log.
+    '''
+    p, o, e = Popen_safe(args, **kwargs)
+    rc, out, err = p.returncode, o.strip(), e.strip()
+    mlog.debug('-----------')
+    mlog.debug(f'{msg}: `{join_args(args)}` -> {rc}')
+    if out:
+        mlog.debug(f'stdout:\n{out}\n-----------')
+    if err:
+        mlog.debug(f'stderr:\n{err}\n-----------')
+    return p, o, e
+
+
+def iter_regexin_iter(regexiter: T.Iterable[str], initer: T.Iterable[str]) -> T.Optional[str]:
+    '''
+    Takes each regular expression in @regexiter and tries to search for it in
+    every item in @initer. If there is a match, returns that match.
+    Otherwise returns None.
+    '''
+    for regex in regexiter:
+        for ii in initer:
+            if not isinstance(ii, str):
+                continue
+            match = re.search(regex, ii)
+            if match:
+                return match.group()
+    return None
+
+
+def _substitute_values_check_errors(command: T.List[str], values: T.Dict[str, T.Union[str, T.List[str]]]) -> None:
+    # Error checking
+    inregex: T.List[str] = ['@INPUT([0-9]+)?@', '@PLAINNAME@', '@BASENAME@']
+    outregex: T.List[str] = ['@OUTPUT([0-9]+)?@', '@OUTDIR@']
+    if '@INPUT@' not in values:
+        # Error out if any input-derived templates are present in the command
+        match = iter_regexin_iter(inregex, command)
+        if match:
+            raise MesonException(f'Command cannot have {match!r}, since no input files were specified')
+    else:
+        if len(values['@INPUT@']) > 1:
+            # Error out if @PLAINNAME@ or @BASENAME@ is present in the command
+            match = iter_regexin_iter(inregex[1:], command)
+            if match:
+                raise MesonException(f'Command cannot have {match!r} when there is '
+                                     'more than one input file')
+        # Error out if an invalid @INPUTnn@ template was specified
+        for each in command:
+            if not isinstance(each, str):
+                continue
+            match2 = re.search(inregex[0], each)
+            if match2 and match2.group() not in values:
+                m = 'Command cannot have {!r} since there are only {!r} inputs'
+                raise MesonException(m.format(match2.group(), len(values['@INPUT@'])))
+    if '@OUTPUT@' not in values:
+        # Error out if any output-derived templates are present in the command
+        match = iter_regexin_iter(outregex, command)
+        if match:
+            raise MesonException(f'Command cannot have {match!r} since there are no outputs')
+    else:
+        # Error out if an invalid @OUTPUTnn@ template was specified
+        for each in command:
+            if not isinstance(each, str):
+                continue
+            match2 = re.search(outregex[0], each)
+            if match2 and match2.group() not in values:
+                m = 'Command cannot have {!r} since there are only {!r} outputs'
+                raise MesonException(m.format(match2.group(), len(values['@OUTPUT@'])))
+
+
+def substitute_values(command: T.List[str], values: T.Dict[str, T.Union[str, T.List[str]]]) -> T.List[str]:
+    '''
+    Substitute the template strings in the @values dict into the list of
+    strings @command and return a new list. For a full list of the templates,
+    see get_filenames_templates_dict()
+
+    If multiple inputs/outputs are given in the @values dictionary, we
+    substitute @INPUT@ and @OUTPUT@ only if they are the entire string, not
+    just a part of it, and in that case we substitute *all* of them.
+
+    The typing of this function is difficult, as only @OUTPUT@ and @INPUT@ can
+    be lists; everything else is a string. However, TypedDict cannot represent
+    this, as you can have optional keys, but not extra keys. We end up just
+    having to use asserts to convince type checkers that this is okay.
+
+    https://github.com/python/mypy/issues/4617
+    '''
+
+    def replace(m: T.Match[str]) -> str:
+        v = values[m.group(0)]
+        assert isinstance(v, str), 'for mypy'
+        return v
+
+    # Error checking
+    _substitute_values_check_errors(command, values)
+
+    # Substitution
+    outcmd: T.List[str] = []
+    rx_keys = [re.escape(key) for key in values if key not in ('@INPUT@', '@OUTPUT@')]
+    value_rx = re.compile('|'.join(rx_keys)) if rx_keys else None
+    for vv in command:
+        more: T.Optional[str] = None
+        if not isinstance(vv, str):
+            outcmd.append(vv)
+        elif '@INPUT@' in vv:
+            inputs = values['@INPUT@']
+            if vv == '@INPUT@':
+                outcmd += inputs
+            elif len(inputs) == 1:
+                outcmd.append(vv.replace('@INPUT@', inputs[0]))
+            else:
+                raise MesonException("Command has '@INPUT@' as part of a "
+                                     "string and more than one input file")
+        elif '@OUTPUT@' in vv:
+            outputs = values['@OUTPUT@']
+            if vv == '@OUTPUT@':
+                outcmd += outputs
+            elif len(outputs) == 1:
+                outcmd.append(vv.replace('@OUTPUT@', outputs[0]))
+            else:
+                raise MesonException("Command has '@OUTPUT@' as part of a "
+                                     "string and more than one output file")
+
+        # Append values that are exactly a template string.
+        # This is faster than a string replace.
+        elif vv in values:
+            o = values[vv]
+            assert isinstance(o, str), 'for mypy'
+            more = o
+        # Substitute everything else with replacement
+        elif value_rx:
+            more = value_rx.sub(replace, vv)
+        else:
+            more = vv
+
+        if more is not None:
+            outcmd.append(more)
+
+    return outcmd
+
+
+def get_filenames_templates_dict(inputs: T.List[str], outputs: T.List[str]) -> T.Dict[str, T.Union[str, T.List[str]]]:
+    '''
+    Create a dictionary with template strings as keys and values as values for
+    the following templates:
+
+    @INPUT@  - the full path to one or more input files, from @inputs
+    @OUTPUT@ - the full path to one or more output files, from @outputs
+    @OUTDIR@ - the full path to the directory containing the output files
+
+    If there is only one input file, the following keys are also created:
+
+    @PLAINNAME@ - the filename of the input file
+    @BASENAME@ - the filename of the input file with the extension removed
+
+    If there is more than one input file, the following keys are also created:
+
+    @INPUT0@, @INPUT1@, ... one for each input file
+
+    If there is more than one output file, the following keys are also created:
+
+    @OUTPUT0@, @OUTPUT1@, ... one for each output file
+    '''
+    values: T.Dict[str, T.Union[str, T.List[str]]] = {}
+    # Gather values derived from the input
+    if inputs:
+        # We want to substitute all the inputs.
+        values['@INPUT@'] = inputs
+        for (ii, vv) in enumerate(inputs):
+            # Write out @INPUT0@, @INPUT1@, ...
+            values[f'@INPUT{ii}@'] = vv
+        if len(inputs) == 1:
+            # Just one value, substitute @PLAINNAME@ and @BASENAME@
+            values['@PLAINNAME@'] = plain = os.path.basename(inputs[0])
+            values['@BASENAME@'] = os.path.splitext(plain)[0]
+    if outputs:
+        # Gather values derived from the outputs, similar to above.
+        values['@OUTPUT@'] = outputs
+        for (ii, vv) in enumerate(outputs):
+            values[f'@OUTPUT{ii}@'] = vv
+        # Outdir should be the same for all outputs
+        values['@OUTDIR@'] = os.path.dirname(outputs[0])
+        # Many external programs fail on empty arguments.
+        if values['@OUTDIR@'] == '':
+            values['@OUTDIR@'] = '.'
+    return values
+
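+# A minimal sketch of how the two helpers above combine (hypothetical
+# filenames, illustrative only):
+#
+#   values = get_filenames_templates_dict(['src/foo.c'], ['out/foo.o'])
+#   # values['@PLAINNAME@'] == 'foo.c', values['@BASENAME@'] == 'foo',
+#   # values['@OUTDIR@'] == 'out'
+#   cmd = substitute_values(['cc', '-c', '@INPUT@', '-o', '@OUTPUT@'], values)
+#   # cmd == ['cc', '-c', 'src/foo.c', '-o', 'out/foo.o']
+#
+# With more than one input, '@INPUT@' is only substituted when it is the
+# entire argument, never as part of a longer string.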
+
+def _make_tree_writable(topdir: str) -> None:
+    # Ensure all files and directories under topdir are writable
+    # (and readable) by owner.
+    for d, _, files in os.walk(topdir):
+        os.chmod(d, os.stat(d).st_mode | stat.S_IWRITE | stat.S_IREAD)
+        for fname in files:
+            fpath = os.path.join(d, fname)
+            if os.path.isfile(fpath):
+                os.chmod(fpath, os.stat(fpath).st_mode | stat.S_IWRITE | stat.S_IREAD)
+
+
+def windows_proof_rmtree(f: str) -> None:
+    # On Windows, if anyone is holding a file open, you can't
+    # delete it. For example, an antivirus scanner might be
+    # scanning files you are trying to delete. The only
+    # way around this is to try again and again.
+    delays = [0.1, 0.1, 0.2, 0.2, 0.2, 0.5, 0.5, 1, 1, 1, 1, 2]
+    writable = False
+    for d in delays:
+        try:
+            # Start by making the tree writable.
+            if not writable:
+                _make_tree_writable(f)
+                writable = True
+        except PermissionError:
+            time.sleep(d)
+            continue
+        try:
+            shutil.rmtree(f)
+            return
+        except FileNotFoundError:
+            return
+        except OSError:
+            time.sleep(d)
+    # Try one last time and throw if it fails.
+    shutil.rmtree(f)
+
+
+def windows_proof_rm(fpath: str) -> None:
+    """Like windows_proof_rmtree, but for a single file."""
+    if os.path.isfile(fpath):
+        os.chmod(fpath, os.stat(fpath).st_mode | stat.S_IWRITE | stat.S_IREAD)
+    delays = [0.1, 0.1, 0.2, 0.2, 0.2, 0.5, 0.5, 1, 1, 1, 1, 2]
+    for d in delays:
+        try:
+            os.unlink(fpath)
+            return
+        except FileNotFoundError:
+            return
+        except OSError:
+            time.sleep(d)
+    os.unlink(fpath)
+
+
+class TemporaryDirectoryWinProof(TemporaryDirectory):
+    """
+    Like TemporaryDirectory, but cleans things up using
+    windows_proof_rmtree()
+    """
+
+    def __exit__(self, exc: T.Any, value: T.Any, tb: T.Any) -> None:
+        try:
+            super().__exit__(exc, value, tb)
+        except OSError:
+            windows_proof_rmtree(self.name)
+
+    def cleanup(self) -> None:
+        try:
+            super().cleanup()
+        except OSError:
+            windows_proof_rmtree(self.name)
+
+
+def detect_subprojects(spdir_name: str, current_dir: str = '',
+                       result: T.Optional[T.Dict[str, T.List[str]]] = None) -> T.Dict[str, T.List[str]]:
+    if result is None:
+        result = {}
+    spdir = os.path.join(current_dir, spdir_name)
+    if not os.path.exists(spdir):
+        return result
+    for trial in glob(os.path.join(spdir, '*')):
+        basename = os.path.basename(trial)
+        if basename == 'packagecache':
+            continue
+        append_this = True
+        if os.path.isdir(trial):
+            detect_subprojects(spdir_name, trial, result)
+        elif trial.endswith('.wrap') and os.path.isfile(trial):
+            basename = os.path.splitext(basename)[0]
+        else:
+            append_this = False
+        if append_this:
+            if basename in result:
+                result[basename].append(trial)
+            else:
+                result[basename] = [trial]
+    return result
+
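+# A sketch of the result shape (hypothetical layout, illustrative only):
+# given subprojects/zlib.wrap and a bare directory subprojects/foo,
+#
+#   detect_subprojects('subprojects')
+#   # -> {'zlib': ['subprojects/zlib.wrap'], 'foo': ['subprojects/foo']}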
+
+def substring_is_in_list(substr: str, strlist: T.List[str]) -> bool:
+    for s in strlist:
+        if substr in s:
+            return True
+    return False
+
+
+class OrderedSet(T.MutableSet[_T]):
+    """A set that preserves the order in which items are added, by first
+    insertion.
+    """
+    def __init__(self, iterable: T.Optional[T.Iterable[_T]] = None):
+        self.__container: T.OrderedDict[_T, None] = collections.OrderedDict()
+        if iterable:
+            self.update(iterable)
+
+    def __contains__(self, value: object) -> bool:
+        return value in self.__container
+
+    def __iter__(self) -> T.Iterator[_T]:
+        return iter(self.__container.keys())
+
+    def __len__(self) -> int:
+        return len(self.__container)
+
+    def __repr__(self) -> str:
+        # Don't print 'OrderedSet("")' for an empty set.
+        if self.__container:
+            return 'OrderedSet("{}")'.format(
+                '", "'.join(repr(e) for e in self.__container.keys()))
+        return 'OrderedSet()'
+
+    def __reversed__(self) -> T.Iterator[_T]:
+        return reversed(self.__container.keys())
+
+    def add(self, value: _T) -> None:
+        self.__container[value] = None
+
+    def discard(self, value: _T) -> None:
+        if value in self.__container:
+            del self.__container[value]
+
+    def move_to_end(self, value: _T, last: bool = True) -> None:
+        self.__container.move_to_end(value, last)
+
+    def pop(self, last: bool = True) -> _T:
+        item, _ = self.__container.popitem(last)
+        return item
+
+    def update(self, iterable: T.Iterable[_T]) -> None:
+        for item in iterable:
+            self.__container[item] = None
+
+    def difference(self, set_: T.Iterable[_T]) -> 'OrderedSet[_T]':
+        return type(self)(e for e in self if e not in set_)
+
+    def difference_update(self, iterable: T.Iterable[_T]) -> None:
+        for item in iterable:
+            self.discard(item)
+
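+# A small illustration of OrderedSet semantics (illustrative only):
+#
+#   s = OrderedSet(['b', 'a', 'b', 'c'])
+#   list(s)          # -> ['b', 'a', 'c']  (first-insertion order, no duplicates)
+#   s.discard('a')
+#   list(s)          # -> ['b', 'c']
+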
+def relpath(path: str, start: str) -> str:
+    # On Windows a relative path can't be evaluated for paths on two different
+    # drives (e.g. c:\foo and f:\bar). The only thing left to do is to use the
+    # original absolute path.
+    try:
+        return os.path.relpath(path, start)
+    except (TypeError, ValueError):
+        return path
+
+def path_is_in_root(path: Path, root: Path, resolve: bool = False) -> bool:
+    # Check whether `path` is inside the directory `root`.
+    try:
+        if resolve:
+            path.resolve().relative_to(root.resolve())
+        else:
+            path.relative_to(root)
+    except ValueError:
+        return False
+    return True
+
+def relative_to_if_possible(path: Path, root: Path, resolve: bool = False) -> Path:
+    try:
+        if resolve:
+            return path.resolve().relative_to(root.resolve())
+        else:
+            return path.relative_to(root)
+    except ValueError:
+        return path
+
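+# For example (illustrative paths):
+#
+#   path_is_in_root(Path('/a/b/c'), Path('/a'))         # -> True
+#   path_is_in_root(Path('/x/y'), Path('/a'))           # -> False
+#   relative_to_if_possible(Path('/a/b'), Path('/a'))   # -> Path('b')
+#   relative_to_if_possible(Path('/x/y'), Path('/a'))   # -> Path('/x/y')
+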
+class LibType(enum.IntEnum):
+
+    """Enumeration for library types."""
+
+    SHARED = 0
+    STATIC = 1
+    PREFER_SHARED = 2
+    PREFER_STATIC = 3
+
+
+class ProgressBarFallback:  # lgtm [py/iter-returns-non-self]
+    '''
+    Fallback progress bar implementation when tqdm is not found
+
+    Since this class is not an actual iterator, but only provides a minimal
+    fallback, it is safe to ignore the 'Iterator does not return self from
+    __iter__ method' warning.
+    '''
+    def __init__(self, iterable: T.Optional[T.Iterable[str]] = None, total: T.Optional[int] = None,
+                 bar_type: T.Optional[str] = None, desc: T.Optional[str] = None,
+                 disable: T.Optional[bool] = None):
+        if iterable is not None:
+            self.iterable = iter(iterable)
+            return
+        self.total = total
+        self.done = 0
+        self.printed_dots = 0
+        self.disable = not mlog.colorize_console() if disable is None else disable
+        if not self.disable:
+            if self.total and bar_type == 'download':
+                print('Download size:', self.total)
+            if desc:
+                print(f'{desc}: ', end='')
+
+    # Pretend to be an iterator when called as one and don't print any
+    # progress
+    def __iter__(self) -> T.Iterator[str]:
+        return self.iterable
+
+    def __next__(self) -> str:
+        return next(self.iterable)
+
+    def print_dot(self) -> None:
+        if not self.disable:
+            print('.', end='')
+            sys.stdout.flush()
+        self.printed_dots += 1
+
+    def update(self, progress: int) -> None:
+        self.done += progress
+        if not self.total:
+            # Just print one dot per call if we don't have a total length
+            self.print_dot()
+            return
+        ratio = int(self.done / self.total * 10)
+        while self.printed_dots < ratio:
+            self.print_dot()
+
+    def close(self) -> None:
+        if not self.disable:
+            print()
+
+try:
+    from tqdm import tqdm
+except ImportError:
+    # ideally we would use a typing.Protocol here, but it's part of typing_extensions until 3.8
+    ProgressBar: T.Union[T.Type[ProgressBarFallback], T.Type[ProgressBarTqdm]] = ProgressBarFallback
+else:
+    class ProgressBarTqdm(tqdm):
+        def __init__(self, *args: T.Any, bar_type: T.Optional[str] = None, **kwargs: T.Any) -> None:
+            if bar_type == 'download':
+                kwargs.update({'unit': 'B',
+                               'unit_scale': True,
+                               'unit_divisor': 1024,
+                               'leave': True,
+                               'bar_format': '{l_bar}{bar}| {n_fmt}/{total_fmt} {rate_fmt} eta {remaining}',
+                               })
+
+            else:
+                kwargs.update({'leave': False,
+                               'bar_format': '{l_bar}{bar}| {n_fmt}/{total_fmt} eta {remaining}',
+                               })
+            super().__init__(*args, **kwargs)
+
+    ProgressBar = ProgressBarTqdm
+
+
+class RealPathAction(argparse.Action):
+    def __init__(self, option_strings: T.List[str], dest: str, default: str = '.', **kwargs: T.Any):
+        default = os.path.abspath(os.path.realpath(default))
+        super().__init__(option_strings, dest, nargs=None, default=default, **kwargs)
+
+    def __call__(self, parser: argparse.ArgumentParser, namespace: argparse.Namespace,
+                 values: T.Union[str, T.Sequence[T.Any], None], option_string: T.Optional[str] = None) -> None:
+        assert isinstance(values, str)
+        setattr(namespace, self.dest, os.path.abspath(os.path.realpath(values)))
+
+
+def get_wine_shortpath(winecmd: T.List[str], wine_paths: T.List[str],
+                       workdir: T.Optional[str] = None) -> str:
+    '''
+    WINEPATH size is limited to 1024 bytes which can easily be exceeded when
+    adding the path to every dll inside build directory. See
+    https://bugs.winehq.org/show_bug.cgi?id=45810.
+
+    To shorten it as much as possible we use paths relative to `workdir`
+    where possible and convert absolute paths to Windows shortpaths (e.g.
+    "/usr/x86_64-w64-mingw32/lib" to "Z:\\usr\\X86_~FWL\\lib").
+
+    This limitation has reportedly been fixed in wine >= 6.4.
+    '''
+
+    # Remove duplicates
+    wine_paths = list(OrderedSet(wine_paths))
+
+    # Check if it's already short enough
+    wine_path = ';'.join(wine_paths)
+    if len(wine_path) <= 1024:
+        return wine_path
+
+    # Check if we have wine >= 6.4
+    from ..programs import ExternalProgram
+    wine = ExternalProgram('wine', winecmd, silent=True)
+    if version_compare(wine.get_version(), '>=6.4'):
+        return wine_path
+
+    # Check paths that can be reduced by making them relative to workdir.
+    rel_paths: T.List[str] = []
+    if workdir:
+        abs_paths: T.List[str] = []
+        for p in wine_paths:
+            try:
+                rel = Path(p).relative_to(workdir)
+                rel_paths.append(str(rel))
+            except ValueError:
+                abs_paths.append(p)
+        wine_paths = abs_paths
+
+    if wine_paths:
+        # BAT script that takes a list of paths in argv and prints semi-colon separated shortpaths
+        with NamedTemporaryFile('w', suffix='.bat', encoding='utf-8', delete=False) as bat_file:
+            bat_file.write('''
+            @ECHO OFF
+            for %%x in (%*) do (
+                echo|set /p=;%%~sx
+            )
+            ''')
+        try:
+            stdout = subprocess.check_output(winecmd + ['cmd', '/C', bat_file.name] + wine_paths,
+                                             encoding='utf-8', stderr=subprocess.DEVNULL)
+            stdout = stdout.strip(';')
+            if stdout:
+                wine_paths = stdout.split(';')
+            else:
+                mlog.warning('Could not shorten WINEPATH: empty stdout')
+        except subprocess.CalledProcessError as e:
+            mlog.warning(f'Could not shorten WINEPATH: {str(e)}')
+        finally:
+            os.unlink(bat_file.name)
+    wine_path = ';'.join(rel_paths + wine_paths)
+    if len(wine_path) > 1024:
+        mlog.warning('WINEPATH exceeds 1024 characters which could cause issues')
+    return wine_path
+
+
+def run_once(func: T.Callable[..., _T]) -> T.Callable[..., _T]:
+    ret: T.List[_T] = []
+
+    @wraps(func)
+    def wrapper(*args: T.Any, **kwargs: T.Any) -> _T:
+        if ret:
+            return ret[0]
+
+        val = func(*args, **kwargs)
+        ret.append(val)
+        return val
+
+    return wrapper
+
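+# Usage sketch (illustrative only): the wrapped function runs at most once,
+# and the first result is returned for every later call, regardless of the
+# arguments passed.
+#
+#   @run_once
+#   def detect_something() -> str:
+#       return expensive_probe()  # hypothetical helper
+#
+#   detect_something()  # runs expensive_probe()
+#   detect_something()  # returns the cached result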
+
+def generate_list(func: T.Callable[..., T.Generator[_T, None, None]]) -> T.Callable[..., T.List[_T]]:
+    @wraps(func)
+    def wrapper(*args: T.Any, **kwargs: T.Any) -> T.List[_T]:
+        return list(func(*args, **kwargs))
+
+    return wrapper
+
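+# For example (illustrative only):
+#
+#   @generate_list
+#   def powers_of_two(n: int) -> T.Generator[int, None, None]:
+#       for i in range(n):
+#           yield 2 ** i
+#
+#   powers_of_two(3)  # -> [1, 2, 4], a list rather than a generator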
+
+class OptionOverrideProxy(collections.abc.Mapping):
+    '''Mimic an option list but transparently override selected option
+    values.
+    '''
+
+    # TODO: the typing here could be made more explicit using a TypedDict from
+    # Python 3.8 or typing_extensions
+
+    def __init__(self, overrides: T.Dict['OptionKey', T.Any], options: 'KeyedOptionDictType',
+                 subproject: T.Optional[str] = None):
+        self.overrides = overrides
+        self.options = options
+        self.subproject = subproject
+
+    def __getitem__(self, key: 'OptionKey') -> 'UserOption':
+        # FIXME: This is fundamentally the same algorithm as interpreter.get_option_internal().
+        # We should try to share the code somehow.
+        key = key.evolve(subproject=self.subproject)
+        if not key.is_project():
+            opt = self.options.get(key)
+            if opt is None or opt.yielding:
+                opt = self.options[key.as_root()]
+        else:
+            opt = self.options[key]
+            if opt.yielding:
+                opt = self.options.get(key.as_root(), opt)
+        override_value = self.overrides.get(key.as_root())
+        if override_value is not None:
+            opt = copy.copy(opt)
+            opt.set_value(override_value)
+        return opt
+
+    def __iter__(self) -> T.Iterator['OptionKey']:
+        return iter(self.options)
+
+    def __len__(self) -> int:
+        return len(self.options)
+
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, OptionOverrideProxy):
+            return NotImplemented
+        t1 = (self.overrides, self.subproject, self.options)
+        t2 = (other.overrides, other.subproject, other.options)
+        return t1 == t2
+
+
+class OptionType(enum.IntEnum):
+
+    """Enum used to specify what kind of argument a thing is."""
+
+    BUILTIN = 0
+    BACKEND = 1
+    BASE = 2
+    COMPILER = 3
+    PROJECT = 4
+
+# This is copied from coredata. There is no way to share this, because this
+# is used in the OptionKey constructor, and the coredata lists are
+# OptionKeys...
+_BUILTIN_NAMES = {
+    'prefix',
+    'bindir',
+    'datadir',
+    'includedir',
+    'infodir',
+    'libdir',
+    'licensedir',
+    'libexecdir',
+    'localedir',
+    'localstatedir',
+    'mandir',
+    'sbindir',
+    'sharedstatedir',
+    'sysconfdir',
+    'auto_features',
+    'backend',
+    'buildtype',
+    'debug',
+    'default_library',
+    'errorlogs',
+    'genvslite',
+    'install_umask',
+    'layout',
+    'optimization',
+    'prefer_static',
+    'stdsplit',
+    'strip',
+    'unity',
+    'unity_size',
+    'warning_level',
+    'werror',
+    'wrap_mode',
+    'force_fallback_for',
+    'pkg_config_path',
+    'cmake_prefix_path',
+    'vsenv',
+}
+
+
+def _classify_argument(key: 'OptionKey') -> OptionType:
+    """Classify arguments into groups so we know which dict to assign them to."""
+
+    if key.name.startswith('b_'):
+        return OptionType.BASE
+    elif key.lang is not None:
+        return OptionType.COMPILER
+    elif key.name in _BUILTIN_NAMES or key.module:
+        return OptionType.BUILTIN
+    elif key.name.startswith('backend_'):
+        assert key.machine is MachineChoice.HOST, str(key)
+        return OptionType.BACKEND
+    else:
+        assert key.machine is MachineChoice.HOST, str(key)
+        return OptionType.PROJECT
+
+
+@total_ordering
+class OptionKey:
+
+    """Represents an option key in the various option dictionaries.
+
+    This provides a flexible, powerful way to map option names from their
+    external form (things like subproject:build.option) to something that is
+    internally easier to reason about and produce.
+    """
+
+    __slots__ = ['name', 'subproject', 'machine', 'lang', '_hash', 'type', 'module']
+
+    name: str
+    subproject: str
+    machine: MachineChoice
+    lang: T.Optional[str]
+    _hash: int
+    type: OptionType
+    module: T.Optional[str]
+
+    def __init__(self, name: str, subproject: str = '',
+                 machine: MachineChoice = MachineChoice.HOST,
+                 lang: T.Optional[str] = None,
+                 module: T.Optional[str] = None,
+                 _type: T.Optional[OptionType] = None):
+        # the _type option to the constructor is kinda private. We want to be
+        # able to save the state and avoid the lookup function when
+        # pickling/unpickling, but we need to be able to calculate it when
+        # constructing a new OptionKey
+        object.__setattr__(self, 'name', name)
+        object.__setattr__(self, 'subproject', subproject)
+        object.__setattr__(self, 'machine', machine)
+        object.__setattr__(self, 'lang', lang)
+        object.__setattr__(self, 'module', module)
+        object.__setattr__(self, '_hash', hash((name, subproject, machine, lang, module)))
+        if _type is None:
+            _type = _classify_argument(self)
+        object.__setattr__(self, 'type', _type)
+
+    def __setattr__(self, key: str, value: T.Any) -> None:
+        raise AttributeError('OptionKey instances do not support mutation.')
+
+    def __getstate__(self) -> T.Dict[str, T.Any]:
+        return {
+            'name': self.name,
+            'subproject': self.subproject,
+            'machine': self.machine,
+            'lang': self.lang,
+            '_type': self.type,
+            'module': self.module,
+        }
+
+    def __setstate__(self, state: T.Dict[str, T.Any]) -> None:
+        """De-serialize the state of a pickle.
+
+        This is very clever. __init__ is not a constructor, it's an
+        initializer, therefore it's safe to call more than once. We create a
+        state in the custom __getstate__ method, which is valid to pass
+        splatted to the initializer.
+        """
+        # Mypy doesn't like this, because it's so clever.
+        self.__init__(**state)  # type: ignore
+
+    def __hash__(self) -> int:
+        return self._hash
+
+    def _to_tuple(self) -> T.Tuple[str, OptionType, str, str, MachineChoice, str]:
+        return (self.subproject, self.type, self.lang or '', self.module or '', self.machine, self.name)
+
+    def __eq__(self, other: object) -> bool:
+        if isinstance(other, OptionKey):
+            return self._to_tuple() == other._to_tuple()
+        return NotImplemented
+
+    def __lt__(self, other: object) -> bool:
+        if isinstance(other, OptionKey):
+            return self._to_tuple() < other._to_tuple()
+        return NotImplemented
+
+    def __str__(self) -> str:
+        out = self.name
+        if self.lang:
+            out = f'{self.lang}_{out}'
+        if self.machine is MachineChoice.BUILD:
+            out = f'build.{out}'
+        if self.module:
+            out = f'{self.module}.{out}'
+        if self.subproject:
+            out = f'{self.subproject}:{out}'
+        return out
+
+    def __repr__(self) -> str:
+        return f'OptionKey({self.name!r}, {self.subproject!r}, {self.machine!r}, {self.lang!r}, {self.module!r}, {self.type!r})'
+
+    @classmethod
+    def from_string(cls, raw: str) -> 'OptionKey':
+        """Parse the raw command line format into a three part tuple.
+
+        This takes strings like `mysubproject:build.myoption` and Creates an
+        OptionKey out of them.
+        """
+        try:
+            subproject, raw2 = raw.split(':')
+        except ValueError:
+            subproject, raw2 = '', raw
+
+        module = None
+        for_machine = MachineChoice.HOST
+        try:
+            prefix, raw3 = raw2.split('.')
+            if prefix == 'build':
+                for_machine = MachineChoice.BUILD
+            else:
+                module = prefix
+        except ValueError:
+            raw3 = raw2
+
+        from ..compilers import all_languages
+        if any(raw3.startswith(f'{l}_') for l in all_languages):
+            lang, opt = raw3.split('_', 1)
+        else:
+            lang, opt = None, raw3
+        assert ':' not in opt
+        assert '.' not in opt
+
+        return cls(opt, subproject, for_machine, lang, module)
+
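+    # For example (illustrative only):
+    #
+    #   k = OptionKey.from_string('mysub:build.cpp_std')
+    #   # k.subproject == 'mysub', k.machine is MachineChoice.BUILD,
+    #   # k.lang == 'cpp', k.name == 'std'
+    #   str(k)  # -> 'mysub:build.cpp_std'
+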
+    def evolve(self, name: T.Optional[str] = None, subproject: T.Optional[str] = None,
+               machine: T.Optional[MachineChoice] = None, lang: T.Optional[str] = '',
+               module: T.Optional[str] = '') -> 'OptionKey':
+        """Create a new copy of this key, but with altered members.
+
+        For example:
+        >>> a = OptionKey('foo', '', MachineChoice.HOST)
+        >>> b = OptionKey('foo', 'bar', MachineChoice.HOST)
+        >>> b == a.evolve(subproject='bar')
+        True
+        """
+        # We have to be a little clever with lang here, because lang is valid
+        # as None, for non-compiler options
+        return OptionKey(
+            name if name is not None else self.name,
+            subproject if subproject is not None else self.subproject,
+            machine if machine is not None else self.machine,
+            lang if lang != '' else self.lang,
+            module if module != '' else self.module
+        )
+
+    def as_root(self) -> 'OptionKey':
+        """Convenience method for key.evolve(subproject='')."""
+        return self.evolve(subproject='')
+
+    def as_build(self) -> 'OptionKey':
+        """Convenience method for key.evolve(machine=MachineChoice.BUILD)."""
+        return self.evolve(machine=MachineChoice.BUILD)
+
+    def as_host(self) -> 'OptionKey':
+        """Convenience method for key.evolve(machine=MachineChoice.HOST)."""
+        return self.evolve(machine=MachineChoice.HOST)
+
+    def is_backend(self) -> bool:
+        """Convenience method to check if this is a backend option."""
+        return self.type is OptionType.BACKEND
+
+    def is_builtin(self) -> bool:
+        """Convenience method to check if this is a builtin option."""
+        return self.type is OptionType.BUILTIN
+
+    def is_compiler(self) -> bool:
+        """Convenience method to check if this is a builtin option."""
+        return self.type is OptionType.COMPILER
+
+    def is_project(self) -> bool:
+        """Convenience method to check if this is a project option."""
+        return self.type is OptionType.PROJECT
+
+    def is_base(self) -> bool:
+        """Convenience method to check if this is a base option."""
+        return self.type is OptionType.BASE
+
+
+def pickle_load(filename: str, object_name: str, object_type: T.Type[_PL]) -> _PL:
+    load_fail_msg = f'{object_name} file {filename!r} is corrupted. Try with a fresh build tree.'
+    try:
+        with open(filename, 'rb') as f:
+            obj = pickle.load(f)
+    except (pickle.UnpicklingError, EOFError):
+        raise MesonException(load_fail_msg)
+    except (TypeError, ModuleNotFoundError, AttributeError):
+        build_dir = os.path.dirname(os.path.dirname(filename))
+        raise MesonException(
+            f"{object_name} file {filename!r} references functions or classes that don't "
+            "exist. This probably means that it was generated with an old "
+            "version of meson. Try running from the source directory "
+            f'meson setup {build_dir} --wipe')
+    if not isinstance(obj, object_type):
+        raise MesonException(load_fail_msg)
+
+    # Because these Protocols are not available at runtime (and cannot be made
+    # available at runtime until we drop support for Python < 3.8), we have to
+    # do a bit of hackery so that mypy understands what's going on here
+    version: str
+    if hasattr(obj, 'version'):
+        version = T.cast('_VerPickleLoadable', obj).version
+    else:
+        version = T.cast('_EnvPickleLoadable', obj).environment.coredata.version
+
+    from ..coredata import version as coredata_version
+    from ..coredata import major_versions_differ, MesonVersionMismatchException
+    if major_versions_differ(version, coredata_version):
+        raise MesonVersionMismatchException(version, coredata_version)
+    return obj
+
+
+def first(iter: T.Iterable[_T], predicate: T.Callable[[_T], bool]) -> T.Optional[_T]:
+    """Find the first entry in an iterable where the given predicate is true
+
+    :param iter: The iterable to search
+    :param predicate: A function that takes an element from the iterable and
+        returns True for a match, otherwise False
+    :return: The first matching element, or None if no element matches
+    """
+    for i in iter:
+        if predicate(i):
+            return i
+    return None
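+
+
+# For example (illustrative only):
+#
+#   first([1, 2, 3], lambda x: x > 1)   # -> 2
+#   first([1, 2, 3], lambda x: x > 9)   # -> None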
diff --git a/vendored-meson/meson/mesonbuild/utils/vsenv.py b/vendored-meson/meson/mesonbuild/utils/vsenv.py
new file mode 100644
index 000000000000..3c2687884de2
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/utils/vsenv.py
@@ -0,0 +1,123 @@
+from __future__ import annotations
+
+import os
+import subprocess
+import json
+import pathlib
+import shutil
+import tempfile
+
+from .. import mlog
+from .universal import MesonException, is_windows, windows_detect_native_arch
+
+
+__all__ = [
+    'setup_vsenv',
+]
+
+
+bat_template = '''@ECHO OFF
+
+call "{}"
+
+ECHO {}
+SET
+'''
+
+# If on Windows and VS is installed but not set up in the environment,
+# set it up so the compilers are runnable. This way Meson can be invoked
+# directly from any shell, VS Code, etc.
+def _setup_vsenv(force: bool) -> bool:
+    if not is_windows():
+        return False
+    if os.environ.get('OSTYPE') == 'cygwin':
+        return False
+    if 'MESON_FORCE_VSENV_FOR_UNITTEST' not in os.environ:
+        # VSINSTALLDIR is set when running vcvars from a Visual Studio installation
+        # Tested with Visual Studio 2012 and 2017
+        if 'VSINSTALLDIR' in os.environ:
+            return False
+        # Check explicitly for cl when on Windows
+        if shutil.which('cl.exe'):
+            return False
+    if not force:
+        if shutil.which('cc'):
+            return False
+        if shutil.which('gcc'):
+            return False
+        if shutil.which('clang'):
+            return False
+        if shutil.which('clang-cl'):
+            return False
+
+    root = os.environ.get("ProgramFiles(x86)") or os.environ.get("ProgramFiles")
+    bat_locator_bin = pathlib.Path(root, 'Microsoft Visual Studio/Installer/vswhere.exe')
+    if not bat_locator_bin.exists():
+        raise MesonException(f'Could not find {bat_locator_bin}')
+    bat_json = subprocess.check_output(
+        [
+            str(bat_locator_bin),
+            '-latest',
+            '-prerelease',
+            '-requiresAny',
+            '-requires', 'Microsoft.VisualStudio.Component.VC.Tools.x86.x64',
+            '-requires', 'Microsoft.VisualStudio.Workload.WDExpress',
+            '-products', '*',
+            '-utf8',
+            '-format',
+            'json'
+        ]
+    )
+    bat_info = json.loads(bat_json)
+    if not bat_info:
+        # VS installer installed but not VS itself maybe?
+        raise MesonException('Could not parse vswhere.exe output')
+    bat_root = pathlib.Path(bat_info[0]['installationPath'])
+    if windows_detect_native_arch() == 'arm64':
+        bat_path = bat_root / 'VC/Auxiliary/Build/vcvarsarm64.bat'
+        if not bat_path.exists():
+            bat_path = bat_root / 'VC/Auxiliary/Build/vcvarsx86_arm64.bat'
+    else:
+        bat_path = bat_root / 'VC/Auxiliary/Build/vcvars64.bat'
+        # if VS is not found try VS Express
+        if not bat_path.exists():
+            bat_path = bat_root / 'VC/Auxiliary/Build/vcvarsx86_amd64.bat'
+    if not bat_path.exists():
+        raise MesonException(f'Could not find {bat_path}')
+
+    mlog.log('Activating VS', bat_info[0]['catalog']['productDisplayVersion'])
+    bat_separator = '---SPLIT---'
+    bat_contents = bat_template.format(bat_path, bat_separator)
+    bat_file = tempfile.NamedTemporaryFile('w', suffix='.bat', encoding='utf-8', delete=False)
+    bat_file.write(bat_contents)
+    bat_file.flush()
+    bat_file.close()
+    bat_output = subprocess.check_output(bat_file.name, universal_newlines=True)
+    os.unlink(bat_file.name)
+    bat_lines = bat_output.split('\n')
+    bat_separator_seen = False
+    for bat_line in bat_lines:
+        if bat_line == bat_separator:
+            bat_separator_seen = True
+            continue
+        if not bat_separator_seen:
+            continue
+        if not bat_line:
+            continue
+        try:
+            k, v = bat_line.split('=', 1)
+        except ValueError:
+            # there is no "=", ignore junk data
+            pass
+        else:
+            os.environ[k] = v
+    return True
+
+def setup_vsenv(force: bool = False) -> bool:
+    try:
+        return _setup_vsenv(force)
+    except MesonException as e:
+        if force:
+            raise
+        mlog.warning('Failed to activate VS environment:', str(e))
+        return False
diff --git a/vendored-meson/meson/mesonbuild/utils/win32.py b/vendored-meson/meson/mesonbuild/utils/win32.py
new file mode 100644
index 000000000000..2bd4cba8d0f0
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/utils/win32.py
@@ -0,0 +1,40 @@
+# SPDX-License-Identifier: Apache-2.0
+# Copyright 2012-2021 The Meson development team
+# Copyright © 2021 Intel Corporation
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+"""Windows specific implementations of mesonlib functionality."""
+
+import msvcrt
+import typing as T
+
+from .universal import MesonException
+from .platform import BuildDirLock as BuildDirLockBase
+
+__all__ = ['BuildDirLock']
+
+class BuildDirLock(BuildDirLockBase):
+
+    def __enter__(self) -> None:
+        self.lockfile = open(self.lockfilename, 'w', encoding='utf-8')
+        try:
+            msvcrt.locking(self.lockfile.fileno(), msvcrt.LK_NBLCK, 1)
+        except (BlockingIOError, PermissionError):
+            self.lockfile.close()
+            raise MesonException('Some other Meson process is already using this build directory. Exiting.')
+
+    def __exit__(self, *args: T.Any) -> None:
+        msvcrt.locking(self.lockfile.fileno(), msvcrt.LK_UNLCK, 1)
+        self.lockfile.close()
diff --git a/vendored-meson/meson/mesonbuild/wrap/__init__.py b/vendored-meson/meson/mesonbuild/wrap/__init__.py
new file mode 100644
index 000000000000..653f42ab92d8
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/wrap/__init__.py
@@ -0,0 +1,59 @@
+from enum import Enum
+
+# Used for the --wrap-mode command-line argument
+#
+# Special wrap modes:
+#   nofallback: Don't download wraps for dependency() fallbacks
+#   nodownload: Don't download wraps for all subproject() calls
+#
+# subprojects are used for two purposes:
+# 1. To download and build dependencies by using .wrap
+#    files if they are not provided by the system. This is
+#    usually expressed via dependency(..., fallback: ...).
+# 2. To download and build 'copylibs' which are meant to be
+#    used by copying into your project. This is always done
+#    with an explicit subproject() call.
+#
+# --wrap-mode=nofallback will never do (1)
+# --wrap-mode=nodownload will do neither (1) nor (2)
+#
+# If you are building from a release tarball, you should be
+# able to safely use 'nodownload' since upstream is
+# expected to ship all required sources with the tarball.
+#
+# If you are building from a git repository, you will want
+# to use 'nofallback' so that any 'copylib' wraps will be
+# downloaded as subprojects.
+#
+# --wrap-mode=forcefallback will ignore external dependencies,
+# even if they match the version requirements, and automatically
+# use the fallback if one was provided. This is useful for example
+# to make sure a project builds when using the fallbacks.
+#
+# Note that these options do not affect subprojects that
+# are git submodules since those are only usable in git
+# repositories, and you almost always want to download them.
+
+# This did _not_ work when inside the WrapMode class.
+# I don't know why. If you can fix this, patches welcome.
+string_to_value = {'default': 1,
+                   'nofallback': 2,
+                   'nodownload': 3,
+                   'forcefallback': 4,
+                   'nopromote': 5,
+                   }
+
+class WrapMode(Enum):
+    default = 1
+    nofallback = 2
+    nodownload = 3
+    forcefallback = 4
+    nopromote = 5
+
+    def __str__(self) -> str:
+        return self.name
+
+    @staticmethod
+    def from_string(mode_name: str) -> 'WrapMode':
+        g = string_to_value[mode_name]
+        return WrapMode(g)
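+
+
+# For example (illustrative only):
+#
+#   WrapMode.from_string('nofallback') is WrapMode.nofallback   # -> True
+#   str(WrapMode.nodownload)                                    # -> 'nodownload'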
diff --git a/vendored-meson/meson/mesonbuild/wrap/wrap.py b/vendored-meson/meson/mesonbuild/wrap/wrap.py
new file mode 100644
index 000000000000..2b0a0ba9eb84
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/wrap/wrap.py
@@ -0,0 +1,834 @@
+# Copyright 2015 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+from .. import mlog
+import contextlib
+from dataclasses import dataclass
+import urllib.request
+import urllib.error
+import urllib.parse
+import os
+import hashlib
+import shutil
+import tempfile
+import stat
+import subprocess
+import sys
+import configparser
+import time
+import typing as T
+import textwrap
+import json
+
+from base64 import b64encode
+from netrc import netrc
+from pathlib import Path, PurePath
+
+from . import WrapMode
+from .. import coredata
+from ..mesonlib import quiet_git, GIT, ProgressBar, MesonException, windows_proof_rmtree, Popen_safe
+from ..interpreterbase import FeatureNew
+from ..interpreterbase import SubProject
+from .. import mesonlib
+
+if T.TYPE_CHECKING:
+    import http.client
+
+try:
+    # Importing is just done to check if SSL exists, so all warnings
+    # regarding 'imported but unused' can be safely ignored
+    import ssl  # noqa
+    has_ssl = True
+except ImportError:
+    has_ssl = False
+
+REQ_TIMEOUT = 30.0
+WHITELIST_SUBDOMAIN = 'wrapdb.mesonbuild.com'
+
+ALL_TYPES = ['file', 'git', 'hg', 'svn']
+
+PATCH = shutil.which('patch')
+
+def whitelist_wrapdb(urlstr: str) -> urllib.parse.ParseResult:
+    """ raises WrapException if not whitelisted subdomain """
+    url = urllib.parse.urlparse(urlstr)
+    if not url.hostname:
+        raise WrapException(f'{urlstr} is not a valid URL')
+    if not url.hostname.endswith(WHITELIST_SUBDOMAIN):
+        raise WrapException(f'{urlstr} is not a whitelisted WrapDB URL')
+    if has_ssl and not url.scheme == 'https':
+        raise WrapException(f'WrapDB did not have expected SSL https url, instead got {urlstr}')
+    return url
+
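+# For example (illustrative only):
+#
+#   whitelist_wrapdb('https://wrapdb.mesonbuild.com/v2/releases.json')  # ok
+#   whitelist_wrapdb('https://example.com/foo')   # raises WrapException
+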
+def open_wrapdburl(urlstring: str, allow_insecure: bool = False, have_opt: bool = False) -> 'http.client.HTTPResponse':
+    if have_opt:
+        insecure_msg = '\n\n    To allow connecting anyway, pass `--allow-insecure`.'
+    else:
+        insecure_msg = ''
+
+    url = whitelist_wrapdb(urlstring)
+    if has_ssl:
+        try:
+            return T.cast('http.client.HTTPResponse', urllib.request.urlopen(urllib.parse.urlunparse(url), timeout=REQ_TIMEOUT))
+        except urllib.error.URLError as excp:
+            msg = f'WrapDB connection failed to {urlstring} with error {excp}.'
+            if isinstance(excp.reason, ssl.SSLCertVerificationError):
+                if allow_insecure:
+                    mlog.warning(f'{msg}\n\n    Proceeding without authentication.')
+                else:
+                    raise WrapException(f'{msg}{insecure_msg}')
+            else:
+                raise WrapException(msg)
+    elif not allow_insecure:
+        raise WrapException(f'SSL module not available in {sys.executable}: Cannot contact the WrapDB.{insecure_msg}')
+    else:
+        # following code is only for those without Python SSL
+        mlog.warning(f'SSL module not available in {sys.executable}: WrapDB traffic not authenticated.', once=True)
+
+    # If we got this far, allow_insecure was manually passed
+    nossl_url = url._replace(scheme='http')
+    try:
+        return T.cast('http.client.HTTPResponse', urllib.request.urlopen(urllib.parse.urlunparse(nossl_url), timeout=REQ_TIMEOUT))
+    except urllib.error.URLError as excp:
+        raise WrapException(f'WrapDB connection failed to {urlstring} with error {excp}')
+
+def get_releases_data(allow_insecure: bool) -> bytes:
+    url = open_wrapdburl('https://wrapdb.mesonbuild.com/v2/releases.json', allow_insecure, True)
+    return url.read()
+
+def get_releases(allow_insecure: bool) -> T.Dict[str, T.Any]:
+    data = get_releases_data(allow_insecure)
+    return T.cast('T.Dict[str, T.Any]', json.loads(data.decode()))
+
+def update_wrap_file(wrapfile: str, name: str, new_version: str, new_revision: str, allow_insecure: bool) -> None:
+    url = open_wrapdburl(f'https://wrapdb.mesonbuild.com/v2/{name}_{new_version}-{new_revision}/{name}.wrap',
+                         allow_insecure, True)
+    with open(wrapfile, 'wb') as f:
+        f.write(url.read())
+
+def parse_patch_url(patch_url: str) -> T.Tuple[str, str]:
+    u = urllib.parse.urlparse(patch_url)
+    if u.netloc != 'wrapdb.mesonbuild.com':
+        raise WrapException(f'URL {patch_url} does not seem to be a WrapDB patch')
+    arr = u.path.strip('/').split('/')
+    if arr[0] == 'v1':
+        # e.g. https://wrapdb.mesonbuild.com/v1/projects/zlib/1.2.11/5/get_zip
+        return arr[-3], arr[-2]
+    elif arr[0] == 'v2':
+        # e.g. https://wrapdb.mesonbuild.com/v2/zlib_1.2.11-5/get_patch
+        tag = arr[-2]
+        _, version = tag.rsplit('_', 1)
+        version, revision = version.rsplit('-', 1)
+        return version, revision
+    else:
+        raise WrapException(f'Invalid wrapdb URL {patch_url}')
+
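+# For example (illustrative only):
+#
+#   parse_patch_url('https://wrapdb.mesonbuild.com/v2/zlib_1.2.11-5/get_patch')
+#   # -> ('1.2.11', '5')
+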
+class WrapException(MesonException):
+    pass
+
+class WrapNotFoundException(WrapException):
+    pass
+
+class PackageDefinition:
+    def __init__(self, fname: str, subproject: str = ''):
+        self.filename = fname
+        self.subproject = SubProject(subproject)
+        self.type = None  # type: T.Optional[str]
+        self.values = {} # type: T.Dict[str, str]
+        self.provided_deps = {} # type: T.Dict[str, T.Optional[str]]
+        self.provided_programs = [] # type: T.List[str]
+        self.diff_files = [] # type: T.List[Path]
+        self.basename = os.path.basename(fname)
+        self.has_wrap = self.basename.endswith('.wrap')
+        self.name = self.basename[:-5] if self.has_wrap else self.basename
+        # must be lowercase for consistency with dep=variable assignment
+        self.provided_deps[self.name.lower()] = None
+        # What the original file name was before redirection
+        self.original_filename = fname
+        self.redirected = False
+        if self.has_wrap:
+            self.parse_wrap()
+            with open(fname, 'r', encoding='utf-8') as file:
+                self.wrapfile_hash = hashlib.sha256(file.read().encode('utf-8')).hexdigest()
+        self.directory = self.values.get('directory', self.name)
+        if os.path.dirname(self.directory):
+            raise WrapException('Directory key must be a name and not a path')
+        if self.type and self.type not in ALL_TYPES:
+            raise WrapException(f'Unknown wrap type {self.type!r}')
+        self.filesdir = os.path.join(os.path.dirname(self.filename), 'packagefiles')
+
+    def parse_wrap(self) -> None:
+        try:
+            config = configparser.ConfigParser(interpolation=None)
+            config.read(self.filename, encoding='utf-8')
+        except configparser.Error as e:
+            raise WrapException(f'Failed to parse {self.basename}: {e!s}')
+        self.parse_wrap_section(config)
+        if self.type == 'redirect':
+            # A [wrap-redirect] has a `filename` value pointing to the real wrap
+            # file we should parse instead. It must be relative to the current
+            # wrap file location and must be in the form foo/subprojects/bar.wrap.
+            dirname = Path(self.filename).parent
+            fname = Path(self.values['filename'])
+            for i, p in enumerate(fname.parts):
+                if i % 2 == 0:
+                    if p == '..':
+                        raise WrapException('wrap-redirect filename cannot contain ".."')
+                else:
+                    if p != 'subprojects':
+                        raise WrapException('wrap-redirect filename must be in the form foo/subprojects/bar.wrap')
+            if fname.suffix != '.wrap':
+                raise WrapException('wrap-redirect filename must be a .wrap file')
+            fname = dirname / fname
+            if not fname.is_file():
+                raise WrapException(f'wrap-redirect {fname} filename does not exist')
+            self.filename = str(fname)
+            self.parse_wrap()
+            self.redirected = True
+        else:
+            self.parse_provide_section(config)
+        if 'patch_directory' in self.values:
+            FeatureNew('Wrap files with patch_directory', '0.55.0').use(self.subproject)
+        for what in ['patch', 'source']:
+            if f'{what}_filename' in self.values and f'{what}_url' not in self.values:
+                FeatureNew(f'Local wrap patch files without {what}_url', '0.55.0').use(self.subproject)
+
+    def parse_wrap_section(self, config: configparser.ConfigParser) -> None:
+        if len(config.sections()) < 1:
+            raise WrapException(f'Missing sections in {self.basename}')
+        self.wrap_section = config.sections()[0]
+        if not self.wrap_section.startswith('wrap-'):
+            raise WrapException(f'{self.wrap_section!r} is not a valid first section in {self.basename}')
+        self.type = self.wrap_section[5:]
+        self.values = dict(config[self.wrap_section])
+        if 'diff_files' in self.values:
+            FeatureNew('Wrap files with diff_files', '0.63.0').use(self.subproject)
+            for s in self.values['diff_files'].split(','):
+                path = Path(s.strip())
+                if path.is_absolute():
+                    raise WrapException('diff_files paths cannot be absolute')
+                if '..' in path.parts:
+                    raise WrapException('diff_files paths cannot contain ".."')
+                self.diff_files.append(path)
+
+    def parse_provide_section(self, config: configparser.ConfigParser) -> None:
+        if config.has_section('provides'):
+            raise WrapException('Unexpected "[provides]" section, did you mean "[provide]"?')
+        if config.has_section('provide'):
+            for k, v in config['provide'].items():
+                if k == 'dependency_names':
+                    # A comma separated list of dependency names that does not
+                    # need a variable name; must be lowercase for consistency with
+                    # dep=variable assignment
+                    names_dict = {n.strip().lower(): None for n in v.split(',')}
+                    self.provided_deps.update(names_dict)
+                    continue
+                if k == 'program_names':
+                    # A comma separated list of program names
+                    names_list = [n.strip() for n in v.split(',')]
+                    self.provided_programs += names_list
+                    continue
+                if not v:
+                    m = (f'Empty dependency variable name for {k!r} in {self.basename}. '
+                         'If the subproject uses meson.override_dependency() '
+                         'it can be added in the "dependency_names" special key.')
+                    raise WrapException(m)
+                self.provided_deps[k] = v
+
+    def get(self, key: str) -> str:
+        try:
+            return self.values[key]
+        except KeyError:
+            raise WrapException(f'Missing key {key!r} in {self.basename}')
+
+    def get_hashfile(self, subproject_directory: str) -> str:
+        return os.path.join(subproject_directory, '.meson-subproject-wrap-hash.txt')
+
+    def update_hash_cache(self, subproject_directory: str) -> None:
+        if self.has_wrap:
+            with open(self.get_hashfile(subproject_directory), 'w', encoding='utf-8') as file:
+                file.write(self.wrapfile_hash + '\n')
+
+def get_directory(subdir_root: str, packagename: str) -> str:
+    fname = os.path.join(subdir_root, packagename + '.wrap')
+    if os.path.isfile(fname):
+        wrap = PackageDefinition(fname)
+        return wrap.directory
+    return packagename
+
+def verbose_git(cmd: T.List[str], workingdir: str, check: bool = False) -> bool:
+    '''
+    Wrapper to convert GitException to WrapException caught in interpreter.
+    '''
+    try:
+        return mesonlib.verbose_git(cmd, workingdir, check=check)
+    except mesonlib.GitException as e:
+        raise WrapException(str(e))
+
+@dataclass(eq=False)
+class Resolver:
+    source_dir: str
+    subdir: str
+    subproject: str = ''
+    wrap_mode: WrapMode = WrapMode.default
+    wrap_frontend: bool = False
+    allow_insecure: bool = False
+    silent: bool = False
+
+    def __post_init__(self) -> None:
+        self.subdir_root = os.path.join(self.source_dir, self.subdir)
+        self.cachedir = os.path.join(self.subdir_root, 'packagecache')
+        self.wraps = {} # type: T.Dict[str, PackageDefinition]
+        self.netrc: T.Optional[netrc] = None
+        self.provided_deps = {} # type: T.Dict[str, PackageDefinition]
+        self.provided_programs = {} # type: T.Dict[str, PackageDefinition]
+        self.wrapdb: T.Dict[str, T.Any] = {}
+        self.wrapdb_provided_deps: T.Dict[str, str] = {}
+        self.wrapdb_provided_programs: T.Dict[str, str] = {}
+        self.load_wraps()
+        self.load_netrc()
+        self.load_wrapdb()
+
+    def load_netrc(self) -> None:
+        try:
+            self.netrc = netrc()
+        except FileNotFoundError:
+            return
+        except Exception as e:
+            mlog.warning(f'failed to process netrc file: {e}.', fatal=False)
+
+    def load_wraps(self) -> None:
+        if not os.path.isdir(self.subdir_root):
+            return
+        root, dirs, files = next(os.walk(self.subdir_root))
+        ignore_dirs = {'packagecache', 'packagefiles'}
+        for i in files:
+            if not i.endswith('.wrap'):
+                continue
+            fname = os.path.join(self.subdir_root, i)
+            wrap = PackageDefinition(fname, self.subproject)
+            self.wraps[wrap.name] = wrap
+            ignore_dirs |= {wrap.directory, wrap.name}
+        # Add dummy package definition for directories not associated with a wrap file.
+        for i in dirs:
+            if i in ignore_dirs:
+                continue
+            fname = os.path.join(self.subdir_root, i)
+            wrap = PackageDefinition(fname, self.subproject)
+            self.wraps[wrap.name] = wrap
+
+        for wrap in self.wraps.values():
+            self.add_wrap(wrap)
+
+    def add_wrap(self, wrap: PackageDefinition) -> None:
+        for k in wrap.provided_deps.keys():
+            if k in self.provided_deps:
+                prev_wrap = self.provided_deps[k]
+                m = f'Multiple wrap files provide {k!r} dependency: {wrap.basename} and {prev_wrap.basename}'
+                raise WrapException(m)
+            self.provided_deps[k] = wrap
+        for k in wrap.provided_programs:
+            if k in self.provided_programs:
+                prev_wrap = self.provided_programs[k]
+                m = f'Multiple wrap files provide {k!r} program: {wrap.basename} and {prev_wrap.basename}'
+                raise WrapException(m)
+            self.provided_programs[k] = wrap
+
+    def load_wrapdb(self) -> None:
+        try:
+            with Path(self.subdir_root, 'wrapdb.json').open('r', encoding='utf-8') as f:
+                self.wrapdb = json.load(f)
+        except FileNotFoundError:
+            return
+        for name, info in self.wrapdb.items():
+            self.wrapdb_provided_deps.update({i: name for i in info.get('dependency_names', [])})
+            self.wrapdb_provided_programs.update({i: name for i in info.get('program_names', [])})
+
+    def get_from_wrapdb(self, subp_name: str) -> T.Optional[PackageDefinition]:
+        info = self.wrapdb.get(subp_name)
+        if not info:
+            return None
+        self.check_can_download()
+        latest_version = info['versions'][0]
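+        # WrapDB versions are formatted '<upstream-version>-<wrapdb-revision>',
+        # e.g. '1.2.13-1' (illustrative), so split on the last dash.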
+        version, revision = latest_version.rsplit('-', 1)
+        url = urllib.request.urlopen(f'https://wrapdb.mesonbuild.com/v2/{subp_name}_{version}-{revision}/{subp_name}.wrap')
+        fname = Path(self.subdir_root, f'{subp_name}.wrap')
+        with fname.open('wb') as f:
+            f.write(url.read())
+        mlog.log(f'Installed {subp_name} version {version} revision {revision}')
+        wrap = PackageDefinition(str(fname))
+        self.wraps[wrap.name] = wrap
+        self.add_wrap(wrap)
+        return wrap
+
+    def merge_wraps(self, other_resolver: 'Resolver') -> None:
+        for k, v in other_resolver.wraps.items():
+            self.wraps.setdefault(k, v)
+        for k, v in other_resolver.provided_deps.items():
+            self.provided_deps.setdefault(k, v)
+        for k, v in other_resolver.provided_programs.items():
+            self.provided_programs.setdefault(k, v)
+
+    def find_dep_provider(self, packagename: str) -> T.Tuple[T.Optional[str], T.Optional[str]]:
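+        # Returns (wrap_name, variable_name) for a local wrap providing the
+        # dependency, or (wrapdb_wrap_name, None) if only WrapDB provides it.
+        # E.g. a wrap whose [provide] section has `zlib = zlib_dep` yields
+        # ('zlib', 'zlib_dep') for find_dep_provider('zlib') (names illustrative).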
+        # Python's ini parser converts all keys to lowercase, so the queried
+        # name must be lowercased as well.
+        packagename = packagename.lower()
+        wrap = self.provided_deps.get(packagename)
+        if wrap:
+            dep_var = wrap.provided_deps.get(packagename)
+            return wrap.name, dep_var
+        wrap_name = self.wrapdb_provided_deps.get(packagename)
+        return wrap_name, None
+
+    def get_varname(self, subp_name: str, depname: str) -> T.Optional[str]:
+        wrap = self.wraps.get(subp_name)
+        return wrap.provided_deps.get(depname) if wrap else None
+
+    def find_program_provider(self, names: T.List[str]) -> T.Optional[str]:
+        for name in names:
+            wrap = self.provided_programs.get(name)
+            if wrap:
+                return wrap.name
+            wrap_name = self.wrapdb_provided_programs.get(name)
+            if wrap_name:
+                return wrap_name
+        return None
+
+    def resolve(self, packagename: str, method: str) -> str:
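+        # Resolve `packagename` to a subproject directory (returned relative to
+        # the source root), downloading or extracting sources as needed; `method`
+        # decides whether a meson.build or a CMakeLists.txt must be present.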
+        self.packagename = packagename
+        self.directory = packagename
+        self.wrap = self.wraps.get(packagename)
+        if not self.wrap:
+            self.wrap = self.get_from_wrapdb(packagename)
+        if not self.wrap:
+            m = f'Neither a subproject directory nor a {self.packagename}.wrap file was found.'
+            raise WrapNotFoundException(m)
+        self.directory = self.wrap.directory
+
+        if self.wrap.has_wrap:
+            # We have a .wrap file: use the directory relative to the wrap
+            # file's location if it exists; otherwise the source code is placed
+            # in the main project's subproject_dir, even when the wrap file
+            # comes from another subproject.
+            self.dirname = os.path.join(os.path.dirname(self.wrap.filename), self.wrap.directory)
+            if not os.path.exists(self.dirname):
+                self.dirname = os.path.join(self.subdir_root, self.directory)
+            # Check if the wrap comes from the main project.
+            main_fname = os.path.join(self.subdir_root, self.wrap.basename)
+            if self.wrap.filename != main_fname:
+                rel = os.path.relpath(self.wrap.filename, self.source_dir)
+                mlog.log('Using', mlog.bold(rel))
+                # Write a dummy wrap file in the main project that redirects to
+                # the wrap we picked.
+                with open(main_fname, 'w', encoding='utf-8') as f:
+                    f.write(textwrap.dedent(f'''\
+                        [wrap-redirect]
+                        filename = {PurePath(os.path.relpath(self.wrap.filename, self.subdir_root)).as_posix()}
+                        '''))
+        else:
+            # No wrap file; this is a dummy package definition for an existing
+            # directory, so use the source code in place.
+            self.dirname = self.wrap.filename
+        rel_path = os.path.relpath(self.dirname, self.source_dir)
+
+        if method == 'meson':
+            buildfile = os.path.join(self.dirname, 'meson.build')
+        elif method == 'cmake':
+            buildfile = os.path.join(self.dirname, 'CMakeLists.txt')
+        else:
+            raise WrapException('Only the methods "meson" and "cmake" are supported')
+
+        # The directory is there and has the required build file? Great, use it.
+        if os.path.exists(buildfile):
+            self.validate()
+            return rel_path
+
+        # Check if the subproject is a git submodule
+        self.resolve_git_submodule()
+
+        if os.path.exists(self.dirname):
+            if not os.path.isdir(self.dirname):
+                raise WrapException('Path already exists but is not a directory')
+        else:
+            if self.wrap.type == 'file':
+                self.get_file()
+            else:
+                self.check_can_download()
+                if self.wrap.type == 'git':
+                    self.get_git()
+                elif self.wrap.type == "hg":
+                    self.get_hg()
+                elif self.wrap.type == "svn":
+                    self.get_svn()
+                else:
+                    raise WrapException(f'Unknown wrap type {self.wrap.type!r}')
+            try:
+                self.apply_patch()
+                self.apply_diff_files()
+            except Exception:
+                windows_proof_rmtree(self.dirname)
+                raise
+
+        # A meson.build or CMakeLists.txt file is required in the directory
+        if not os.path.exists(buildfile):
+            raise WrapException(f'Subproject exists but has no {os.path.basename(buildfile)} file')
+
+        # At this point, the subproject has been successfully resolved for the
+        # first time so save off the hash of the entire wrap file for future
+        # reference.
+        self.wrap.update_hash_cache(self.dirname)
+
+        return rel_path
+
+    def check_can_download(self) -> None:
+        # Don't download subproject data based on wrap file if requested.
+        # Git submodules are ok (see above)!
+        if self.wrap_mode is WrapMode.nodownload:
+            m = 'Automatic wrap-based subproject downloading is disabled'
+            raise WrapException(m)
+
+    def resolve_git_submodule(self) -> bool:
+        # Is git installed? If not, we're probably not in a git repository and
+        # definitely cannot try to conveniently set up a submodule.
+        if not GIT:
+            return False
+        # Does the directory exist? Even uninitialised submodules check out an
+        # empty directory to work in.
+        if not os.path.isdir(self.dirname):
+            return False
+        # Are we in a git repository?
+        ret, out = quiet_git(['rev-parse'], Path(self.dirname).parent)
+        if not ret:
+            return False
+        # Is `dirname` a submodule?
+        ret, out = quiet_git(['submodule', 'status', '.'], self.dirname)
+        if not ret:
+            return False
+        # The checked-out submodule commit differs from the one recorded in the
+        # superproject, so it may be out of date.
+        if out.startswith('+'):
+            mlog.warning('git submodule might be out of date')
+            return True
+        elif out.startswith('U'):
+            raise WrapException('git submodule has merge conflicts')
+        # Submodule exists, but is deinitialized or wasn't initialized
+        elif out.startswith('-'):
+            if verbose_git(['submodule', 'update', '--init', '.'], self.dirname):
+                return True
+            raise WrapException('git submodule failed to init')
+        # Submodule looks fine, but maybe it wasn't populated properly. Do a checkout.
+        elif out.startswith(' '):
+            verbose_git(['submodule', 'update', '.'], self.dirname)
+            verbose_git(['checkout', '.'], self.dirname)
+            # Even if checkout failed, try building it anyway and let the user
+            # handle any problems manually.
+            return True
+        elif out == '':
+            # It is not a submodule, just a folder that exists in the main repository.
+            return False
+        raise WrapException(f'Unknown git submodule output: {out!r}')
+
+    def get_file(self) -> None:
+        path = self.get_file_internal('source')
+        extract_dir = self.subdir_root
+        # Some upstreams ship packages that do not have a leading directory.
+        # Create one for them.
+        if 'lead_directory_missing' in self.wrap.values:
+            os.mkdir(self.dirname)
+            extract_dir = self.dirname
+        shutil.unpack_archive(path, extract_dir)
+
+    def get_git(self) -> None:
+        if not GIT:
+            raise WrapException(f'Git program not found, cannot download {self.packagename}.wrap via git.')
+        revno = self.wrap.get('revision')
+        checkout_cmd = ['-c', 'advice.detachedHead=false', 'checkout', revno, '--']
+        is_shallow = False
+        depth_option: T.List[str] = []
+        if self.wrap.values.get('depth', '') != '':
+            is_shallow = True
+            depth_option = ['--depth', self.wrap.values.get('depth')]
+        # For some reason git only allows commit ids to be fetched shallowly
+        # with `fetch`, not with `clone`.
+        if is_shallow and self.is_git_full_commit_id(revno):
+            # git doesn't support directly cloning shallowly for commits,
+            # so we follow https://stackoverflow.com/a/43136160
+            verbose_git(['-c', 'init.defaultBranch=meson-dummy-branch', 'init', self.directory], self.subdir_root, check=True)
+            verbose_git(['remote', 'add', 'origin', self.wrap.get('url')], self.dirname, check=True)
+            verbose_git(['fetch', *depth_option, 'origin', revno], self.dirname, check=True)
+            verbose_git(checkout_cmd, self.dirname, check=True)
+            if self.wrap.values.get('clone-recursive', '').lower() == 'true':
+                verbose_git(['submodule', 'update', '--init', '--checkout',
+                             '--recursive', *depth_option], self.dirname, check=True)
+            push_url = self.wrap.values.get('push-url')
+            if push_url:
+                verbose_git(['remote', 'set-url', '--push', 'origin', push_url], self.dirname, check=True)
+        else:
+            if not is_shallow:
+                verbose_git(['clone', self.wrap.get('url'), self.directory], self.subdir_root, check=True)
+                if revno.lower() != 'head':
+                    if not verbose_git(checkout_cmd, self.dirname):
+                        verbose_git(['fetch', self.wrap.get('url'), revno], self.dirname, check=True)
+                        verbose_git(checkout_cmd, self.dirname, check=True)
+            else:
+                args = ['-c', 'advice.detachedHead=false', 'clone', *depth_option]
+                if revno.lower() != 'head':
+                    args += ['--branch', revno]
+                args += [self.wrap.get('url'), self.directory]
+                verbose_git(args, self.subdir_root, check=True)
+            if self.wrap.values.get('clone-recursive', '').lower() == 'true':
+                verbose_git(['submodule', 'update', '--init', '--checkout', '--recursive', *depth_option],
+                            self.dirname, check=True)
+            push_url = self.wrap.values.get('push-url')
+            if push_url:
+                verbose_git(['remote', 'set-url', '--push', 'origin', push_url], self.dirname, check=True)
+
+    def validate(self) -> None:
+        # This check is only for subprojects with wraps.
+        if not self.wrap.has_wrap:
+            return
+
+        # Retrieve original hash, if it exists.
+        hashfile = self.wrap.get_hashfile(self.dirname)
+        if os.path.isfile(hashfile):
+            with open(hashfile, 'r', encoding='utf-8') as file:
+                expected_hash = file.read().strip()
+        else:
+            # If stored hash doesn't exist then don't warn.
+            return
+
+        actual_hash = self.wrap.wrapfile_hash
+
+        # Compare hashes and warn the user if they don't match.
+        if expected_hash != actual_hash:
+            mlog.warning(f'Subproject {self.wrap.name}\'s revision may be out of date; its wrap file has changed since it was first configured')
+
+    def is_git_full_commit_id(self, revno: str) -> bool:
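+        # A full commit id is all-hex and of full digest length, e.g. the
+        # (illustrative) SHA-1 '3f786850e387550fdab836ed7e6dc881de23001b';
+        # branch names, tags and abbreviated ids return False.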
+        result = False
+        if len(revno) in {40, 64}: # 40 for sha1, 64 for upcoming sha256
+            result = all(ch in '0123456789AaBbCcDdEeFf' for ch in revno)
+        return result
+
+    def get_hg(self) -> None:
+        revno = self.wrap.get('revision')
+        hg = shutil.which('hg')
+        if not hg:
+            raise WrapException('Mercurial program not found.')
+        subprocess.check_call([hg, 'clone', self.wrap.get('url'),
+                               self.directory], cwd=self.subdir_root)
+        if revno.lower() != 'tip':
+            subprocess.check_call([hg, 'checkout', revno],
+                                  cwd=self.dirname)
+
+    def get_svn(self) -> None:
+        revno = self.wrap.get('revision')
+        svn = shutil.which('svn')
+        if not svn:
+            raise WrapException('SVN program not found.')
+        subprocess.check_call([svn, 'checkout', '-r', revno, self.wrap.get('url'),
+                               self.directory], cwd=self.subdir_root)
+
+    def get_netrc_credentials(self, netloc: str) -> T.Optional[T.Tuple[str, str]]:
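+        # Looks up `netloc` in ~/.netrc; a matching entry looks like (host and
+        # credentials purely illustrative):
+        #   machine example.com login alice password s3cret
+        # An `account` field, if present, takes precedence over `login`.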
+        if self.netrc is None or netloc not in self.netrc.hosts:
+            return None
+
+        login, account, password = self.netrc.authenticators(netloc)
+        if account is not None:
+            login = account
+
+        return login, password
+
+    def get_data(self, urlstring: str) -> T.Tuple[str, str]:
+        blocksize = 10 * 1024
+        h = hashlib.sha256()
+        tmpfile = tempfile.NamedTemporaryFile(mode='wb', dir=self.cachedir, delete=False)
+        url = urllib.parse.urlparse(urlstring)
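+        # Official WrapDB downloads go through open_wrapdburl, which enforces
+        # TLS unless --allow-insecure was given; any other URL that merely
+        # contains the WrapDB domain is rejected as a likely impersonation.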
+        if url.hostname and url.hostname.endswith(WHITELIST_SUBDOMAIN):
+            resp = open_wrapdburl(urlstring, allow_insecure=self.allow_insecure, have_opt=self.wrap_frontend)
+        elif WHITELIST_SUBDOMAIN in urlstring:
+            raise WrapException(f'{urlstring} may be a WrapDB-impersonating URL')
+        else:
+            headers = {'User-Agent': f'mesonbuild/{coredata.version}'}
+            creds = self.get_netrc_credentials(url.netloc)
+
+            if creds is not None and '@' not in url.netloc:
+                login, password = creds
+                if url.scheme == 'https':
+                    enc_creds = b64encode(f'{login}:{password}'.encode()).decode()
+                    headers.update({'Authorization': f'Basic {enc_creds}'})
+                elif url.scheme == 'ftp':
+                    urlstring = urllib.parse.urlunparse(url._replace(netloc=f'{login}:{password}@{url.netloc}'))
+                else:
+                    mlog.warning('Meson is not going to use netrc credentials for protocols other than https/ftp',
+                                 fatal=False)
+
+            try:
+                req = urllib.request.Request(urlstring, headers=headers)
+                resp = urllib.request.urlopen(req, timeout=REQ_TIMEOUT)
+            except urllib.error.URLError as e:
+                mlog.log(str(e))
+                raise WrapException(f'could not get {urlstring}; is the internet available?')
+        with contextlib.closing(resp) as resp, tmpfile as tmpfile:
+            try:
+                dlsize = int(resp.info()['Content-Length'])
+            except TypeError:
+                dlsize = None
+            if dlsize is None:
+                print('Downloading file of unknown size.')
+                while True:
+                    block = resp.read(blocksize)
+                    if block == b'':
+                        break
+                    h.update(block)
+                    tmpfile.write(block)
+                hashvalue = h.hexdigest()
+                return hashvalue, tmpfile.name
+            sys.stdout.flush()
+            progress_bar = ProgressBar(bar_type='download', total=dlsize,
+                                       desc='Downloading',
+                                       disable=(self.silent or None))
+            while True:
+                block = resp.read(blocksize)
+                if block == b'':
+                    break
+                h.update(block)
+                tmpfile.write(block)
+                progress_bar.update(len(block))
+            progress_bar.close()
+            hashvalue = h.hexdigest()
+        return hashvalue, tmpfile.name
+
+    def check_hash(self, what: str, path: str, hash_required: bool = True) -> None:
+        if what + '_hash' not in self.wrap.values and not hash_required:
+            return
+        expected = self.wrap.get(what + '_hash').lower()
+        h = hashlib.sha256()
+        with open(path, 'rb') as f:
+            h.update(f.read())
+        dhash = h.hexdigest()
+        if dhash != expected:
+            raise WrapException(f'Incorrect hash for {what}:\n {expected} expected\n {dhash} actual.')
+
+    def get_data_with_backoff(self, urlstring: str) -> T.Tuple[str, str]:
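+        # Exponential backoff: up to five retried attempts (1+2+4+8+16 = 31 s of
+        # sleep in total), then one final attempt whose exception propagates.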
+        delays = [1, 2, 4, 8, 16]
+        for d in delays:
+            try:
+                return self.get_data(urlstring)
+            except Exception as e:
+                mlog.warning(f'failed to download with error: {e}. Trying after a delay...', fatal=False)
+                time.sleep(d)
+        return self.get_data(urlstring)
+
+    def download(self, what: str, ofname: str, fallback: bool = False) -> None:
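+        # Fetch the wrap's `what` file (e.g. 'source' or 'patch') into `ofname`
+        # and verify its sha256; on failure, retry once via the wrap's
+        # `<what>_fallback_url` if one is defined.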
+        self.check_can_download()
+        srcurl = self.wrap.get(what + ('_fallback_url' if fallback else '_url'))
+        mlog.log('Downloading', mlog.bold(self.packagename), what, 'from', mlog.bold(srcurl))
+        try:
+            dhash, tmpfile = self.get_data_with_backoff(srcurl)
+            expected = self.wrap.get(what + '_hash').lower()
+            if dhash != expected:
+                os.remove(tmpfile)
+                raise WrapException(f'Incorrect hash for {what}:\n {expected} expected\n {dhash} actual.')
+        except WrapException:
+            if not fallback:
+                if what + '_fallback_url' in self.wrap.values:
+                    return self.download(what, ofname, fallback=True)
+                mlog.log('A fallback URL could be specified using',
+                         mlog.bold(what + '_fallback_url'), 'key in the wrap file')
+            raise
+        os.rename(tmpfile, ofname)
+
+    def get_file_internal(self, what: str) -> str:
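+        # Returns a local path for the wrap's `what` file: the packagecache copy
+        # (downloading it on a cache miss) when a *_url is given, otherwise a
+        # file shipped in the wrap's packagefiles directory (self.wrap.filesdir).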
+        filename = self.wrap.get(what + '_filename')
+        if what + '_url' in self.wrap.values:
+            cache_path = os.path.join(self.cachedir, filename)
+
+            if os.path.exists(cache_path):
+                self.check_hash(what, cache_path)
+                mlog.log('Using', mlog.bold(self.packagename), what, 'from cache.')
+                return cache_path
+
+            os.makedirs(self.cachedir, exist_ok=True)
+            self.download(what, cache_path)
+            return cache_path
+        else:
+            path = Path(self.wrap.filesdir) / filename
+
+            if not path.exists():
+                raise WrapException(f'File "{path}" does not exist')
+            self.check_hash(what, path.as_posix(), hash_required=False)
+
+            return path.as_posix()
+
+    def apply_patch(self) -> None:
+        if 'patch_filename' in self.wrap.values and 'patch_directory' in self.wrap.values:
+            m = f'Wrap file {self.wrap.basename!r} must not have both "patch_filename" and "patch_directory"'
+            raise WrapException(m)
+        if 'patch_filename' in self.wrap.values:
+            path = self.get_file_internal('patch')
+            try:
+                shutil.unpack_archive(path, self.subdir_root)
+            except Exception:
+                with tempfile.TemporaryDirectory() as workdir:
+                    shutil.unpack_archive(path, workdir)
+                    self.copy_tree(workdir, self.subdir_root)
+        elif 'patch_directory' in self.wrap.values:
+            patch_dir = self.wrap.values['patch_directory']
+            src_dir = os.path.join(self.wrap.filesdir, patch_dir)
+            if not os.path.isdir(src_dir):
+                raise WrapException(f'patch directory does not exist: {patch_dir}')
+            self.copy_tree(src_dir, self.dirname)
+
+    def apply_diff_files(self) -> None:
+        for filename in self.wrap.diff_files:
+            mlog.log(f'Applying diff file "{filename}"')
+            path = Path(self.wrap.filesdir) / filename
+            if not path.exists():
+                raise WrapException(f'Diff file "{path}" does not exist')
+            relpath = os.path.relpath(str(path), self.dirname)
+            if PATCH:
+                # Always pass a POSIX path to patch, because on Windows it is an
+                # MSYS program that expects POSIX paths.
+                cmd = [PATCH, '-f', '-p1', '-i', str(Path(relpath).as_posix())]
+            elif GIT:
+                # If the `patch` command is not available, fall back to `git
+                # apply`. The `--work-tree` is necessary in case we're inside a
+                # Git repository: by default, Git will try to apply the patch to
+                # the repository root.
+                cmd = [GIT, '--work-tree', '.', 'apply', '-p1', relpath]
+            else:
+                raise WrapException('Missing "patch" or "git" commands to apply diff files')
+
+            p, out, _ = Popen_safe(cmd, cwd=self.dirname, stderr=subprocess.STDOUT)
+            if p.returncode != 0:
+                mlog.log(out.strip())
+                raise WrapException(f'Failed to apply diff file "{filename}"')
+
+    def copy_tree(self, root_src_dir: str, root_dst_dir: str) -> None:
+        """
+        Copy a directory tree, overwriting read-only files as well.
+        """
+        for src_dir, _, files in os.walk(root_src_dir):
+            dst_dir = src_dir.replace(root_src_dir, root_dst_dir, 1)
+            if not os.path.exists(dst_dir):
+                os.makedirs(dst_dir)
+            for file_ in files:
+                src_file = os.path.join(src_dir, file_)
+                dst_file = os.path.join(dst_dir, file_)
+                if os.path.exists(dst_file):
+                    try:
+                        os.remove(dst_file)
+                    except PermissionError:
+                        os.chmod(dst_file, stat.S_IWUSR)
+                        os.remove(dst_file)
+                shutil.copy2(src_file, dst_dir)
diff --git a/vendored-meson/meson/mesonbuild/wrap/wraptool.py b/vendored-meson/meson/mesonbuild/wrap/wraptool.py
new file mode 100644
index 000000000000..c009aa1c6e01
--- /dev/null
+++ b/vendored-meson/meson/mesonbuild/wrap/wraptool.py
@@ -0,0 +1,231 @@
+# Copyright 2015-2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+import sys, os
+import configparser
+import shutil
+import typing as T
+
+from glob import glob
+from .wrap import (open_wrapdburl, WrapException, get_releases, get_releases_data,
+                   update_wrap_file, parse_patch_url)
+from pathlib import Path
+
+from .. import mesonlib, msubprojects
+
+if T.TYPE_CHECKING:
+    import argparse
+
+def add_arguments(parser: 'argparse.ArgumentParser') -> None:
+    subparsers = parser.add_subparsers(title='Commands', dest='command')
+    subparsers.required = True
+
+    p = subparsers.add_parser('list', help='show all available projects')
+    p.add_argument('--allow-insecure', default=False, action='store_true',
+                   help='Allow insecure server connections.')
+    p.set_defaults(wrap_func=list_projects)
+
+    p = subparsers.add_parser('search', help='search the db by name')
+    p.add_argument('--allow-insecure', default=False, action='store_true',
+                   help='Allow insecure server connections.')
+    p.add_argument('name')
+    p.set_defaults(wrap_func=search)
+
+    p = subparsers.add_parser('install', help='install the specified project')
+    p.add_argument('--allow-insecure', default=False, action='store_true',
+                   help='Allow insecure server connections.')
+    p.add_argument('name')
+    p.set_defaults(wrap_func=install)
+
+    p = msubprojects.add_wrap_update_parser(subparsers)
+    p.set_defaults(wrap_func=msubprojects.run)
+
+    p = subparsers.add_parser('info', help='show available versions of a project')
+    p.add_argument('--allow-insecure', default=False, action='store_true',
+                   help='Allow insecure server connections.')
+    p.add_argument('name')
+    p.set_defaults(wrap_func=info)
+
+    p = subparsers.add_parser('status', help='show installed and available versions of your projects')
+    p.add_argument('--allow-insecure', default=False, action='store_true',
+                   help='Allow insecure server connections.')
+    p.set_defaults(wrap_func=status)
+
+    p = subparsers.add_parser('promote', help='bring a subsubproject up to the master project')
+    p.add_argument('project_path')
+    p.set_defaults(wrap_func=promote)
+
+    p = subparsers.add_parser('update-db', help='Update list of projects available in WrapDB (Since 0.61.0)')
+    p.add_argument('--allow-insecure', default=False, action='store_true',
+                   help='Allow insecure server connections.')
+    p.set_defaults(wrap_func=update_db)
+
+def list_projects(options: 'argparse.Namespace') -> None:
+    releases = get_releases(options.allow_insecure)
+    for p in releases.keys():
+        print(p)
+
+def search(options: 'argparse.Namespace') -> None:
+    name = options.name
+    releases = get_releases(options.allow_insecure)
+    for p, info in releases.items():
+        if name in p:
+            print(p)
+        else:
+            for dep in info.get('dependency_names', []):
+                if name in dep:
+                    print(f'Dependency {dep} found in wrap {p}')
+
+def get_latest_version(name: str, allow_insecure: bool) -> T.Tuple[str, str]:
+    releases = get_releases(allow_insecure)
+    info = releases.get(name)
+    if not info:
+        raise WrapException(f'Wrap {name} not found in wrapdb')
+    latest_version = info['versions'][0]
+    version, revision = latest_version.rsplit('-', 1)
+    return version, revision
+
+def install(options: 'argparse.Namespace') -> None:
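+    # e.g. `meson wrap install zlib` (run from the source root) writes
+    # subprojects/zlib.wrap for the latest WrapDB release.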
+    name = options.name
+    if not os.path.isdir('subprojects'):
+        raise SystemExit('Subprojects dir not found. Run this command in your source root directory.')
+    if os.path.isdir(os.path.join('subprojects', name)):
+        raise SystemExit('Subproject directory for this project already exists.')
+    wrapfile = os.path.join('subprojects', name + '.wrap')
+    if os.path.exists(wrapfile):
+        raise SystemExit('Wrap file already exists.')
+    (version, revision) = get_latest_version(name, options.allow_insecure)
+    url = open_wrapdburl(f'https://wrapdb.mesonbuild.com/v2/{name}_{version}-{revision}/{name}.wrap', options.allow_insecure, True)
+    with open(wrapfile, 'wb') as f:
+        f.write(url.read())
+    print(f'Installed {name} version {version} revision {revision}')
+
+def get_current_version(wrapfile: str) -> T.Tuple[str, str, str, str, T.Optional[str]]:
+    cp = configparser.ConfigParser(interpolation=None)
+    cp.read(wrapfile)
+    try:
+        wrap_data = cp['wrap-file']
+    except KeyError:
+        raise WrapException('Not a wrap-file, cannot have come from the wrapdb')
+    try:
+        patch_url = wrap_data['patch_url']
+    except KeyError:
+        # We assume a wrap without a patch_url is probably just a pointer to
+        # upstream's build files. The version should be in the tarball filename,
+        # even if that isn't strictly guaranteed. The wrapdb revision should be 1,
+        # since such a wrap only needs to be uploaded once.
+        branch = mesonlib.search_version(wrap_data['source_filename'])
+        revision, patch_filename = '1', None
+    else:
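+        # A v2 patch_url looks like (illustrative)
+        # https://wrapdb.mesonbuild.com/v2/zlib_1.2.13-1/get_patch, from which
+        # parse_patch_url recovers the version and revision ('1.2.13', '1').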
+        branch, revision = parse_patch_url(patch_url)
+        patch_filename = wrap_data['patch_filename']
+    return branch, revision, wrap_data['directory'], wrap_data['source_filename'], patch_filename
+
+def update(options: 'argparse.Namespace') -> None:
+    name = options.name
+    if not os.path.isdir('subprojects'):
+        raise SystemExit('Subprojects dir not found. Run this command in your source root directory.')
+    wrapfile = os.path.join('subprojects', name + '.wrap')
+    if not os.path.exists(wrapfile):
+        raise SystemExit('Project ' + name + ' is not in use.')
+    (branch, revision, subdir, src_file, patch_file) = get_current_version(wrapfile)
+    (new_branch, new_revision) = get_latest_version(name, options.allow_insecure)
+    if new_branch == branch and new_revision == revision:
+        print('Project ' + name + ' is already up to date.')
+        raise SystemExit
+    update_wrap_file(wrapfile, name, new_branch, new_revision, options.allow_insecure)
+    shutil.rmtree(os.path.join('subprojects', subdir), ignore_errors=True)
+    try:
+        os.unlink(os.path.join('subprojects/packagecache', src_file))
+    except FileNotFoundError:
+        pass
+    if patch_file is not None:
+        try:
+            os.unlink(os.path.join('subprojects/packagecache', patch_file))
+        except FileNotFoundError:
+            pass
+    print(f'Updated {name} version {new_branch} revision {new_revision}')
+
+def info(options: 'argparse.Namespace') -> None:
+    name = options.name
+    releases = get_releases(options.allow_insecure)
+    info = releases.get(name)
+    if not info:
+        raise WrapException(f'Wrap {name} not found in wrapdb')
+    print(f'Available versions of {name}:')
+    for v in info['versions']:
+        print(' ', v)
+
+def do_promotion(from_path: str, spdir_name: str) -> None:
+    if os.path.isfile(from_path):
+        assert from_path.endswith('.wrap')
+        shutil.copy(from_path, spdir_name)
+    elif os.path.isdir(from_path):
+        sproj_name = os.path.basename(from_path)
+        outputdir = os.path.join(spdir_name, sproj_name)
+        if os.path.exists(outputdir):
+            raise SystemExit(f'Output dir {outputdir} already exists. Will not overwrite.')
+        shutil.copytree(from_path, outputdir, ignore=shutil.ignore_patterns('subprojects'))
+
+def promote(options: 'argparse.Namespace') -> None:
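+    # e.g. `meson wrap promote subprojects/s1/subprojects/projname` copies a
+    # nested subproject (or its wrap file) up into the top-level subprojects/.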
+    argument = options.project_path
+    spdir_name = 'subprojects'
+    sprojs = mesonlib.detect_subprojects(spdir_name)
+
+    # check if the argument is a full path to a subproject directory or wrap file
+    system_native_path_argument = argument.replace('/', os.sep)
+    for matches in sprojs.values():
+        if system_native_path_argument in matches:
+            do_promotion(system_native_path_argument, spdir_name)
+            return
+
+    # otherwise the argument is just a subproject basename which must be unambiguous
+    if argument not in sprojs:
+        raise SystemExit(f'Subproject {argument} not found in directory tree.')
+    matches = sprojs[argument]
+    if len(matches) > 1:
+        print(f'There is more than one version of {argument} in tree. Please specify which one to promote:\n', file=sys.stderr)
+        for s in matches:
+            print(s, file=sys.stderr)
+        raise SystemExit(1)
+    do_promotion(matches[0], spdir_name)
+
+def status(options: 'argparse.Namespace') -> None:
+    print('Subproject status')
+    for w in glob('subprojects/*.wrap'):
+        name = os.path.basename(w)[:-5]
+        try:
+            (latest_branch, latest_revision) = get_latest_version(name, options.allow_insecure)
+        except Exception:
+            print('', name, 'not available in wrapdb.', file=sys.stderr)
+            continue
+        try:
+            (current_branch, current_revision, _, _, _) = get_current_version(w)
+        except Exception:
+            print('', name, 'Wrap file not from wrapdb.', file=sys.stderr)
+            continue
+        if current_branch == latest_branch and current_revision == latest_revision:
+            print('', name, f'up to date. Branch {current_branch}, revision {current_revision}.')
+        else:
+            print('', name, f'not up to date. Have {current_branch} {current_revision}, but {latest_branch} {latest_revision} is available.')
+
+def update_db(options: 'argparse.Namespace') -> None:
+    data = get_releases_data(options.allow_insecure)
+    Path('subprojects').mkdir(exist_ok=True)
+    with Path('subprojects/wrapdb.json').open('wb') as f:
+        f.write(data)
+
+def run(options: 'argparse.Namespace') -> int:
+    options.wrap_func(options)
+    return 0