From 3d98aeea9c70b2a7336d9ea8f7397b5c6d07d405 Mon Sep 17 00:00:00 2001
From: Ignas Anikevicius <240938+aignas@users.noreply.github.com>
Date: Sat, 5 Apr 2025 22:17:28 +0900
Subject: [PATCH 001/156] fix(toolchains): correctly order the toolchains
(#2735)
Since toolchain resolution picks the first toolchain whose target settings
match, the `minor_mapping` config setting is special: e.g. every `3.11.X`
toolchain matches the `python_version = "3.11"` setting.
This reshuffles the list so that toolchains listed in the `minor_mapping`
come before the rest.
At the same time, remove the workaround from `lock.bzl`, where the bug was
initially discovered.
Fixes #2685
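
A minimal Python sketch of the reordering idea (illustration only; the
helper name is hypothetical, and the ordering rule mirrors the
`parse_modules` change below):

    def reorder_toolchains(versions, minor_mapping):
        # Versions that the minor_mapping points at (e.g. "3.11" -> "3.11.11")
        # are moved to the front so that a `python_version = "3.X"` setting
        # matches them before any other "3.X.Y" toolchain.
        preferred = set(minor_mapping.values())
        first = [v for v in versions if minor_mapping.get(v, v) in preferred]
        rest = [v for v in versions if v not in first]
        return first + rest

    print(reorder_toolchains(
        ["3.11.1", "3.11.11", "3.11.10"],
        {"3.11": "3.11.11"},
    ))  # ['3.11.11', '3.11.1', '3.11.10']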
---
CHANGELOG.md | 3 +
python/private/python.bzl | 17 +-
python/uv/private/BUILD.bazel | 2 -
python/uv/private/lock.bzl | 31 +--
.../transition/multi_version_tests.bzl | 3 +-
tests/python/python_tests.bzl | 52 +++++
tests/toolchains/transitions/BUILD.bazel | 5 +
.../transitions/transitions_tests.bzl | 182 ++++++++++++++++++
8 files changed, 269 insertions(+), 26 deletions(-)
create mode 100644 tests/toolchains/transitions/BUILD.bazel
create mode 100644 tests/toolchains/transitions/transitions_tests.bzl
diff --git a/CHANGELOG.md b/CHANGELOG.md
index bbcf2561c8..b11270cb25 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -78,6 +78,9 @@ Unreleased changes template.
* (toolchains) Do not try to run `chmod` when downloading non-windows hermetic toolchain
repositories on Windows. Fixes
[#2660](https://github.com/bazel-contrib/rules_python/issues/2660).
+* (toolchains) Toolchain matching has been fixed for transitions that
+ transition on the `python_version` flag.
+ Fixes [#2685](https://github.com/bazel-contrib/rules_python/issues/2685).
{#v0-0-0-added}
### Added
diff --git a/python/private/python.bzl b/python/private/python.bzl
index 44eb09f766..296fb0ab7d 100644
--- a/python/private/python.bzl
+++ b/python/private/python.bzl
@@ -243,10 +243,25 @@ def parse_modules(*, module_ctx, _fail = fail):
if len(toolchains) > _MAX_NUM_TOOLCHAINS:
fail("more than {} python versions are not supported".format(_MAX_NUM_TOOLCHAINS))
+ # sort the toolchains so that the toolchain versions that are in the
+ # `minor_mapping` come first. This ensures that `python_version =
+ # "3.X"` transitions work as expected.
+ minor_version_toolchains = []
+ other_toolchains = []
+ minor_mapping = list(config.minor_mapping.values())
+ for t in toolchains:
+ # FIXME @aignas 2025-04-04: How can we unit test that this ordering is
+ # consistent with what would actually work?
+ if config.minor_mapping.get(t.python_version, t.python_version) in minor_mapping:
+ minor_version_toolchains.append(t)
+ else:
+ other_toolchains.append(t)
+ toolchains = minor_version_toolchains + other_toolchains
+
return struct(
config = config,
debug_info = debug_info,
- default_python_version = toolchains[-1].python_version,
+ default_python_version = default_toolchain.python_version,
toolchains = [
struct(
python_version = t.python_version,
diff --git a/python/uv/private/BUILD.bazel b/python/uv/private/BUILD.bazel
index d17ca39490..587ad9a0f9 100644
--- a/python/uv/private/BUILD.bazel
+++ b/python/uv/private/BUILD.bazel
@@ -43,10 +43,8 @@ bzl_library(
":toolchain_types_bzl",
"//python:py_binary_bzl",
"//python/private:bzlmod_enabled_bzl",
- "//python/private:full_version_bzl",
"//python/private:toolchain_types_bzl",
"@bazel_skylib//lib:shell",
- "@pythons_hub//:versions_bzl",
],
)
diff --git a/python/uv/private/lock.bzl b/python/uv/private/lock.bzl
index 69d277d653..45a3819ee6 100644
--- a/python/uv/private/lock.bzl
+++ b/python/uv/private/lock.bzl
@@ -16,10 +16,8 @@
"""
load("@bazel_skylib//lib:shell.bzl", "shell")
-load("@pythons_hub//:versions.bzl", "DEFAULT_PYTHON_VERSION", "MINOR_MAPPING")
load("//python:py_binary.bzl", "py_binary")
load("//python/private:bzlmod_enabled.bzl", "BZLMOD_ENABLED") # buildifier: disable=bzl-visibility
-load("//python/private:full_version.bzl", "full_version")
load("//python/private:toolchain_types.bzl", "EXEC_TOOLS_TOOLCHAIN_TYPE") # buildifier: disable=bzl-visibility
load(":toolchain_types.bzl", "UV_TOOLCHAIN_TYPE")
@@ -75,15 +73,15 @@ def _args(ctx):
def _lock_impl(ctx):
srcs = ctx.files.srcs
- python_version = full_version(
- version = ctx.attr.python_version or DEFAULT_PYTHON_VERSION,
- minor_mapping = MINOR_MAPPING,
- )
- output = ctx.actions.declare_file("{}.{}.out".format(
- ctx.label.name,
- python_version.replace(".", "_"),
- ))
+ fname = "{}.out".format(ctx.label.name)
+ python_version = ctx.attr.python_version
+ if python_version:
+ fname = "{}.{}.out".format(
+ ctx.label.name,
+ python_version.replace(".", "_"),
+ )
+ output = ctx.actions.declare_file(fname)
toolchain_info = ctx.toolchains[UV_TOOLCHAIN_TYPE]
uv = toolchain_info.uv_toolchain_info.uv[DefaultInfo].files_to_run.executable
@@ -166,15 +164,7 @@ def _transition_impl(input_settings, attr):
_PYTHON_VERSION_FLAG: input_settings[_PYTHON_VERSION_FLAG],
}
if attr.python_version:
- # FIXME @aignas 2025-03-20: using `full_version` is a workaround for a bug in
- # how we order toolchains in bazel. If I set the `python_version` flag
- # to `3.12`, I would expect the latest version to be selected, i.e. the
- # one that is in MINOR_MAPPING, but it seems that 3.12.0 is selected,
- # because of how the targets are ordered.
- settings[_PYTHON_VERSION_FLAG] = full_version(
- version = attr.python_version,
- minor_mapping = MINOR_MAPPING,
- )
+ settings[_PYTHON_VERSION_FLAG] = attr.python_version
return settings
_python_version_transition = transition(
@@ -436,9 +426,6 @@ def lock(
if not BZLMOD_ENABLED:
kwargs["target_compatible_with"] = ["@platforms//:incompatible"]
- # FIXME @aignas 2025-03-17: should we have one more target that transitions
- # the python_version to ensure that if somebody calls `bazel build
- # :requirements` that it is locked with the right `python_version`?
_lock(
name = name,
args = args,
diff --git a/tests/config_settings/transition/multi_version_tests.bzl b/tests/config_settings/transition/multi_version_tests.bzl
index aca341a295..93f6efd728 100644
--- a/tests/config_settings/transition/multi_version_tests.bzl
+++ b/tests/config_settings/transition/multi_version_tests.bzl
@@ -13,6 +13,7 @@
# limitations under the License.
"""Tests for py_test."""
+load("@pythons_hub//:versions.bzl", "DEFAULT_PYTHON_VERSION")
load("@rules_testing//lib:analysis_test.bzl", "analysis_test")
load("@rules_testing//lib:test_suite.bzl", "test_suite")
load("@rules_testing//lib:util.bzl", "TestingAspectInfo", rt_util = "util")
@@ -29,7 +30,7 @@ load("//tests/support:support.bzl", "CC_TOOLCHAIN")
# If the toolchain is not resolved then you will have a weird message telling
# you that your transition target does not have a PyRuntime provider, which is
# caused by there not being a toolchain detected for the target.
-_PYTHON_VERSION = "3.11"
+_PYTHON_VERSION = DEFAULT_PYTHON_VERSION
_tests = []
diff --git a/tests/python/python_tests.bzl b/tests/python/python_tests.bzl
index 1679794e15..97c47b57db 100644
--- a/tests/python/python_tests.bzl
+++ b/tests/python/python_tests.bzl
@@ -284,6 +284,58 @@ def _test_default_non_rules_python_ignore_root_user_error_non_root_module(env):
_tests.append(_test_default_non_rules_python_ignore_root_user_error_non_root_module)
+def _test_toolchain_ordering(env):
+ py = parse_modules(
+ module_ctx = _mock_mctx(
+ _mod(
+ name = "my_module",
+ toolchain = [
+ _toolchain("3.10"),
+ _toolchain("3.10.15"),
+ _toolchain("3.10.16"),
+ _toolchain("3.10.11"),
+ _toolchain("3.11.1"),
+ _toolchain("3.11.10"),
+ _toolchain("3.11.11", is_default = True),
+ ],
+ ),
+ _mod(name = "rules_python", toolchain = [_toolchain("3.11")]),
+ ),
+ )
+ got_versions = [
+ t.python_version
+ for t in py.toolchains
+ ]
+
+ env.expect.that_str(py.default_python_version).equals("3.11.11")
+ env.expect.that_dict(py.config.minor_mapping).contains_exactly({
+ "3.10": "3.10.16",
+ "3.11": "3.11.11",
+ "3.12": "3.12.9",
+ "3.13": "3.13.2",
+ "3.8": "3.8.20",
+ "3.9": "3.9.21",
+ })
+ env.expect.that_collection(got_versions).contains_exactly([
+ # First the full-version toolchains that are in minor_mapping,
+ # so that they get matched first when only a minor `python_version`
+ # (one present in MINOR_MAPPING) is requested.
+ #
+ # The default version is always set in the `python_version` flag, so we
+ # know that the default match will be somewhere in this first group.
+ "3.10",
+ "3.10.16",
+ "3.11",
+ "3.11.11",
+ # Next, the rest, where we will match things based on the `python_version` being
+ # the same
+ "3.10.15",
+ "3.10.11",
+ "3.11.1",
+ "3.11.10",
+ ]).in_order()
+
+_tests.append(_test_toolchain_ordering)
+
def _test_default_from_defaults(env):
py = parse_modules(
module_ctx = _mock_mctx(
diff --git a/tests/toolchains/transitions/BUILD.bazel b/tests/toolchains/transitions/BUILD.bazel
new file mode 100644
index 0000000000..a7bef8c0e5
--- /dev/null
+++ b/tests/toolchains/transitions/BUILD.bazel
@@ -0,0 +1,5 @@
+load(":transitions_tests.bzl", "transitions_test_suite")
+
+transitions_test_suite(
+ name = "transitions_tests",
+)
diff --git a/tests/toolchains/transitions/transitions_tests.bzl b/tests/toolchains/transitions/transitions_tests.bzl
new file mode 100644
index 0000000000..bddd1745f0
--- /dev/null
+++ b/tests/toolchains/transitions/transitions_tests.bzl
@@ -0,0 +1,182 @@
+# Copyright 2022 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+""
+
+load("@pythons_hub//:versions.bzl", "DEFAULT_PYTHON_VERSION", "MINOR_MAPPING")
+load("@rules_testing//lib:analysis_test.bzl", "analysis_test")
+load("@rules_testing//lib:test_suite.bzl", "test_suite")
+load("@rules_testing//lib:util.bzl", rt_util = "util")
+load("//python:versions.bzl", "TOOL_VERSIONS")
+load("//python/private:bzlmod_enabled.bzl", "BZLMOD_ENABLED") # buildifier: disable=bzl-visibility
+load("//python/private:full_version.bzl", "full_version") # buildifier: disable=bzl-visibility
+load("//python/private:toolchain_types.bzl", "EXEC_TOOLS_TOOLCHAIN_TYPE") # buildifier: disable=bzl-visibility
+load("//tests/support:support.bzl", "PYTHON_VERSION")
+
+_analysis_tests = []
+
+def _transition_impl(input_settings, attr):
+ """Transition based on python_version flag.
+
+ This is a simple transition impl that a user of rules_python may implement
+ for their own rule.
+ """
+ settings = {
+ PYTHON_VERSION: input_settings[PYTHON_VERSION],
+ }
+ if attr.python_version:
+ settings[PYTHON_VERSION] = attr.python_version
+ return settings
+
+_python_version_transition = transition(
+ implementation = _transition_impl,
+ inputs = [PYTHON_VERSION],
+ outputs = [PYTHON_VERSION],
+)
+
+TestInfo = provider(
+ doc = "A simple test provider to forward the values for the assertion.",
+ fields = {"got": "", "want": ""},
+)
+
+def _impl(ctx):
+ if ctx.attr.skip:
+ return [TestInfo(got = "", want = "")]
+
+ exec_tools = ctx.toolchains[EXEC_TOOLS_TOOLCHAIN_TYPE].exec_tools
+ got_version = exec_tools.exec_interpreter[platform_common.ToolchainInfo].py3_runtime.interpreter_version_info
+
+ return [
+ TestInfo(
+ got = "{}.{}.{}".format(
+ got_version.major,
+ got_version.minor,
+ got_version.micro,
+ ),
+ want = ctx.attr.want_version,
+ ),
+ ]
+
+_simple_transition = rule(
+ implementation = _impl,
+ attrs = {
+ "python_version": attr.string(
+ doc = "The input python version which we transition on.",
+ ),
+ "skip": attr.bool(
+ doc = "Whether to skip the test",
+ ),
+ "want_version": attr.string(
+ doc = "The python version that we actually expect to receive.",
+ ),
+ "_allowlist_function_transition": attr.label(
+ default = "@bazel_tools//tools/allowlists/function_transition_allowlist",
+ ),
+ },
+ toolchains = [
+ config_common.toolchain_type(
+ EXEC_TOOLS_TOOLCHAIN_TYPE,
+ mandatory = False,
+ ),
+ ],
+ cfg = _python_version_transition,
+)
+
+def _test_transitions(*, name, tests, skip = False):
+ """A reusable rule so that we can split the tests."""
+ targets = {}
+ for test_name, (input_version, want_version) in tests.items():
+ target_name = "{}_{}".format(name, test_name)
+ targets["python_" + test_name] = target_name
+ rt_util.helper_target(
+ _simple_transition,
+ name = target_name,
+ python_version = input_version,
+ want_version = want_version,
+ skip = skip,
+ )
+
+ analysis_test(
+ name = name,
+ impl = _test_transition_impl,
+ targets = targets,
+ )
+
+def _test_transition_impl(env, targets):
+ # Check that the forwarded version from the PyRuntimeInfo is correct
+ for target in dir(targets):
+ if not target.startswith("python"):
+ # Skip other attributes that might not be the ones we set (e.g. to_json, to_proto).
+ continue
+
+ test_info = env.expect.that_target(getattr(targets, target)).provider(
+ TestInfo,
+ factory = lambda v, meta: v,
+ )
+ env.expect.that_str(test_info.got).equals(test_info.want)
+
+def _test_full_version(name):
+ """Check that python_version transitions work.
+
+ Expectation is to get the same full version that we input.
+ """
+ _test_transitions(
+ name = name,
+ tests = {
+ v.replace(".", "_"): (v, v)
+ for v in TOOL_VERSIONS
+ },
+ )
+
+_analysis_tests.append(_test_full_version)
+
+def _test_minor_versions(name):
+ """Ensure that MINOR_MAPPING versions are correctly selected."""
+ _test_transitions(
+ name = name,
+ skip = not BZLMOD_ENABLED,
+ tests = {
+ minor.replace(".", "_"): (minor, full)
+ for minor, full in MINOR_MAPPING.items()
+ },
+ )
+
+_analysis_tests.append(_test_minor_versions)
+
+def _test_default(name):
+ """Check the default version.
+
+ Lastly, if we don't provide any version to the transition, we should
+ get the default version.
+ """
+ default_version = full_version(
+ version = DEFAULT_PYTHON_VERSION,
+ minor_mapping = MINOR_MAPPING,
+ ) if DEFAULT_PYTHON_VERSION else ""
+
+ _test_transitions(
+ name = name,
+ skip = not BZLMOD_ENABLED,
+ tests = {
+ "default": (None, default_version),
+ },
+ )
+
+_analysis_tests.append(_test_default)
+
+def transitions_test_suite(name):
+ test_suite(
+ name = name,
+ tests = _analysis_tests,
+ )
From f685fe9a192dcdc8b65376821d9f25b990aa54fa Mon Sep 17 00:00:00 2001
From: Matt Mackay
Date: Sat, 5 Apr 2025 09:43:16 -0400
Subject: [PATCH 002/156] fix: allow warn logging to be disabled via
RULES_PYTHON_REPO_DEBUG_VERBOSITY (#2737)
Allows the logging level to be set to `FAIL`, removing `WARN` logging.
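
A rough Python sketch of the verbosity gating this enables (the numeric
levels mirror the `repo_utils.bzl` hunk below; treating the default value 0
as the WARN level is an assumption of this illustration):

    _LEVELS = {"FAIL": -1, "WARN": 0, "INFO": 1, "DEBUG": 2, "TRACE": 3}

    def make_logger(verbosity_name):
        # Unknown names fall back to the default verbosity of 0.
        verbosity = _LEVELS.get(verbosity_name, 0)
        def log(level_name, msg):
            # A message is emitted only when its level is at or below the
            # configured verbosity, so FAIL (-1) silences WARN (0) output.
            if _LEVELS[level_name] <= verbosity:
                print("{}: {}".format(level_name, msg))
        return log

    make_logger("FAIL")("WARN", "suppressed")  # prints nothing
    make_logger("INFO")("WARN", "shown")       # prints "WARN: shown"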
---------
Co-authored-by: Ignas Anikevicius <240938+aignas@users.noreply.github.com>
---
CHANGELOG.md | 1 +
docs/environment-variables.md | 1 +
python/private/repo_utils.bzl | 1 +
3 files changed, 3 insertions(+)
diff --git a/CHANGELOG.md b/CHANGELOG.md
index b11270cb25..33acd38706 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -78,6 +78,7 @@ Unreleased changes template.
* (toolchains) Do not try to run `chmod` when downloading non-windows hermetic toolchain
repositories on Windows. Fixes
[#2660](https://github.com/bazel-contrib/rules_python/issues/2660).
+* (logging) Allow repo rule logging level to be set to `FAIL` via the `RULES_PYTHON_REPO_DEBUG_VERBOSITY` environment variable.
* (toolchains) Toolchain matching has been fixed for transitions that
transition on the `python_version` flag.
Fixes [#2685](https://github.com/bazel-contrib/rules_python/issues/2685).
diff --git a/docs/environment-variables.md b/docs/environment-variables.md
index d8735cb2d5..9500fa8295 100644
--- a/docs/environment-variables.md
+++ b/docs/environment-variables.md
@@ -101,6 +101,7 @@ doing. This is mostly useful for development to debug errors.
Determines the verbosity of logging output for repo rules. Valid values:
* `DEBUG`
+* `FAIL`
* `INFO`
* `TRACE`
:::
diff --git a/python/private/repo_utils.bzl b/python/private/repo_utils.bzl
index d9ad2449f1..73883a9244 100644
--- a/python/private/repo_utils.bzl
+++ b/python/private/repo_utils.bzl
@@ -56,6 +56,7 @@ def _logger(mrctx, name = None):
verbosity = {
"DEBUG": 2,
+ "FAIL": -1,
"INFO": 1,
"TRACE": 3,
}.get(verbosity_level, 0)
From f65b2ac7b20354cf18400cb6512548405a88639c Mon Sep 17 00:00:00 2001
From: Matt Mackay
Date: Sat, 5 Apr 2025 11:51:41 -0400
Subject: [PATCH 003/156] fix: run check on interpreter in isolated mode
(#2738)
Runs the check on the interpreter in the toolchain repo in isolated mode
via `-I`. This ensures it's not influenced by userland environment
variables, such as `PYTHONPATH`, which could otherwise cause this
invocation to pick up another interpreter version's site-packages.
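
A hedged sketch of the effect using plain `subprocess` (the polluted
`PYTHONPATH` value is made up for illustration):

    import os, subprocess, sys

    env = dict(os.environ, PYTHONPATH="/some/other/site-packages")
    # Without -I the child interpreter would prepend PYTHONPATH to sys.path;
    # with -I (which implies -E, -P and -s) the userland variable is ignored.
    out = subprocess.run(
        [sys.executable, "-I", "-c", "import sys; print(sys.path)"],
        env=env, capture_output=True, text=True,
    )
    print(out.stdout)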
---
CHANGELOG.md | 1 +
python/private/toolchains_repo.bzl | 9 ++++++++-
2 files changed, 9 insertions(+), 1 deletion(-)
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 33acd38706..ac41e81f6b 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -82,6 +82,7 @@ Unreleased changes template.
* (toolchains) Toolchain matching has been fixed for transitions that
transition on the `python_version` flag.
Fixes [#2685](https://github.com/bazel-contrib/rules_python/issues/2685).
+* (toolchains) Run the check on the Python interpreter in isolated mode, to ensure it's not affected by userland environment variables, such as `PYTHONPATH`.
{#v0-0-0-added}
### Added
diff --git a/python/private/toolchains_repo.bzl b/python/private/toolchains_repo.bzl
index 4e4a5de501..23c4643c0a 100644
--- a/python/private/toolchains_repo.bzl
+++ b/python/private/toolchains_repo.bzl
@@ -275,7 +275,14 @@ assert want_python == got_python, \
repo_utils.execute_checked(
rctx,
op = "CheckHostInterpreter",
- arguments = [rctx.path(python_binary), python_tester],
+ arguments = [
+ rctx.path(python_binary),
+ # Run the interpreter in isolated mode; this option implies -E, -P and -s.
+ # This ensures that userspace environment variables, such as PYTHONPATH,
+ # which may interfere with this invocation, are ignored.
+ "-I",
+ python_tester,
+ ],
)
if not rctx.delete(python_tester):
fail("Failed to delete the python tester")
From 537fc4b9e461639144083a1542e10f7589c5251f Mon Sep 17 00:00:00 2001
From: Ignas Anikevicius <240938+aignas@users.noreply.github.com>
Date: Sun, 6 Apr 2025 00:51:57 +0900
Subject: [PATCH 004/156] fix(pypi): correctly fallback to pip for git direct
URLs (#2732)
Whilst integrating #2695 I introduced a regression; here I add a test for
it and fix it. The code that derived the filename from the URL was too
eager and would break if the URL contained a git ref, as noted in the
test.
Before this commit and #2695 the code did not handle all of the cases that
are tested now either, so I think we are now in a good place. I am not
sure how we should handle `git_repository` URLs. Using `http_archive` and
`git_repository` would be nice, but I am not sure how we can introduce
that at the moment.
Work towards #2363
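
A small Python sketch of the fallback check added in
`parse_requirements.bzl` (pure illustration; the function name is made up):

    def filename_from_direct_url(url):
        # "some-archive/any-name.tar.gz" -> "any-name.tar.gz"
        _, _, filename = url.rpartition("/")
        if "." not in filename:
            # e.g. "git+https://git.server/repo/project@deadbeefdeadbeef"
            # yields "project@deadbeefdeadbeef"; with no extension we cannot
            # build a direct dist entry, so fall back to pip.
            return None
        return filename

    print(filename_from_direct_url("some-archive/any-name.tar.gz"))
    # any-name.tar.gz
    print(filename_from_direct_url(
        "git+https://git.server/repo/project@deadbeefdeadbeef"))
    # None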
---
python/private/pypi/parse_requirements.bzl | 6 +++
tests/pypi/extension/extension_tests.bzl | 50 +++++++++++++++++++++-
2 files changed, 55 insertions(+), 1 deletion(-)
diff --git a/python/private/pypi/parse_requirements.bzl b/python/private/pypi/parse_requirements.bzl
index 3280ce8df1..d2014a7eb9 100644
--- a/python/private/pypi/parse_requirements.bzl
+++ b/python/private/pypi/parse_requirements.bzl
@@ -297,6 +297,12 @@ def _add_dists(*, requirement, index_urls, logger = None):
if requirement.srcs.url:
url = requirement.srcs.url
_, _, filename = url.rpartition("/")
+ if "." not in filename:
+ # the detected filename has no extension, so it might be an sdist ref
+ # TODO @aignas 2025-04-03: should be handled if the following is fixed:
+ # https://github.com/bazel-contrib/rules_python/issues/2363
+ return [], None
+
direct_url_dist = struct(
url = url,
filename = filename,
diff --git a/tests/pypi/extension/extension_tests.bzl b/tests/pypi/extension/extension_tests.bzl
index 3a91c7b108..ab7a1358ad 100644
--- a/tests/pypi/extension/extension_tests.bzl
+++ b/tests/pypi/extension/extension_tests.bzl
@@ -662,6 +662,8 @@ some_pkg==0.0.1 @ example-direct.org/some_pkg-0.0.1-py3-none-any.whl \
direct_without_sha==0.0.1 @ example-direct.org/direct_without_sha-0.0.1-py3-none-any.whl
some_other_pkg==0.0.1
pip_fallback==0.0.1
+direct_sdist_without_sha @ some-archive/any-name.tar.gz
+git_dep @ git+https://git.server/repo/project@deadbeefdeadbeef
""",
}[x],
),
@@ -672,10 +674,28 @@ pip_fallback==0.0.1
)
pypi.is_reproducible().equals(False)
- pypi.exposed_packages().contains_exactly({"pypi": ["direct_without_sha", "pip_fallback", "simple", "some_other_pkg", "some_pkg"]})
+ pypi.exposed_packages().contains_exactly({"pypi": [
+ "direct_sdist_without_sha",
+ "direct_without_sha",
+ "git_dep",
+ "pip_fallback",
+ "simple",
+ "some_other_pkg",
+ "some_pkg",
+ ]})
pypi.hub_group_map().contains_exactly({"pypi": {}})
pypi.hub_whl_map().contains_exactly({
"pypi": {
+ "direct_sdist_without_sha": {
+ "pypi_315_any_name": [
+ struct(
+ config_setting = None,
+ filename = "any-name.tar.gz",
+ target_platforms = None,
+ version = "3.15",
+ ),
+ ],
+ },
"direct_without_sha": {
"pypi_315_direct_without_sha_0_0_1_py3_none_any": [
struct(
@@ -686,6 +706,16 @@ pip_fallback==0.0.1
),
],
},
+ "git_dep": {
+ "pypi_315_git_dep": [
+ struct(
+ config_setting = None,
+ filename = None,
+ target_platforms = None,
+ version = "3.15",
+ ),
+ ],
+ },
"pip_fallback": {
"pypi_315_pip_fallback": [
struct(
@@ -737,6 +767,17 @@ pip_fallback==0.0.1
},
})
pypi.whl_libraries().contains_exactly({
+ "pypi_315_any_name": {
+ "dep_template": "@pypi//{name}:{target}",
+ "experimental_target_platforms": ["cp315_linux_aarch64", "cp315_linux_arm", "cp315_linux_ppc", "cp315_linux_s390x", "cp315_linux_x86_64", "cp315_osx_aarch64", "cp315_osx_x86_64", "cp315_windows_x86_64"],
+ "extra_pip_args": ["--extra-args-for-sdist-building"],
+ "filename": "any-name.tar.gz",
+ "python_interpreter_target": "unit_test_interpreter_target",
+ "repo": "pypi_315",
+ "requirement": "direct_sdist_without_sha @ some-archive/any-name.tar.gz",
+ "sha256": "",
+ "urls": ["some-archive/any-name.tar.gz"],
+ },
"pypi_315_direct_without_sha_0_0_1_py3_none_any": {
"dep_template": "@pypi//{name}:{target}",
"experimental_target_platforms": ["cp315_linux_aarch64", "cp315_linux_arm", "cp315_linux_ppc", "cp315_linux_s390x", "cp315_linux_x86_64", "cp315_osx_aarch64", "cp315_osx_x86_64", "cp315_windows_x86_64"],
@@ -747,6 +788,13 @@ pip_fallback==0.0.1
"sha256": "",
"urls": ["example-direct.org/direct_without_sha-0.0.1-py3-none-any.whl"],
},
+ "pypi_315_git_dep": {
+ "dep_template": "@pypi//{name}:{target}",
+ "extra_pip_args": ["--extra-args-for-sdist-building"],
+ "python_interpreter_target": "unit_test_interpreter_target",
+ "repo": "pypi_315",
+ "requirement": "git_dep @ git+https://git.server/repo/project@deadbeefdeadbeef",
+ },
"pypi_315_pip_fallback": {
"dep_template": "@pypi//{name}:{target}",
"extra_pip_args": ["--extra-args-for-sdist-building"],
From 69a99200fa38096675bd37ba2856eb3077cd3b86 Mon Sep 17 00:00:00 2001
From: Jason Bedard
Date: Sat, 5 Apr 2025 09:02:59 -0700
Subject: [PATCH 005/156] fix: support gazelle generation_mode:update_only
(#2708)
This just fixes a crash when `generation_mode: update_only` causes
`GenerateRules` to not be invoked for 100% of directories.
Fix #2707
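
A sketch in Python of the parent-lookup behaviour the Go change implements
(assuming a dict keyed by package path, with "" as the root config; names
are illustrative only):

    import posixpath

    def parent_for_package(configs, pkg):
        # Walk up the package path until a configured ancestor is found;
        # with generation_mode update_only not every directory has an entry.
        while True:
            parent = posixpath.dirname(pkg)
            if parent == ".":
                parent = ""
            if parent in configs:
                return configs[parent]
            if parent == "":
                return None
            pkg = parent

    configs = {"": "root", "a/b/c": "abc"}
    print(parent_for_package(configs, "a/b/c/d/e"))  # abc
    print(parent_for_package(configs, "other/pkg"))  # root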
---
gazelle/pythonconfig/pythonconfig.go | 25 +++++++++++-------
gazelle/pythonconfig/pythonconfig_test.go | 32 +++++++++++++++++++++++
2 files changed, 48 insertions(+), 9 deletions(-)
diff --git a/gazelle/pythonconfig/pythonconfig.go b/gazelle/pythonconfig/pythonconfig.go
index 2183ec60a3..23c0cfd572 100644
--- a/gazelle/pythonconfig/pythonconfig.go
+++ b/gazelle/pythonconfig/pythonconfig.go
@@ -22,8 +22,8 @@ import (
"github.com/emirpasic/gods/lists/singlylinkedlist"
- "github.com/bazelbuild/bazel-gazelle/label"
"github.com/bazel-contrib/rules_python/gazelle/manifest"
+ "github.com/bazelbuild/bazel-gazelle/label"
)
// Directives
@@ -125,21 +125,28 @@ const (
// defaultIgnoreFiles is the list of default values used in the
// python_ignore_files option.
-var defaultIgnoreFiles = map[string]struct{}{
-}
+var defaultIgnoreFiles = map[string]struct{}{}
// Configs is an extension of map[string]*Config. It provides finding methods
// on top of the mapping.
type Configs map[string]*Config
// ParentForPackage returns the parent Config for the given Bazel package.
-func (c *Configs) ParentForPackage(pkg string) *Config {
- dir := path.Dir(pkg)
- if dir == "." {
- dir = ""
+func (c Configs) ParentForPackage(pkg string) *Config {
+ for {
+ dir := path.Dir(pkg)
+ if dir == "." {
+ dir = ""
+ }
+ parent := (map[string]*Config)(c)[dir]
+ if parent != nil {
+ return parent
+ }
+ if dir == "" {
+ return nil
+ }
+ pkg = dir
}
- parent := (map[string]*Config)(*c)[dir]
- return parent
}
// Config represents a config extension for a specific Bazel package.
diff --git a/gazelle/pythonconfig/pythonconfig_test.go b/gazelle/pythonconfig/pythonconfig_test.go
index 7cdb9af1d1..fe21ce236e 100644
--- a/gazelle/pythonconfig/pythonconfig_test.go
+++ b/gazelle/pythonconfig/pythonconfig_test.go
@@ -248,3 +248,35 @@ func TestFormatThirdPartyDependency(t *testing.T) {
})
}
}
+
+func TestConfigsMap(t *testing.T) {
+ t.Run("only root", func(t *testing.T) {
+ configs := Configs{"": New("root/dir", "")}
+
+ if configs.ParentForPackage("") == nil {
+ t.Fatal("expected non-nil for root config")
+ }
+
+ if configs.ParentForPackage("a/b/c") != configs[""] {
+ t.Fatal("expected root for subpackage")
+ }
+ })
+
+ t.Run("sparse child configs", func(t *testing.T) {
+ configs := Configs{"": New("root/dir", "")}
+ configs["a"] = configs[""].NewChild()
+ configs["a/b/c"] = configs["a"].NewChild()
+
+ if configs.ParentForPackage("a/b/c/d") != configs["a/b/c"] {
+ t.Fatal("child should match direct parent")
+ }
+
+ if configs.ParentForPackage("a/b/c/d/e") != configs["a/b/c"] {
+ t.Fatal("grandchild should match first parant")
+ }
+
+ if configs.ParentForPackage("other/root/path") != configs[""] {
+ t.Fatal("non-configured subpackage should match root")
+ }
+ })
+}
From 2bc357787e8d6e76fd2f58e401cf3062bcf4f415 Mon Sep 17 00:00:00 2001
From: Ignas Anikevicius <240938+aignas@users.noreply.github.com>
Date: Sun, 6 Apr 2025 01:27:12 +0900
Subject: [PATCH 006/156] fix(pypi): mark the extension reproducible (#2730)
This will remove the merge conflicts and improve the usability when the
`MODULE.bazel.lock` is used together with `rules_python`. This means
that the lock file will not be used to read the `URL` and `sha256`
values for the Python sources when the `experimental_index_url` is used,
but the idea is that this information will be kept in the repo cache.
Fixes #2434
Created #2731 to leverage the bazel feature to write immutable facts to
the lock file once it becomes available.
---
CHANGELOG.md | 3 +++
python/private/pypi/extension.bzl | 6 +-----
tests/pypi/extension/extension_tests.bzl | 7 -------
3 files changed, 4 insertions(+), 12 deletions(-)
diff --git a/CHANGELOG.md b/CHANGELOG.md
index ac41e81f6b..69e9330f64 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -69,6 +69,9 @@ Unreleased changes template.
* (toolchains) Remove all but the `3.8.20` version of the Python `3.8` interpreter, which has
reached EOL. If users still need other versions of the `3.8` interpreter, please supply
the URLs manually in {bzl:obj}`python.toolchain` or {bzl:obj}`python_register_toolchains` calls.
+* (pypi) The PyPI extension will no longer write the lock file entries as the
+ extension has been marked reproducible.
+ Fixes [#2434](https://github.com/bazel-contrib/rules_python/issues/2434).
[20250317]: https://github.com/astral-sh/python-build-standalone/releases/tag/20250317
diff --git a/python/private/pypi/extension.bzl b/python/private/pypi/extension.bzl
index f782e69a45..8fce47656b 100644
--- a/python/private/pypi/extension.bzl
+++ b/python/private/pypi/extension.bzl
@@ -419,8 +419,6 @@ You cannot use both the additive_build_content and additive_build_content_file a
extra_aliases = {}
whl_libraries = {}
- is_reproducible = True
-
for mod in module_ctx.modules:
for pip_attr in mod.tags.parse:
hub_name = pip_attr.hub_name
@@ -458,7 +456,6 @@ You cannot use both the additive_build_content and additive_build_content_file a
get_index_urls = None
if pip_attr.experimental_index_url:
- is_reproducible = False
skip_sources = [
normalize_name(s)
for s in pip_attr.simpleapi_skip
@@ -543,7 +540,6 @@ You cannot use both the additive_build_content and additive_build_content_file a
k: dict(sorted(args.items()))
for k, args in sorted(whl_libraries.items())
},
- is_reproducible = is_reproducible,
)
def _pip_impl(module_ctx):
@@ -640,7 +636,7 @@ def _pip_impl(module_ctx):
# In order to be able to dogfood the `experimental_index_url` feature before it gets
# stabilized, we have created the `_pip_non_reproducible` function, that will result
# in extra entries in the lock file.
- return module_ctx.extension_metadata(reproducible = mods.is_reproducible)
+ return module_ctx.extension_metadata(reproducible = True)
else:
return None
diff --git a/tests/pypi/extension/extension_tests.bzl b/tests/pypi/extension/extension_tests.bzl
index ab7a1358ad..1652e76156 100644
--- a/tests/pypi/extension/extension_tests.bzl
+++ b/tests/pypi/extension/extension_tests.bzl
@@ -64,7 +64,6 @@ def _parse_modules(env, **kwargs):
return env.expect.that_struct(
parse_modules(**kwargs),
attrs = dict(
- is_reproducible = subjects.bool,
exposed_packages = subjects.dict,
hub_group_map = subjects.dict,
hub_whl_map = subjects.dict,
@@ -160,7 +159,6 @@ def _test_simple(env):
},
)
- pypi.is_reproducible().equals(True)
pypi.exposed_packages().contains_exactly({"pypi": ["simple"]})
pypi.hub_group_map().contains_exactly({"pypi": {}})
pypi.hub_whl_map().contains_exactly({"pypi": {
@@ -209,7 +207,6 @@ def _test_simple_multiple_requirements(env):
},
)
- pypi.is_reproducible().equals(True)
pypi.exposed_packages().contains_exactly({"pypi": ["simple"]})
pypi.hub_group_map().contains_exactly({"pypi": {}})
pypi.hub_whl_map().contains_exactly({"pypi": {
@@ -278,7 +275,6 @@ torch==2.4.1 ; platform_machine != 'x86_64' \
},
)
- pypi.is_reproducible().equals(True)
pypi.exposed_packages().contains_exactly({"pypi": ["torch"]})
pypi.hub_group_map().contains_exactly({"pypi": {}})
pypi.hub_whl_map().contains_exactly({"pypi": {
@@ -404,7 +400,6 @@ torch==2.4.1+cpu ; platform_machine == 'x86_64' \
simpleapi_download = mocksimpleapi_download,
)
- pypi.is_reproducible().equals(False)
pypi.exposed_packages().contains_exactly({"pypi": ["torch"]})
pypi.hub_group_map().contains_exactly({"pypi": {}})
pypi.hub_whl_map().contains_exactly({"pypi": {
@@ -535,7 +530,6 @@ simple==0.0.3 \
},
)
- pypi.is_reproducible().equals(True)
pypi.exposed_packages().contains_exactly({"pypi": ["simple"]})
pypi.hub_group_map().contains_exactly({"pypi": {}})
pypi.hub_whl_map().contains_exactly({"pypi": {
@@ -673,7 +667,6 @@ git_dep @ git+https://git.server/repo/project@deadbeefdeadbeef
simpleapi_download = mocksimpleapi_download,
)
- pypi.is_reproducible().equals(False)
pypi.exposed_packages().contains_exactly({"pypi": [
"direct_sdist_without_sha",
"direct_without_sha",
From 01968255660aa99041c0c8989a0d68c01aa2978e Mon Sep 17 00:00:00 2001
From: Richard Levasseur
Date: Sat, 5 Apr 2025 09:37:21 -0700
Subject: [PATCH 007/156] feat: allow populating binary's venv site-packages
with symlinks (#2617)
This implements functionality to allow libraries to populate the
site-packages directory
of downstream binaries. The basic implementation is:
* Libraries provide tuples of `(runfile path, site packages path)` in the
`PyInfo.site_packages_symlinks` field.
* Binaries create symlinks (using declare_symlink) in their site-packages
directory pointing to the runfiles paths libraries provide.
The design was chosen because of the following properties:
* The site-packages directory is relocatable
* Populating site packages is cheap (`O(number of third-party dependencies)`)
* Dependencies are only created once in the runfiles, no matter how many
binaries use them. This minimizes disk usage, file counts, inodes, etc.
The `site_packages_symlinks` field is a depset with topological
ordering. Using topological
ordering allows dependencies closer to the binary to have precedence,
which gives some
basic control over what entries are used.
Additionally, the runfiles path to link to can be None/empty, in which
case, the
directory in site-packages won't be created. This allows binaries to
prevent creation
of directories that might e.g. conflict.
For now, this functionality is disabled by default. The flag
`--venvs_site_packages=yes` can be set to allow using it, which
automatically enables it for pypi-generated targets.
When enabled, it does basic detection of implicit namespace directories,
which allows multiple distributions to "install" into the same
site-packages directory.
Though this functionality is primarily useful for dependencies from pypi
(e.g. via pip.parse), it is not yet activated for those targets, for two
main reasons:
1. The wheel extraction code creates pkgutil-style `__init__.py` shims
during the repo phase. The build phase can't distinguish these artificial
rules_python-generated shims from actual `__init__.py` files, which breaks
the implicit namespace detection logic.
2. A flag guard is needed before changing the behavior. Even though how
third-party libraries are added to sys.path is an implementation detail,
the behavior has been there for many years, so an escape hatch should be
added.
Work towards https://github.com/bazelbuild/rules_python/issues/2156
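
A condensed Python sketch of the precedence and pruning behaviour described
above (it mirrors `_build_link_map` in `py_executable.bzl`, but is only an
illustration; the entry values are made up):

    def build_link_map(entries):
        # entries are (runfiles_path, site_packages_path) tuples in topological
        # order, so entries closer to the binary are seen first and win.
        link_map = {}
        for runfiles_path, sp_path in entries:
            if sp_path not in link_map:
                link_map[sp_path] = runfiles_path

        # An empty runfiles_path means "do not create this site-packages entry".
        link_map = {k: v for k, v in link_map.items() if v}

        # Drop entries nested under (or above) an already-kept symlink path.
        kept = {}
        for sp_path, runfiles_path in link_map.items():
            prefix = sp_path + "/"
            if any(sp_path.startswith(k + "/") or k.startswith(prefix) for k in kept):
                continue
            kept[sp_path] = runfiles_path
        return kept

    print(build_link_map([
        ("_main/close_dep", "nspkg/alpha"),   # closer to the binary, wins
        ("_other/far_dep", "nspkg/alpha"),
        ("", "nspkg/beta"),                   # suppress creation of nspkg/beta
        ("_other/far_dep2", "nspkg/beta"),
        ("_main/whole_pkg", "pkg"),
        ("_main/sub", "pkg/sub"),             # nested under pkg, dropped
    ]))
    # {'nspkg/alpha': '_main/close_dep', 'pkg': '_main/whole_pkg'}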
---
.bazelrc | 4 +-
CHANGELOG.md | 4 +
MODULE.bazel | 6 +
docs/BUILD.bazel | 1 +
docs/_includes/experimental_api.md | 5 +
.../python/config_settings/index.md | 17 ++
internal_dev_deps.bzl | 6 +
python/BUILD.bazel | 3 +
python/config_settings/BUILD.bazel | 8 +
python/features.bzl | 43 ++++-
python/private/attributes.bzl | 11 ++
python/private/builders.bzl | 13 +-
python/private/common.bzl | 17 +-
python/private/enum.bzl | 20 +++
python/private/flags.bzl | 38 ++---
python/private/py_executable.bzl | 76 ++++++++-
python/private/py_info.bzl | 35 +++-
python/private/py_library.bzl | 161 +++++++++++++++++-
python/private/pypi/whl_library_targets.bzl | 1 +
tests/modules/other/BUILD.bazel | 0
tests/modules/other/MODULE.bazel | 3 +
tests/modules/other/nspkg_delta/BUILD.bazel | 10 ++
.../nspkg/subnspkg/delta/__init__.py | 1 +
tests/modules/other/nspkg_gamma/BUILD.bazel | 10 ++
.../nspkg/subnspkg/gamma/__init__.py | 1 +
.../whl_library_targets_tests.bzl | 2 +
tests/support/sh_py_run_test.bzl | 4 +
tests/venv_site_packages_libs/BUILD.bazel | 17 ++
tests/venv_site_packages_libs/bin.py | 32 ++++
.../nspkg_alpha/BUILD.bazel | 10 ++
.../nspkg/subnspkg/alpha/__init__.py | 1 +
.../nspkg_beta/BUILD.bazel | 10 ++
.../nspkg/subnspkg/beta/__init__.py | 1 +
.../venv_site_packages_pypi_test.py | 36 ++++
34 files changed, 574 insertions(+), 33 deletions(-)
create mode 100644 docs/_includes/experimental_api.md
create mode 100644 tests/modules/other/BUILD.bazel
create mode 100644 tests/modules/other/MODULE.bazel
create mode 100644 tests/modules/other/nspkg_delta/BUILD.bazel
create mode 100644 tests/modules/other/nspkg_delta/site-packages/nspkg/subnspkg/delta/__init__.py
create mode 100644 tests/modules/other/nspkg_gamma/BUILD.bazel
create mode 100644 tests/modules/other/nspkg_gamma/site-packages/nspkg/subnspkg/gamma/__init__.py
create mode 100644 tests/venv_site_packages_libs/BUILD.bazel
create mode 100644 tests/venv_site_packages_libs/bin.py
create mode 100644 tests/venv_site_packages_libs/nspkg_alpha/BUILD.bazel
create mode 100644 tests/venv_site_packages_libs/nspkg_alpha/site-packages/nspkg/subnspkg/alpha/__init__.py
create mode 100644 tests/venv_site_packages_libs/nspkg_beta/BUILD.bazel
create mode 100644 tests/venv_site_packages_libs/nspkg_beta/site-packages/nspkg/subnspkg/beta/__init__.py
create mode 100644 tests/venv_site_packages_libs/venv_site_packages_pypi_test.py
diff --git a/.bazelrc b/.bazelrc
index ada5c5a0a7..4e6f2fa187 100644
--- a/.bazelrc
+++ b/.bazelrc
@@ -4,8 +4,8 @@
# (Note, we cannot use `common --deleted_packages` because the bazel version command doesn't support it)
# To update these lines, execute
# `bazel run @rules_bazel_integration_test//tools:update_deleted_packages`
-build --deleted_packages=examples/build_file_generation,examples/build_file_generation/random_number_generator,examples/bzlmod,examples/bzlmod_build_file_generation,examples/bzlmod_build_file_generation/other_module/other_module/pkg,examples/bzlmod_build_file_generation/runfiles,examples/bzlmod/entry_points,examples/bzlmod/entry_points/tests,examples/bzlmod/libs/my_lib,examples/bzlmod/other_module,examples/bzlmod/other_module/other_module/pkg,examples/bzlmod/patches,examples/bzlmod/py_proto_library,examples/bzlmod/py_proto_library/example.com/another_proto,examples/bzlmod/py_proto_library/example.com/proto,examples/bzlmod/runfiles,examples/bzlmod/tests,examples/bzlmod/tests/other_module,examples/bzlmod/whl_mods,examples/multi_python_versions/libs/my_lib,examples/multi_python_versions/requirements,examples/multi_python_versions/tests,examples/pip_parse,examples/pip_parse_vendored,examples/pip_repository_annotations,examples/py_proto_library,examples/py_proto_library/example.com/another_proto,examples/py_proto_library/example.com/proto,gazelle,gazelle/manifest,gazelle/manifest/generate,gazelle/manifest/hasher,gazelle/manifest/test,gazelle/modules_mapping,gazelle/python,gazelle/pythonconfig,gazelle/python/private,tests/integration/compile_pip_requirements,tests/integration/compile_pip_requirements_test_from_external_repo,tests/integration/custom_commands,tests/integration/ignore_root_user_error,tests/integration/ignore_root_user_error/submodule,tests/integration/local_toolchains,tests/integration/pip_parse,tests/integration/pip_parse/empty,tests/integration/py_cc_toolchain_registered
-query --deleted_packages=examples/build_file_generation,examples/build_file_generation/random_number_generator,examples/bzlmod,examples/bzlmod_build_file_generation,examples/bzlmod_build_file_generation/other_module/other_module/pkg,examples/bzlmod_build_file_generation/runfiles,examples/bzlmod/entry_points,examples/bzlmod/entry_points/tests,examples/bzlmod/libs/my_lib,examples/bzlmod/other_module,examples/bzlmod/other_module/other_module/pkg,examples/bzlmod/patches,examples/bzlmod/py_proto_library,examples/bzlmod/py_proto_library/example.com/another_proto,examples/bzlmod/py_proto_library/example.com/proto,examples/bzlmod/runfiles,examples/bzlmod/tests,examples/bzlmod/tests/other_module,examples/bzlmod/whl_mods,examples/multi_python_versions/libs/my_lib,examples/multi_python_versions/requirements,examples/multi_python_versions/tests,examples/pip_parse,examples/pip_parse_vendored,examples/pip_repository_annotations,examples/py_proto_library,examples/py_proto_library/example.com/another_proto,examples/py_proto_library/example.com/proto,gazelle,gazelle/manifest,gazelle/manifest/generate,gazelle/manifest/hasher,gazelle/manifest/test,gazelle/modules_mapping,gazelle/python,gazelle/pythonconfig,gazelle/python/private,tests/integration/compile_pip_requirements,tests/integration/compile_pip_requirements_test_from_external_repo,tests/integration/custom_commands,tests/integration/ignore_root_user_error,tests/integration/ignore_root_user_error/submodule,tests/integration/local_toolchains,tests/integration/pip_parse,tests/integration/pip_parse/empty,tests/integration/py_cc_toolchain_registered
+build --deleted_packages=examples/build_file_generation,examples/build_file_generation/random_number_generator,examples/bzlmod,examples/bzlmod_build_file_generation,examples/bzlmod_build_file_generation/other_module/other_module/pkg,examples/bzlmod_build_file_generation/runfiles,examples/bzlmod/entry_points,examples/bzlmod/entry_points/tests,examples/bzlmod/libs/my_lib,examples/bzlmod/other_module,examples/bzlmod/other_module/other_module/pkg,examples/bzlmod/patches,examples/bzlmod/py_proto_library,examples/bzlmod/py_proto_library/example.com/another_proto,examples/bzlmod/py_proto_library/example.com/proto,examples/bzlmod/runfiles,examples/bzlmod/tests,examples/bzlmod/tests/other_module,examples/bzlmod/whl_mods,examples/multi_python_versions/libs/my_lib,examples/multi_python_versions/requirements,examples/multi_python_versions/tests,examples/pip_parse,examples/pip_parse_vendored,examples/pip_repository_annotations,examples/py_proto_library,examples/py_proto_library/example.com/another_proto,examples/py_proto_library/example.com/proto,gazelle,gazelle/manifest,gazelle/manifest/generate,gazelle/manifest/hasher,gazelle/manifest/test,gazelle/modules_mapping,gazelle/python,gazelle/pythonconfig,gazelle/python/private,tests/integration/compile_pip_requirements,tests/integration/compile_pip_requirements_test_from_external_repo,tests/integration/custom_commands,tests/integration/ignore_root_user_error,tests/integration/ignore_root_user_error/submodule,tests/integration/local_toolchains,tests/integration/pip_parse,tests/integration/pip_parse/empty,tests/integration/py_cc_toolchain_registered,tests/modules/other,tests/modules/other/nspkg_delta,tests/modules/other/nspkg_gamma
+query --deleted_packages=examples/build_file_generation,examples/build_file_generation/random_number_generator,examples/bzlmod,examples/bzlmod_build_file_generation,examples/bzlmod_build_file_generation/other_module/other_module/pkg,examples/bzlmod_build_file_generation/runfiles,examples/bzlmod/entry_points,examples/bzlmod/entry_points/tests,examples/bzlmod/libs/my_lib,examples/bzlmod/other_module,examples/bzlmod/other_module/other_module/pkg,examples/bzlmod/patches,examples/bzlmod/py_proto_library,examples/bzlmod/py_proto_library/example.com/another_proto,examples/bzlmod/py_proto_library/example.com/proto,examples/bzlmod/runfiles,examples/bzlmod/tests,examples/bzlmod/tests/other_module,examples/bzlmod/whl_mods,examples/multi_python_versions/libs/my_lib,examples/multi_python_versions/requirements,examples/multi_python_versions/tests,examples/pip_parse,examples/pip_parse_vendored,examples/pip_repository_annotations,examples/py_proto_library,examples/py_proto_library/example.com/another_proto,examples/py_proto_library/example.com/proto,gazelle,gazelle/manifest,gazelle/manifest/generate,gazelle/manifest/hasher,gazelle/manifest/test,gazelle/modules_mapping,gazelle/python,gazelle/pythonconfig,gazelle/python/private,tests/integration/compile_pip_requirements,tests/integration/compile_pip_requirements_test_from_external_repo,tests/integration/custom_commands,tests/integration/ignore_root_user_error,tests/integration/ignore_root_user_error/submodule,tests/integration/local_toolchains,tests/integration/pip_parse,tests/integration/pip_parse/empty,tests/integration/py_cc_toolchain_registered,tests/modules/other,tests/modules/other/nspkg_delta,tests/modules/other/nspkg_gamma
test --test_output=errors
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 69e9330f64..818773e589 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -107,6 +107,10 @@ Unreleased changes template.
please check the {obj}`uv.configure` tag class.
* Add support for riscv64 linux platform.
* (toolchains) Add python 3.13.2 and 3.12.9 toolchains
+* (providers) (experimental) {obj}`PyInfo.site_packages_symlinks` field added to
+ allow specifying links to create within the venv site packages (only
+ applicable with {obj}`--bootstrap_impl=script`)
+ ([#2156](https://github.com/bazelbuild/rules_python/issues/2156)).
{#v0-0-0-removed}
### Removed
diff --git a/MODULE.bazel b/MODULE.bazel
index e4e45af7f0..c649896344 100644
--- a/MODULE.bazel
+++ b/MODULE.bazel
@@ -85,6 +85,7 @@ bazel_dep(name = "rules_shell", version = "0.3.0", dev_dependency = True)
bazel_dep(name = "rules_multirun", version = "0.9.0", dev_dependency = True)
bazel_dep(name = "bazel_ci_rules", version = "1.0.0", dev_dependency = True)
bazel_dep(name = "rules_pkg", version = "1.0.1", dev_dependency = True)
+bazel_dep(name = "other", version = "0", dev_dependency = True)
# Extra gazelle plugin deps so that WORKSPACE.bzlmod can continue including it for e2e tests.
# We use `WORKSPACE.bzlmod` because it is impossible to have dev-only local overrides.
@@ -106,6 +107,11 @@ local_path_override(
path = "gazelle",
)
+local_path_override(
+ module_name = "other",
+ path = "tests/modules/other",
+)
+
dev_python = use_extension(
"//python/extensions:python.bzl",
"python",
diff --git a/docs/BUILD.bazel b/docs/BUILD.bazel
index bebecd18b2..29eac6e714 100644
--- a/docs/BUILD.bazel
+++ b/docs/BUILD.bazel
@@ -87,6 +87,7 @@ sphinx_stardocs(
name = "bzl_api_docs",
srcs = [
"//python:defs_bzl",
+ "//python:features_bzl",
"//python:packaging_bzl",
"//python:pip_bzl",
"//python:py_binary_bzl",
diff --git a/docs/_includes/experimental_api.md b/docs/_includes/experimental_api.md
new file mode 100644
index 0000000000..45473a7cbf
--- /dev/null
+++ b/docs/_includes/experimental_api.md
@@ -0,0 +1,5 @@
+:::{warning}
+
+**Experimental API.** This API is still under development and may change or be
+removed without notice.
+:::
diff --git a/docs/api/rules_python/python/config_settings/index.md b/docs/api/rules_python/python/config_settings/index.md
index 79c7d0c109..340335d9b1 100644
--- a/docs/api/rules_python/python/config_settings/index.md
+++ b/docs/api/rules_python/python/config_settings/index.md
@@ -213,6 +213,23 @@ Values:
::::
+::::
+
+:::{flag} venvs_site_packages
+
+Determines if libraries use a site-packages layout for their files.
+
+Note that this flag only affects PyPI dependencies of `--bootstrap_impl=script` binaries.
+
+:::{include} /_includes/experimental_api.md
+:::
+
+
+Values:
+* `no` (default): Make libraries importable by adding to `sys.path`
+* `yes`: Make libraries importable by creating paths in a binary's site-packages directory.
+::::
+
::::{bzl:flag} bootstrap_impl
Determine how programs implement their startup process.
diff --git a/internal_dev_deps.bzl b/internal_dev_deps.bzl
index cd33475f43..87690be1ad 100644
--- a/internal_dev_deps.bzl
+++ b/internal_dev_deps.bzl
@@ -15,6 +15,7 @@
"""Dependencies that are needed for development and testing of rules_python itself."""
load("@bazel_tools//tools/build_defs/repo:http.bzl", _http_archive = "http_archive", _http_file = "http_file")
+load("@bazel_tools//tools/build_defs/repo:local.bzl", "local_repository")
load("@bazel_tools//tools/build_defs/repo:utils.bzl", "maybe")
load("//python/private:internal_config_repo.bzl", "internal_config_repo") # buildifier: disable=bzl-visibility
@@ -42,6 +43,11 @@ def rules_python_internal_deps():
"""
internal_config_repo(name = "rules_python_internal")
+ local_repository(
+ name = "other",
+ path = "tests/modules/other",
+ )
+
http_archive(
name = "bazel_skylib",
sha256 = "bc283cdfcd526a52c3201279cda4bc298652efa898b10b4db0837dc51652756f",
diff --git a/python/BUILD.bazel b/python/BUILD.bazel
index c52e772666..a699c81cc4 100644
--- a/python/BUILD.bazel
+++ b/python/BUILD.bazel
@@ -79,6 +79,9 @@ bzl_library(
bzl_library(
name = "features_bzl",
srcs = ["features.bzl"],
+ deps = [
+ "@rules_python_internal//:rules_python_config_bzl",
+ ],
)
bzl_library(
diff --git a/python/config_settings/BUILD.bazel b/python/config_settings/BUILD.bazel
index 796cf0c9c4..45354e24d9 100644
--- a/python/config_settings/BUILD.bazel
+++ b/python/config_settings/BUILD.bazel
@@ -9,6 +9,7 @@ load(
"LibcFlag",
"PrecompileFlag",
"PrecompileSourceRetentionFlag",
+ "VenvsSitePackages",
"VenvsUseDeclareSymlinkFlag",
)
load(
@@ -195,6 +196,13 @@ string_flag(
visibility = ["//visibility:public"],
)
+string_flag(
+ name = "venvs_site_packages",
+ build_setting_default = VenvsSitePackages.NO,
+ # NOTE: Only public because it is used in pip hub repos.
+ visibility = ["//visibility:public"],
+)
+
define_pypi_internal_flags(
name = "define_pypi_internal_flags",
)
diff --git a/python/features.bzl b/python/features.bzl
index a7098f4710..8edfb698fc 100644
--- a/python/features.bzl
+++ b/python/features.bzl
@@ -19,8 +19,49 @@ load("@rules_python_internal//:rules_python_config.bzl", "config")
# See https://git-scm.com/docs/git-archive/2.29.0#Documentation/git-archive.txt-export-subst
_VERSION_PRIVATE = "$Format:%(describe:tags=true)$"
+def _features_typedef():
+ """Information about features rules_python has implemented.
+
+ ::::{field} precompile
+ :type: bool
+
+ True if the precompile attributes are available.
+
+ :::{versionadded} 0.33.0
+ :::
+ ::::
+
+ ::::{field} py_info_site_packages_symlinks
+
+ True if the `PyInfo.site_packages_symlinks` field is available.
+
+ :::{versionadded} VERSION_NEXT_FEATURE
+ :::
+ ::::
+
+ ::::{field} uses_builtin_rules
+ :type: bool
+
+ True if the rules are using the Bazel-builtin implementation.
+
+ :::{versionadded} 1.1.0
+ :::
+ ::::
+
+ ::::{field} version
+ :type: str
+
+ The rules_python version. This is a semver format, e.g. `X.Y.Z` with
+ optional trailing `-rcN`. For unreleased versions, it is an empty string.
+ :::{versionadded} 0.38.0
+ :::
+ ::::
+ """
+
features = struct(
- version = _VERSION_PRIVATE if "$Format" not in _VERSION_PRIVATE else "",
+ TYPEDEF = _features_typedef,
+ # keep sorted
precompile = True,
+ py_info_site_packages_symlinks = True,
uses_builtin_rules = not config.enable_pystar,
+ version = _VERSION_PRIVATE if "$Format" not in _VERSION_PRIVATE else "",
)
diff --git a/python/private/attributes.bzl b/python/private/attributes.bzl
index b042b3db6a..8543caba7b 100644
--- a/python/private/attributes.bzl
+++ b/python/private/attributes.bzl
@@ -254,6 +254,17 @@ These are typically `py_library` rules.
Targets that only provide data files used at runtime belong in the `data`
attribute.
+
+:::{note}
+The order of this list can matter because it affects the order that information
+from dependencies is merged in, which can be relevant depending on the ordering
+mode of depsets that are merged.
+
+* {obj}`PyInfo.site_packages_symlinks` uses topological ordering.
+
+See {obj}`PyInfo` for more information about the ordering of its depsets and
+how its fields are merged.
+:::
""",
),
"precompile": lambda: attrb.String(
diff --git a/python/private/builders.bzl b/python/private/builders.bzl
index 50aa3ed91a..54d46c2af2 100644
--- a/python/private/builders.bzl
+++ b/python/private/builders.bzl
@@ -15,12 +15,19 @@
load("@bazel_skylib//lib:types.bzl", "types")
-def _DepsetBuilder():
- """Create a builder for a depset."""
+def _DepsetBuilder(order = None):
+ """Create a builder for a depset.
+
+ Args:
+ order: {type}`str | None` The order to initialize the depset to, if any.
+
+ Returns:
+ {type}`DepsetBuilder`
+ """
# buildifier: disable=uninitialized
self = struct(
- _order = [None],
+ _order = [order],
add = lambda *a, **k: _DepsetBuilder_add(self, *a, **k),
build = lambda *a, **k: _DepsetBuilder_build(self, *a, **k),
direct = [],
diff --git a/python/private/common.bzl b/python/private/common.bzl
index 48e2653ebb..072a1bb296 100644
--- a/python/private/common.bzl
+++ b/python/private/common.bzl
@@ -30,6 +30,16 @@ PackageSpecificationInfo = getattr(py_internal, "PackageSpecificationInfo", None
# Extensions without the dot
_PYTHON_SOURCE_EXTENSIONS = ["py"]
+# Extensions that mean a file is relevant to Python
+PYTHON_FILE_EXTENSIONS = [
+ "dll", # Python C modules, Windows specific
+ "dylib", # Python C modules, Mac specific
+ "py",
+ "pyc",
+ "pyi",
+ "so", # Python C modules, usually Linux
+]
+
def create_binary_semantics_struct(
*,
create_executable,
@@ -367,7 +377,8 @@ def create_py_info(
required_pyc_files,
implicit_pyc_files,
implicit_pyc_source_files,
- imports):
+ imports,
+ site_packages_symlinks = []):
"""Create PyInfo provider.
Args:
@@ -385,6 +396,9 @@ def create_py_info(
implicit_pyc_files: {type}`depset[File]` Implicitly generated pyc files
that a binary can choose to include.
imports: depset of strings; the import path values to propagate.
+ site_packages_symlinks: {type}`list[tuple[str, str]]` tuples of
+ `(runfiles_path, site_packages_path)` for symlinks to create
+ in the consuming binary's venv site packages.
Returns:
A tuple of the PyInfo instance and a depset of the
@@ -392,6 +406,7 @@ def create_py_info(
necessary for deprecated extra actions support).
"""
py_info = PyInfoBuilder()
+ py_info.site_packages_symlinks.add(site_packages_symlinks)
py_info.direct_original_sources.add(original_sources)
py_info.direct_pyc_files.add(required_pyc_files)
py_info.direct_pyi_files.add(ctx.files.pyi_srcs)
diff --git a/python/private/enum.bzl b/python/private/enum.bzl
index d71442e3b5..4d0fb10699 100644
--- a/python/private/enum.bzl
+++ b/python/private/enum.bzl
@@ -43,3 +43,23 @@ def enum(methods = {}, **kwargs):
self = struct(__members__ = members, **kwargs)
return self
+
+def _FlagEnum_flag_values(self):
+ return sorted(self.__members__.values())
+
+def FlagEnum(**kwargs):
+ """Define an enum specialized for flags.
+
+ Args:
+ **kwargs: members of the enum.
+
+ Returns:
+ {type}`FlagEnum` struct. This is an enum with the following extras:
+ * `flag_values`: A function that returns a sorted list of the
+ flag values (enum `__members__`). Useful for passing to the
+ `values` attribute for string flags.
+ """
+ return enum(
+ methods = dict(flag_values = _FlagEnum_flag_values),
+ **kwargs
+ )
diff --git a/python/private/flags.bzl b/python/private/flags.bzl
index 1019faa8d6..c53e4610ff 100644
--- a/python/private/flags.bzl
+++ b/python/private/flags.bzl
@@ -19,27 +19,7 @@ unnecessary files when all that are needed are flag definitions.
"""
load("@bazel_skylib//rules:common_settings.bzl", "BuildSettingInfo")
-load(":enum.bzl", "enum")
-
-def _FlagEnum_flag_values(self):
- return sorted(self.__members__.values())
-
-def FlagEnum(**kwargs):
- """Define an enum specialized for flags.
-
- Args:
- **kwargs: members of the enum.
-
- Returns:
- {type}`FlagEnum` struct. This is an enum with the following extras:
- * `flag_values`: A function that returns a sorted list of the
- flag values (enum `__members__`). Useful for passing to the
- `values` attribute for string flags.
- """
- return enum(
- methods = dict(flag_values = _FlagEnum_flag_values),
- **kwargs
- )
+load(":enum.bzl", "FlagEnum", "enum")
def _AddSrcsToRunfilesFlag_is_enabled(ctx):
value = ctx.attr._add_srcs_to_runfiles_flag[BuildSettingInfo].value
@@ -138,6 +118,22 @@ VenvsUseDeclareSymlinkFlag = FlagEnum(
get_value = _venvs_use_declare_symlink_flag_get_value,
)
+def _venvs_site_packages_is_enabled(ctx):
+ if not ctx.attr.experimental_venvs_site_packages:
+ return False
+ flag_value = ctx.attr.experimental_venvs_site_packages[BuildSettingInfo].value
+ return flag_value == VenvsSitePackages.YES
+
+# Decides if libraries try to use a site-packages layout using site_packages_symlinks
+# buildifier: disable=name-conventions
+VenvsSitePackages = FlagEnum(
+ # Use site_packages_symlinks
+ YES = "yes",
+ # Don't use site_packages_symlinks
+ NO = "no",
+ is_enabled = _venvs_site_packages_is_enabled,
+)
+
# Used for matching freethreaded toolchains and would have to be used in wheels
# as well.
# buildifier: disable=name-conventions
diff --git a/python/private/py_executable.bzl b/python/private/py_executable.bzl
index fed46ab223..f33c2b6ca1 100644
--- a/python/private/py_executable.bzl
+++ b/python/private/py_executable.bzl
@@ -612,15 +612,89 @@ def _create_venv(ctx, output_prefix, imports, runtime_details):
},
computed_substitutions = computed_subs,
)
+ site_packages_symlinks = _create_site_packages_symlinks(ctx, site_packages)
return struct(
interpreter = interpreter,
recreate_venv_at_runtime = not venvs_use_declare_symlink_enabled,
# Runfiles root relative path or absolute path
interpreter_actual_path = interpreter_actual_path,
- files_without_interpreter = [pyvenv_cfg, pth, site_init],
+ files_without_interpreter = [pyvenv_cfg, pth, site_init] + site_packages_symlinks,
)
+def _create_site_packages_symlinks(ctx, site_packages):
+ """Creates symlinks within site-packages.
+
+ Args:
+ ctx: current rule ctx
+ site_packages: runfiles-root-relative path to the site-packages directory
+
+ Returns:
+ {type}`list[File]` list of the File symlink objects created.
+ """
+
+ # maps site-package symlink to the runfiles path it should point to
+ entries = depset(
+ # NOTE: Topological ordering is used so that dependencies closer to the
+ # binary have precedence in creating their symlinks. This allows the
+ # binary a modicum of control over the result.
+ order = "topological",
+ transitive = [
+ dep[PyInfo].site_packages_symlinks
+ for dep in ctx.attr.deps
+ if PyInfo in dep
+ ],
+ ).to_list()
+ link_map = _build_link_map(entries)
+
+ sp_files = []
+ for sp_dir_path, link_to in link_map.items():
+ sp_link = ctx.actions.declare_symlink(paths.join(site_packages, sp_dir_path))
+ sp_link_rf_path = runfiles_root_path(ctx, sp_link.short_path)
+ rel_path = relative_path(
+ # dirname is necessary because a relative symlink is relative to
+ # the directory the symlink resides within.
+ from_ = paths.dirname(sp_link_rf_path),
+ to = link_to,
+ )
+ ctx.actions.symlink(output = sp_link, target_path = rel_path)
+ sp_files.append(sp_link)
+ return sp_files
+
+def _build_link_map(entries):
+ link_map = {}
+ for link_to_runfiles_path, site_packages_path in entries:
+ if site_packages_path in link_map:
+ # We ignore duplicates by design. The dependency closer to the
+ # binary gets precedence due to the topological ordering.
+ continue
+ else:
+ link_map[site_packages_path] = link_to_runfiles_path
+
+ # An empty link_to value means to not create the site package symlink.
+ # Because of the topological ordering, this allows binaries to remove
+ # entries by having an earlier dependency produce empty link_to values.
+ for sp_dir_path, link_to in link_map.items():
+ if not link_to:
+ link_map.pop(sp_dir_path)
+
+ # Remove entries that would be a child path of a created symlink.
+ # Earlier entries have precedence to match how exact matches are handled.
+ keep_link_map = {}
+ for _ in range(len(link_map)):
+ if not link_map:
+ break
+ dirname, value = link_map.popitem()
+ keep_link_map[dirname] = value
+
+ prefix = dirname + "/" # Add slash to prevent /X matching /XY
+ for maybe_suffix in link_map.keys():
+            # Add slash to prevent /X matching /XY, but keep the original key for pop()
+            if (maybe_suffix + "/").startswith(prefix) or prefix.startswith(maybe_suffix + "/"):
+                link_map.pop(maybe_suffix)
+
+ return keep_link_map
+
def _map_each_identity(v):
return v
diff --git a/python/private/py_info.bzl b/python/private/py_info.bzl
index ef654c303e..4ecd02a438 100644
--- a/python/private/py_info.bzl
+++ b/python/private/py_info.bzl
@@ -42,7 +42,8 @@ def _PyInfo_init(
direct_original_sources = depset(),
transitive_original_sources = depset(),
direct_pyi_files = depset(),
- transitive_pyi_files = depset()):
+ transitive_pyi_files = depset(),
+ site_packages_symlinks = depset()):
_check_arg_type("transitive_sources", "depset", transitive_sources)
# Verify it's postorder compatible, but retain is original ordering.
@@ -70,6 +71,7 @@ def _PyInfo_init(
"has_py2_only_sources": has_py2_only_sources,
"has_py3_only_sources": has_py2_only_sources,
"imports": imports,
+ "site_packages_symlinks": site_packages_symlinks,
"transitive_implicit_pyc_files": transitive_implicit_pyc_files,
"transitive_implicit_pyc_source_files": transitive_implicit_pyc_source_files,
"transitive_original_sources": transitive_original_sources,
@@ -140,6 +142,34 @@ A depset of import path strings to be added to the `PYTHONPATH` of executable
Python targets. These are accumulated from the transitive `deps`.
The order of the depset is not guaranteed and may be changed in the future. It
is recommended to use `default` order (the default).
+""",
+ "site_packages_symlinks": """
+:type: depset[tuple[str | None, str]]
+
+A depset with `topological` ordering.
+
+Tuples of `(runfiles_path, site_packages_path)`. Where
+* `runfiles_path` is a runfiles-root relative path. It is the path to the
+  code to make importable. If `None` or an empty string, then no
+  site-packages directory with the `site_packages_path` name is
+  created.
+* `site_packages_path` is a path relative to the site-packages directory of
+ the venv for whatever creates the venv (typically py_binary). It makes
+ the code in `runfiles_path` available for import. Note that this
+ is created as a "raw" symlink (via `declare_symlink`).
+
+:::{include} /_includes/experimental_api.md
+:::
+
+:::{tip}
+The topological ordering means dependencies earlier and closer to the consumer
+have precedence. This allows e.g. a binary to add dependencies that override
+values from further away dependencies, such as forcing symlinks to point to
+specific paths or preventing symlinks from being created.
+:::
+
+:::{versionadded} VERSION_NEXT_FEATURE
+:::
""",
"transitive_implicit_pyc_files": """
:type: depset[File]
@@ -266,6 +296,7 @@ def PyInfoBuilder():
transitive_pyc_files = builders.DepsetBuilder(),
transitive_pyi_files = builders.DepsetBuilder(),
transitive_sources = builders.DepsetBuilder(),
+ site_packages_symlinks = builders.DepsetBuilder(order = "topological"),
)
return self
@@ -351,6 +382,7 @@ def _PyInfoBuilder_merge_all(self, transitive, *, direct = []):
self.transitive_original_sources.add(info.transitive_original_sources)
self.transitive_pyc_files.add(info.transitive_pyc_files)
self.transitive_pyi_files.add(info.transitive_pyi_files)
+ self.site_packages_symlinks.add(info.site_packages_symlinks)
return self
@@ -400,6 +432,7 @@ def _PyInfoBuilder_build(self):
transitive_original_sources = self.transitive_original_sources.build(),
transitive_pyc_files = self.transitive_pyc_files.build(),
transitive_pyi_files = self.transitive_pyi_files.build(),
+ site_packages_symlinks = self.site_packages_symlinks.build(),
)
else:
kwargs = {}
diff --git a/python/private/py_library.bzl b/python/private/py_library.bzl
index f6c7b12578..edd0db579f 100644
--- a/python/private/py_library.bzl
+++ b/python/private/py_library.bzl
@@ -14,6 +14,7 @@
"""Common code for implementing py_library rules."""
load("@bazel_skylib//lib:dicts.bzl", "dicts")
+load("@bazel_skylib//lib:paths.bzl", "paths")
load("@bazel_skylib//rules:common_settings.bzl", "BuildSettingInfo")
load(":attr_builders.bzl", "attrb")
load(
@@ -25,8 +26,21 @@ load(
"REQUIRED_EXEC_GROUP_BUILDERS",
)
load(":builders.bzl", "builders")
-load(":common.bzl", "collect_cc_info", "collect_imports", "collect_runfiles", "create_instrumented_files_info", "create_library_semantics_struct", "create_output_group_info", "create_py_info", "filter_to_py_srcs", "get_imports")
-load(":flags.bzl", "AddSrcsToRunfilesFlag", "PrecompileFlag")
+load(
+ ":common.bzl",
+ "PYTHON_FILE_EXTENSIONS",
+ "collect_cc_info",
+ "collect_imports",
+ "collect_runfiles",
+ "create_instrumented_files_info",
+ "create_library_semantics_struct",
+ "create_output_group_info",
+ "create_py_info",
+ "filter_to_py_srcs",
+ "get_imports",
+ "runfiles_root_path",
+)
+load(":flags.bzl", "AddSrcsToRunfilesFlag", "PrecompileFlag", "VenvsSitePackages")
load(":precompile.bzl", "maybe_precompile")
load(":py_cc_link_params_info.bzl", "PyCcLinkParamsInfo")
load(":py_internal.bzl", "py_internal")
@@ -44,6 +58,46 @@ LIBRARY_ATTRS = dicts.add(
PY_SRCS_ATTRS,
IMPORTS_ATTRS,
{
+ "experimental_venvs_site_packages": lambda: attrb.Label(
+ doc = """
+**INTERNAL ATTRIBUTE. SHOULD ONLY BE SET BY rules_python-INTERNAL CODE.**
+
+:::{include} /_includes/experimental_api.md
+:::
+
+A flag that decides whether the library should treat its sources as a
+site-packages layout.
+
+When the flag is `yes`, then the `srcs` files are treated as a site-packages
+layout that is relative to the `imports` attribute. The `imports` attribute
+can have only a single element. It is a repo-relative runfiles path.
+
+For example, in the `my/pkg/BUILD.bazel` file, given
+`srcs=["site-packages/foo/bar.py"]`, specifying
+`imports=["my/pkg/site-packages"]` means `foo/bar.py` is the file path
+under the binary's venv site-packages directory that should be made available (i.e.
+`import foo.bar` will work).
+
+`__init__.py` files are treated specially to provide basic support for [implicit
+namespace packages](
+https://packaging.python.org/en/latest/guides/packaging-namespace-packages/#native-namespace-packages).
+However, the *content* of the files cannot be taken into account, merely their
+presence or absence. Stated another way: [pkgutil-style namespace packages](
+https://packaging.python.org/en/latest/guides/packaging-namespace-packages/#pkgutil-style-namespace-packages)
+won't be understood as namespace packages; they'll be seen as regular packages. This will
+likely lead to conflicts with other targets that contribute to the namespace.
+
+:::{tip}
+This attribute populates {obj}`PyInfo.site_packages_symlinks`, which is
+a topologically ordered depset. This means dependencies closer and earlier
+to a consumer have precedence. See {obj}`PyInfo.site_packages_symlinks` for
+more information.
+:::
+
+:::{versionadded} VERSION_NEXT_FEATURE
+:::
+""",
+ ),
"_add_srcs_to_runfiles_flag": lambda: attrb.Label(
default = "//python/config_settings:add_srcs_to_runfiles",
),
@@ -98,6 +152,11 @@ def py_library_impl(ctx, *, semantics):
runfiles.add(collect_runfiles(ctx))
runfiles = runfiles.build(ctx)
+ imports = []
+ site_packages_symlinks = []
+
+ imports, site_packages_symlinks = _get_imports_and_site_packages_symlinks(ctx, semantics)
+
cc_info = semantics.get_cc_info_for_library(ctx)
py_info, deps_transitive_sources, builtins_py_info = create_py_info(
ctx,
@@ -106,7 +165,8 @@ def py_library_impl(ctx, *, semantics):
required_pyc_files = required_pyc_files,
implicit_pyc_files = implicit_pyc_files,
implicit_pyc_source_files = implicit_pyc_source_files,
- imports = collect_imports(ctx, semantics),
+ imports = imports,
+ site_packages_symlinks = site_packages_symlinks,
)
# TODO(b/253059598): Remove support for extra actions; https://github.com/bazelbuild/bazel/issues/16455
@@ -144,6 +204,101 @@ Source files are no longer added to the runfiles directly.
:::
"""
+def _get_imports_and_site_packages_symlinks(ctx, semantics):
+ imports = depset()
+ site_packages_symlinks = depset()
+ if VenvsSitePackages.is_enabled(ctx):
+ site_packages_symlinks = _get_site_packages_symlinks(ctx)
+ else:
+ imports = collect_imports(ctx, semantics)
+ return imports, site_packages_symlinks
+
+def _get_site_packages_symlinks(ctx):
+ imports = ctx.attr.imports
+ if len(imports) == 0:
+ fail("When venvs_site_packages is enabled, exactly one `imports` " +
+ "value must be specified, got 0")
+ elif len(imports) > 1:
+ fail("When venvs_site_packages is enabled, exactly one `imports` " +
+ "value must be specified, got {}".format(imports))
+ else:
+ site_packages_root = imports[0]
+
+ if site_packages_root.endswith("/"):
+ fail("The site packages root value from `imports` cannot end in " +
+ "slash, got {}".format(site_packages_root))
+ if site_packages_root.startswith("/"):
+ fail("The site packages root value from `imports` cannot start with " +
+ "slash, got {}".format(site_packages_root))
+
+ # Append slash to prevent incorrectly prefix-string matches
+ site_packages_root += "/"
+
+ # We have to build a list of (runfiles path, site-packages path) pairs of
+ # the files to create in the consuming binary's venv site-packages directory.
+ # To minimize the number of files to create, we just return the paths
+ # to the directories containing the code of interest.
+ #
+ # However, namespace packages complicate matters: multiple
+ # distributions install in the same directory in site-packages. This
+ # works out because they don't overlap in their files. Typically, they
+ # install to different directories within the namespace package
+ # directory. Namespace package directories are simply directories
+ # within site-packages that *don't* have an `__init__.py` file, which
+ # can be arbitrarily deep. Thus, we simply have to look for the
+ # directories that _do_ have an `__init__.py` file and treat those as
+ # the path to symlink to.
+
+ repo_runfiles_dirname = None
+ dirs_with_init = {} # dirname -> runfile path
+ for src in ctx.files.srcs:
+ if src.extension not in PYTHON_FILE_EXTENSIONS:
+ continue
+ path = _repo_relative_short_path(src.short_path)
+ if not path.startswith(site_packages_root):
+ continue
+ path = path.removeprefix(site_packages_root)
+ dir_name, _, filename = path.rpartition("/")
+ if not dir_name:
+ # This would be e.g. `site-packages/__init__.py`, which isn't valid
+ # because it's not within a directory for an importable Python package.
+ # However, the pypi integration over-eagerly adds a pkgutil-style
+ # __init__.py file during the repo phase. Just ignore them for now.
+ continue
+
+ if filename.startswith("__init__."):
+ dirs_with_init[dir_name] = None
+ repo_runfiles_dirname = runfiles_root_path(ctx, src.short_path).partition("/")[0]
+
+ # Sort so that we encounter `foo` before `foo/bar`. This ensures we
+ # see the top-most explicit package first.
+ dirnames = sorted(dirs_with_init.keys())
+ first_level_explicit_packages = []
+ for d in dirnames:
+ is_sub_package = False
+ for existing in first_level_explicit_packages:
+ # Suffix with / to prevent foo matching foobar
+ if d.startswith(existing + "/"):
+ is_sub_package = True
+ break
+ if not is_sub_package:
+ first_level_explicit_packages.append(d)
+
+ site_packages_symlinks = []
+ for dirname in first_level_explicit_packages:
+ site_packages_symlinks.append((
+ paths.join(repo_runfiles_dirname, site_packages_root, dirname),
+ dirname,
+ ))
+ return site_packages_symlinks
+
+def _repo_relative_short_path(short_path):
+ # Convert `../+pypi+foo/some/file.py` to `some/file.py`
+ if short_path.startswith("../"):
+ return short_path[3:].partition("/")[2]
+ else:
+ return short_path
+
# NOTE: Exported publicaly
def create_py_library_rule_builder():
"""Create a rule builder for a py_library.
diff --git a/python/private/pypi/whl_library_targets.bzl b/python/private/pypi/whl_library_targets.bzl
index c390da2613..95031e6181 100644
--- a/python/private/pypi/whl_library_targets.bzl
+++ b/python/private/pypi/whl_library_targets.bzl
@@ -266,6 +266,7 @@ def whl_library_targets(
),
tags = tags,
visibility = impl_vis,
+ experimental_venvs_site_packages = Label("@rules_python//python/config_settings:venvs_site_packages"),
)
def _config_settings(dependencies_by_platform, native = native, **kwargs):
diff --git a/tests/modules/other/BUILD.bazel b/tests/modules/other/BUILD.bazel
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/tests/modules/other/MODULE.bazel b/tests/modules/other/MODULE.bazel
new file mode 100644
index 0000000000..7cd3118b81
--- /dev/null
+++ b/tests/modules/other/MODULE.bazel
@@ -0,0 +1,3 @@
+module(name = "other")
+
+bazel_dep(name = "rules_python", version = "0")
diff --git a/tests/modules/other/nspkg_delta/BUILD.bazel b/tests/modules/other/nspkg_delta/BUILD.bazel
new file mode 100644
index 0000000000..457033aacf
--- /dev/null
+++ b/tests/modules/other/nspkg_delta/BUILD.bazel
@@ -0,0 +1,10 @@
+load("@rules_python//python:py_library.bzl", "py_library")
+
+package(default_visibility = ["//visibility:public"])
+
+py_library(
+ name = "nspkg_delta",
+ srcs = glob(["site-packages/**/*.py"]),
+ experimental_venvs_site_packages = "@rules_python//python/config_settings:venvs_site_packages",
+ imports = [package_name() + "/site-packages"],
+)
diff --git a/tests/modules/other/nspkg_delta/site-packages/nspkg/subnspkg/delta/__init__.py b/tests/modules/other/nspkg_delta/site-packages/nspkg/subnspkg/delta/__init__.py
new file mode 100644
index 0000000000..bb7b160deb
--- /dev/null
+++ b/tests/modules/other/nspkg_delta/site-packages/nspkg/subnspkg/delta/__init__.py
@@ -0,0 +1 @@
+# Intentionally empty
diff --git a/tests/modules/other/nspkg_gamma/BUILD.bazel b/tests/modules/other/nspkg_gamma/BUILD.bazel
new file mode 100644
index 0000000000..89038e80d2
--- /dev/null
+++ b/tests/modules/other/nspkg_gamma/BUILD.bazel
@@ -0,0 +1,10 @@
+load("@rules_python//python:py_library.bzl", "py_library")
+
+package(default_visibility = ["//visibility:public"])
+
+py_library(
+ name = "nspkg_gamma",
+ srcs = glob(["site-packages/**/*.py"]),
+ experimental_venvs_site_packages = "@rules_python//python/config_settings:venvs_site_packages",
+ imports = [package_name() + "/site-packages"],
+)
diff --git a/tests/modules/other/nspkg_gamma/site-packages/nspkg/subnspkg/gamma/__init__.py b/tests/modules/other/nspkg_gamma/site-packages/nspkg/subnspkg/gamma/__init__.py
new file mode 100644
index 0000000000..bb7b160deb
--- /dev/null
+++ b/tests/modules/other/nspkg_gamma/site-packages/nspkg/subnspkg/gamma/__init__.py
@@ -0,0 +1 @@
+# Intentionally empty
diff --git a/tests/pypi/whl_library_targets/whl_library_targets_tests.bzl b/tests/pypi/whl_library_targets/whl_library_targets_tests.bzl
index a042ed0346..f738e03b5d 100644
--- a/tests/pypi/whl_library_targets/whl_library_targets_tests.bzl
+++ b/tests/pypi/whl_library_targets/whl_library_targets_tests.bzl
@@ -273,6 +273,7 @@ def _test_whl_and_library_deps(env):
),
"tags": ["tag1", "tag2"],
"visibility": ["//visibility:public"],
+ "experimental_venvs_site_packages": Label("//python/config_settings:venvs_site_packages"),
},
]) # buildifier: @unsorted-dict-items
@@ -335,6 +336,7 @@ def _test_group(env):
}),
"tags": [],
"visibility": ["@pypi__groups//:__pkg__"],
+ "experimental_venvs_site_packages": Label("//python/config_settings:venvs_site_packages"),
},
]) # buildifier: @unsorted-dict-items
diff --git a/tests/support/sh_py_run_test.bzl b/tests/support/sh_py_run_test.bzl
index 7b3b617da1..9c8134ff40 100644
--- a/tests/support/sh_py_run_test.bzl
+++ b/tests/support/sh_py_run_test.bzl
@@ -40,6 +40,8 @@ def _perform_transition_impl(input_settings, attr, base_impl):
settings["//python/bin:python_src"] = attr.python_src
if attr.venvs_use_declare_symlink:
settings["//python/config_settings:venvs_use_declare_symlink"] = attr.venvs_use_declare_symlink
+ if attr.venvs_site_packages:
+ settings["//python/config_settings:venvs_site_packages"] = attr.venvs_site_packages
return settings
_RECONFIG_INPUTS = [
@@ -47,6 +49,7 @@ _RECONFIG_INPUTS = [
"//python/bin:python_src",
"//command_line_option:extra_toolchains",
"//python/config_settings:venvs_use_declare_symlink",
+ "//python/config_settings:venvs_site_packages",
]
_RECONFIG_OUTPUTS = _RECONFIG_INPUTS + [
"//command_line_option:build_python_zip",
@@ -67,6 +70,7 @@ toolchain.
""",
),
"python_src": attrb.Label(),
+ "venvs_site_packages": attrb.String(),
"venvs_use_declare_symlink": attrb.String(),
}
diff --git a/tests/venv_site_packages_libs/BUILD.bazel b/tests/venv_site_packages_libs/BUILD.bazel
new file mode 100644
index 0000000000..5d02708800
--- /dev/null
+++ b/tests/venv_site_packages_libs/BUILD.bazel
@@ -0,0 +1,17 @@
+load("//tests/support:sh_py_run_test.bzl", "py_reconfig_test")
+load("//tests/support:support.bzl", "SUPPORTS_BOOTSTRAP_SCRIPT")
+
+py_reconfig_test(
+ name = "venvs_site_packages_libs_test",
+ srcs = ["bin.py"],
+ bootstrap_impl = "script",
+ main = "bin.py",
+ target_compatible_with = SUPPORTS_BOOTSTRAP_SCRIPT,
+ venvs_site_packages = "yes",
+ deps = [
+ "//tests/venv_site_packages_libs/nspkg_alpha",
+ "//tests/venv_site_packages_libs/nspkg_beta",
+ "@other//nspkg_delta",
+ "@other//nspkg_gamma",
+ ],
+)
diff --git a/tests/venv_site_packages_libs/bin.py b/tests/venv_site_packages_libs/bin.py
new file mode 100644
index 0000000000..b944be69e3
--- /dev/null
+++ b/tests/venv_site_packages_libs/bin.py
@@ -0,0 +1,32 @@
+import importlib
+import os
+import sys
+import unittest
+
+
+class VenvSitePackagesLibraryTest(unittest.TestCase):
+ def setUp(self):
+ super().setUp()
+ if sys.prefix == sys.base_prefix:
+ raise AssertionError("Not running under a venv")
+ self.venv = sys.prefix
+
+ def assert_imported_from_venv(self, module_name):
+ module = importlib.import_module(module_name)
+ self.assertEqual(module.__name__, module_name)
+ self.assertTrue(
+ module.__file__.startswith(self.venv),
+ f"\n{module_name} was imported, but not from the venv.\n"
+ + f"venv : {self.venv}\n"
+ + f"actual: {module.__file__}",
+ )
+
+ def test_imported_from_venv(self):
+ self.assert_imported_from_venv("nspkg.subnspkg.alpha")
+ self.assert_imported_from_venv("nspkg.subnspkg.beta")
+ self.assert_imported_from_venv("nspkg.subnspkg.gamma")
+ self.assert_imported_from_venv("nspkg.subnspkg.delta")
+
+
+if __name__ == "__main__":
+ unittest.main()
diff --git a/tests/venv_site_packages_libs/nspkg_alpha/BUILD.bazel b/tests/venv_site_packages_libs/nspkg_alpha/BUILD.bazel
new file mode 100644
index 0000000000..c40c3b4080
--- /dev/null
+++ b/tests/venv_site_packages_libs/nspkg_alpha/BUILD.bazel
@@ -0,0 +1,10 @@
+load("@rules_python//python:py_library.bzl", "py_library")
+
+package(default_visibility = ["//visibility:public"])
+
+py_library(
+ name = "nspkg_alpha",
+ srcs = glob(["site-packages/**/*.py"]),
+ experimental_venvs_site_packages = "//python/config_settings:venvs_site_packages",
+ imports = [package_name() + "/site-packages"],
+)
diff --git a/tests/venv_site_packages_libs/nspkg_alpha/site-packages/nspkg/subnspkg/alpha/__init__.py b/tests/venv_site_packages_libs/nspkg_alpha/site-packages/nspkg/subnspkg/alpha/__init__.py
new file mode 100644
index 0000000000..b5ee093672
--- /dev/null
+++ b/tests/venv_site_packages_libs/nspkg_alpha/site-packages/nspkg/subnspkg/alpha/__init__.py
@@ -0,0 +1 @@
+whoami = "alpha"
diff --git a/tests/venv_site_packages_libs/nspkg_beta/BUILD.bazel b/tests/venv_site_packages_libs/nspkg_beta/BUILD.bazel
new file mode 100644
index 0000000000..5d402183bd
--- /dev/null
+++ b/tests/venv_site_packages_libs/nspkg_beta/BUILD.bazel
@@ -0,0 +1,10 @@
+load("@rules_python//python:py_library.bzl", "py_library")
+
+package(default_visibility = ["//visibility:public"])
+
+py_library(
+ name = "nspkg_beta",
+ srcs = glob(["site-packages/**/*.py"]),
+ experimental_venvs_site_packages = "//python/config_settings:venvs_site_packages",
+ imports = [package_name() + "/site-packages"],
+)
diff --git a/tests/venv_site_packages_libs/nspkg_beta/site-packages/nspkg/subnspkg/beta/__init__.py b/tests/venv_site_packages_libs/nspkg_beta/site-packages/nspkg/subnspkg/beta/__init__.py
new file mode 100644
index 0000000000..a2a65910c7
--- /dev/null
+++ b/tests/venv_site_packages_libs/nspkg_beta/site-packages/nspkg/subnspkg/beta/__init__.py
@@ -0,0 +1 @@
+whoami = "beta"
diff --git a/tests/venv_site_packages_libs/venv_site_packages_pypi_test.py b/tests/venv_site_packages_libs/venv_site_packages_pypi_test.py
new file mode 100644
index 0000000000..519b258044
--- /dev/null
+++ b/tests/venv_site_packages_libs/venv_site_packages_pypi_test.py
@@ -0,0 +1,36 @@
+import os
+import sys
+import unittest
+
+
+class VenvSitePackagesLibraryTest(unittest.TestCase):
+ def test_imported_from_venv(self):
+ self.assertNotEqual(sys.prefix, sys.base_prefix, "Not running under a venv")
+ venv = sys.prefix
+
+ from nspkg.subnspkg import alpha
+
+ self.assertEqual(alpha.whoami, "alpha")
+ self.assertEqual(alpha.__name__, "nspkg.subnspkg.alpha")
+
+ self.assertTrue(
+ alpha.__file__.startswith(sys.prefix),
+ f"\nalpha was imported, not from within the venv.\n"
+ + f"venv : {venv}\n"
+ + f"actual: {alpha.__file__}",
+ )
+
+ from nspkg.subnspkg import beta
+
+ self.assertEqual(beta.whoami, "beta")
+ self.assertEqual(beta.__name__, "nspkg.subnspkg.beta")
+ self.assertTrue(
+ beta.__file__.startswith(sys.prefix),
+ f"\nbeta was imported, not from within the venv.\n"
+ + f"venv : {venv}\n"
+ + f"actual: {beta.__file__}",
+ )
+
+
+if __name__ == "__main__":
+ unittest.main()
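To see how the pieces in this patch fit together from a consumer's point of view, here is a minimal sketch of a package opting in to the experimental site-packages venv layout. It mirrors the `nspkg_*` test fixtures above; the `nspkg_omega`/`app` target names and `app.py` are hypothetical, and the flag spelling below assumes `//python/config_settings:venvs_site_packages` behaves like an ordinary string flag.

```starlark
# BUILD.bazel (illustrative sketch, not part of this patch)
load("@rules_python//python:py_binary.bzl", "py_binary")
load("@rules_python//python:py_library.bzl", "py_library")

package(default_visibility = ["//visibility:public"])

py_library(
    name = "nspkg_omega",
    # Sources are laid out as a site-packages tree rooted at `site-packages/`.
    srcs = glob(["site-packages/**/*.py"]),
    # Gate the behavior on the flag introduced by this patch.
    experimental_venvs_site_packages = "@rules_python//python/config_settings:venvs_site_packages",
    # Exactly one entry: the repo-relative runfiles path of the layout root.
    imports = [package_name() + "/site-packages"],
)

py_binary(
    name = "app",
    srcs = ["app.py"],
    deps = [":nspkg_omega"],
)
```

A build would then enable the layout with something like `--@rules_python//python/config_settings:venvs_site_packages=yes` together with `--@rules_python//python/config_settings:bootstrap_impl=script`, which is what the `py_reconfig_test` above configures via its `venvs_site_packages` and `bootstrap_impl` attributes. Because `PyInfo.site_packages_symlinks` is topologically ordered, dependencies listed closer to `app` take precedence when two targets would create the same site-packages path.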
From e5fa023b27cf3583eb9e45efcbcb887e660ce65f Mon Sep 17 00:00:00 2001
From: Richard Levasseur
Date: Sat, 5 Apr 2025 10:05:08 -0700
Subject: [PATCH 008/156] docs: fix a few xrefs (#2740)
Fixes a few xrefs in the docs that had typos or missing external bazel
links.
---
CHANGELOG.md | 2 +-
docs/api/rules_python/python/config_settings/index.md | 2 +-
docs/toolchains.md | 4 ++--
python/private/py_executable.bzl | 2 +-
sphinxdocs/inventories/bazel_inventory.txt | 8 ++++++++
5 files changed, 13 insertions(+), 5 deletions(-)
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 818773e589..5172e742c9 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -68,7 +68,7 @@ Unreleased changes template.
using `experimental_index_url`.
* (toolchains) Remove all but `3.8.20` versions of the Python `3.8` interpreter who has
reached EOL. If users still need other versions of the `3.8` interpreter, please supply
- the URLs manually {bzl:ob}`python.toolchain` or {bzl:obj}`python_register_toolchains` calls.
+ the URLs manually {bzl:obj}`python.toolchain` or {bzl:obj}`python_register_toolchains` calls.
* (pypi) The PyPI extension will no longer write the lock file entries as the
extension has been marked reproducible.
Fixes [#2434](https://github.com/bazel-contrib/rules_python/issues/2434).
diff --git a/docs/api/rules_python/python/config_settings/index.md b/docs/api/rules_python/python/config_settings/index.md
index 340335d9b1..ed6444298e 100644
--- a/docs/api/rules_python/python/config_settings/index.md
+++ b/docs/api/rules_python/python/config_settings/index.md
@@ -46,7 +46,7 @@ of builtin, known versions.
If you need to match a version that isn't present, then you have two options:
1. Manually define a `config_setting` and have it match {obj}`--python_version`
- or {ob}`python_version_major_minor`. This works best when you don't control the
+ or {obj}`python_version_major_minor`. This works best when you don't control the
root module, or don't want to rely on the MODULE.bazel configuration. Such
a config settings would look like:
```
diff --git a/docs/toolchains.md b/docs/toolchains.md
index 0e4f5c2321..73a8a48121 100644
--- a/docs/toolchains.md
+++ b/docs/toolchains.md
@@ -265,7 +265,7 @@ use_repo(python, "python_3_10", "python_3_10_host")
```
Note, the user has to import the `*_host` repository to use the python interpreter in the
-{bzl:obj}`pip_parse` and {bzl:obj}`whl_library` repository rules and once that is done
+{bzl:obj}`pip_parse` and `whl_library` repository rules and once that is done
users should be able to ensure the setting of the default toolchain even during the
transition period when some of the code is still defined in `WORKSPACE`.
@@ -364,7 +364,7 @@ toolchains a "toolchain suite".
One of the underlying design goals of the toolchains is to support complex and
bespoke environments. Such environments may use an arbitrary combination of
-{obj}`RBE`, cross-platform building, multiple Python versions,
+{bzl:obj}`RBE`, cross-platform building, multiple Python versions,
building Python from source, embeding Python (as opposed to building separate
interpreters), using prebuilt binaries, or using binaries built from source. To
that end, many of the attributes they accept, and fields they provide, are
diff --git a/python/private/py_executable.bzl b/python/private/py_executable.bzl
index f33c2b6ca1..e6f4700b20 100644
--- a/python/private/py_executable.bzl
+++ b/python/private/py_executable.bzl
@@ -92,7 +92,7 @@ Only supported for {obj}`--bootstrap_impl=script`. Ignored otherwise.
:::
:::{seealso}
-The {obj}`RULES_PYTHON_ADDITIONAL_INTERPRETER_ARGS` environment variable
+The {any}`RULES_PYTHON_ADDITIONAL_INTERPRETER_ARGS` environment variable
:::
:::{versionadded} 1.3.0
diff --git a/sphinxdocs/inventories/bazel_inventory.txt b/sphinxdocs/inventories/bazel_inventory.txt
index dc11f02b5b..458126a849 100644
--- a/sphinxdocs/inventories/bazel_inventory.txt
+++ b/sphinxdocs/inventories/bazel_inventory.txt
@@ -28,6 +28,14 @@ attr.string_list bzl:type 1 rules/lib/toplevel/attr#string_list -
attr.string_list_dict bzl:type 1 rules/lib/toplevel/attr#string_list_dict -
bool bzl:type 1 rules/lib/bool -
callable bzl:type 1 rules/lib/core/function -
+config bzl:obj 1 rules/lib/toplevel/config -
+config.bool bzl:function 1 rules/lib/toplevel/config#bool -
+config.exec bzl:function 1 rules/lib/toplevel/config#exec -
+config.int bzl:function 1 rules/lib/toplevel/config#int -
+config.none bzl:function 1 rules/lib/toplevel/config#none -
+config.string bzl:function 1 rules/lib/toplevel/config#string -
+config.string_list bzl:function 1 rules/lib/toplevel/config#string_list -
+config.target bzl:function 1 rules/lib/toplevel/config#target -
config_common.FeatureFlagInfo bzl:type 1 rules/lib/toplevel/config_common#FeatureFlagInfo -
config_common.toolchain_type bzl:function 1 rules/lib/toplevel/config_common#toolchain_type -
ctx.actions bzl:obj 1 rules/lib/builtins/ctx#actions -
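For reference, each line added to `bazel_inventory.txt` follows the Sphinx inventory format `name domain:role priority uri display-name`, which is what lets the `{bzl:obj}`/`{obj}` roles resolve to bazel.build. A minimal sketch of the kind of definition the new `config.*` entries point at; the rule and file names here are illustrative and not part of rules_python:

```starlark
# illustrative_settings.bzl -- sketch only
load("@bazel_skylib//rules:common_settings.bzl", "BuildSettingInfo")

def _my_string_list_setting_impl(ctx):
    # Expose the setting's value to targets that depend on it.
    return [BuildSettingInfo(value = ctx.build_setting_value)]

my_string_list_setting = rule(
    implementation = _my_string_list_setting_impl,
    # config.string_list(flag = True) declares a string-list build setting
    # settable on the command line; `config` is the Bazel builtin documented
    # at rules/lib/toplevel/config.
    build_setting = config.string_list(flag = True),
)
```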
From 6854dc3880b1ff81659ad4a36fb2e6551f41d0e2 Mon Sep 17 00:00:00 2001
From: Matt Mackay
Date: Sat, 5 Apr 2025 14:42:03 -0400
Subject: [PATCH 009/156] fix: treat ignore_root_user_error as either ignored or
warning (#2739)
Previously
[#2636](https://github.com/bazel-contrib/rules_python/pull/2636) changed
the semantics of `ignore_root_user_error` from "ignore" to "warning".
This is now flipped back to ignoring the issue, and a warning is only
emitted when the attribute is explicitly set to `False`.
This also changes the behavior introduced by #2636: by default there is
no warning, and a user who does not want to ignore the error has to
explicitly set the attribute to `False` to see the warning.
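For illustration, a minimal sketch of how a user who does not want to ignore the error would opt back in to the warning. It assumes the `ignore_root_user_error` attribute is exposed on the `python.toolchain` tag of the bzlmod extension; the Python version and repo names are illustrative.

```starlark
# MODULE.bazel (illustrative sketch)
bazel_dep(name = "rules_python", version = "0")  # version is illustrative

python = use_extension("@rules_python//python/extensions:python.bzl", "python")
python.toolchain(
    python_version = "3.11",
    # After this patch, setting this to True ignores the root-user issue
    # entirely; setting it to False runs the check and emits a warning
    # instead of failing.
    ignore_root_user_error = False,
)
use_repo(python, "python_3_11")
```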
Co-authored-by: Richard Levasseur
---
CHANGELOG.md | 4 +++
python/private/python.bzl | 4 +--
python/private/python_repository.bzl | 40 +++++++++++++++-------------
3 files changed, 27 insertions(+), 21 deletions(-)
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 5172e742c9..dbb0c03e59 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -69,6 +69,10 @@ Unreleased changes template.
* (toolchains) Remove all but `3.8.20` versions of the Python `3.8` interpreter who has
reached EOL. If users still need other versions of the `3.8` interpreter, please supply
the URLs manually {bzl:obj}`python.toolchain` or {bzl:obj}`python_register_toolchains` calls.
+* (toolchains) Previously [#2636](https://github.com/bazel-contrib/rules_python/pull/2636)
+ changed the semantics of `ignore_root_user_error` from "ignore" to "warning". This is now
+  flipped back to ignoring the issue, and a warning is only emitted when the attribute is set
+  to `False`.
* (pypi) The PyPI extension will no longer write the lock file entries as the
extension has been marked reproducible.
Fixes [#2434](https://github.com/bazel-contrib/rules_python/issues/2434).
diff --git a/python/private/python.bzl b/python/private/python.bzl
index 296fb0ab7d..efc429420e 100644
--- a/python/private/python.bzl
+++ b/python/private/python.bzl
@@ -803,8 +803,8 @@ to spurious cache misses or build failures).
However, if the user is running Bazel as root, this read-onlyness is not
respected. Bazel will print a warning message when it detects that the runtime
installation is writable despite being made read only (i.e. it's running with
-root access). If this attribute is set to `False`, Bazel will make it a hard
-error to run with root access instead.
+root access) while this attribute is set to `False`; this messaging can be
+silenced entirely by setting this attribute to `True`.
""",
mandatory = False,
),
diff --git a/python/private/python_repository.bzl b/python/private/python_repository.bzl
index f3ec13d67d..cfc06452a9 100644
--- a/python/private/python_repository.bzl
+++ b/python/private/python_repository.bzl
@@ -137,28 +137,30 @@ def _python_repository_impl(rctx):
logger = logger,
)
- fail_or_warn = logger.warn if rctx.attr.ignore_root_user_error else logger.fail
- exec_result = repo_utils.execute_unchecked(
- rctx,
- op = "python_repository.TestReadOnly",
- arguments = [repo_utils.which_checked(rctx, "touch"), "lib/.test"],
- logger = logger,
- )
-
- # The issue with running as root is the installation is no longer
- # read-only, so the problems due to pyc can resurface.
- if exec_result.return_code == 0:
- stdout = repo_utils.execute_checked_stdout(
+ # If the user is not ignoring the warnings, then proceed to run a check,
+ # otherwise these steps can be skipped, as they both result in some warning.
+ if not rctx.attr.ignore_root_user_error:
+ exec_result = repo_utils.execute_unchecked(
rctx,
- op = "python_repository.GetUserId",
- arguments = [repo_utils.which_checked(rctx, "id"), "-u"],
+ op = "python_repository.TestReadOnly",
+ arguments = [repo_utils.which_checked(rctx, "touch"), "lib/.test"],
logger = logger,
)
- uid = int(stdout.strip())
- if uid == 0:
- fail_or_warn("The current user is root, which can cause spurious cache misses or build failures with the hermetic Python interpreter. See https://github.com/bazel-contrib/rules_python/pull/713.")
- else:
- fail_or_warn("The current user has CAP_DAC_OVERRIDE set, which can cause spurious cache misses or build failures with the hermetic Python interpreter. See https://github.com/bazel-contrib/rules_python/pull/713.")
+
+ # The issue with running as root is the installation is no longer
+ # read-only, so the problems due to pyc can resurface.
+ if exec_result.return_code == 0:
+ stdout = repo_utils.execute_checked_stdout(
+ rctx,
+ op = "python_repository.GetUserId",
+ arguments = [repo_utils.which_checked(rctx, "id"), "-u"],
+ logger = logger,
+ )
+ uid = int(stdout.strip())
+ if uid == 0:
+ logger.warn("The current user is root, which can cause spurious cache misses or build failures with the hermetic Python interpreter. See https://github.com/bazel-contrib/rules_python/pull/713.")
+ else:
+ logger.warn("The current user has CAP_DAC_OVERRIDE set, which can cause spurious cache misses or build failures with the hermetic Python interpreter. See https://github.com/bazel-contrib/rules_python/pull/713.")
python_bin = "python.exe" if ("windows" in platform) else "bin/python3"
From 7f5a1b5a0e6fbe29c5c33d8e164b4cda6ded99b7 Mon Sep 17 00:00:00 2001
From: Matt Mackay
Date: Sat, 5 Apr 2025 18:48:14 -0400
Subject: [PATCH 010/156] fix: Ensure temporary .pyc & .pyo files are excluded
from the interpreters repository files (#2743)
We've seen cases where the temporary versions of the `.pyc` and `.pyo` files
are unstable on certain interpreter toolchains. The temp files take the
form of `.pyc.NNN`, so the amended glob pattern will still match both the
`.pyc` and `.pyc.NNN` versions of the file names.
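As an illustration of the pattern change, the generated filegroup's glob now excludes both the final and the temporary bytecode file names. The snippet below is a simplified sketch, not the actual BUILD content emitted by `python_repository`; the example paths and target name are hypothetical.

```starlark
# Illustrative sketch of the exclude patterns; e.g. both of these are skipped:
#   lib/python3.11/__pycache__/foo.cpython-311.pyc
#   lib/python3.11/__pycache__/foo.cpython-311.pyc.139862413258768
filegroup(
    name = "files",
    srcs = glob(
        include = ["**"],
        exclude = [
            "**/__pycache__/*.pyc*",  # matches .pyc and temporary .pyc.NNNN
            "**/__pycache__/*.pyo*",  # matches .pyo and temporary .pyo.NNNN
        ],
    ),
)
```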
---------
Co-authored-by: Richard Levasseur
---
CHANGELOG.md | 1 +
python/private/python_repository.bzl | 5 +++--
2 files changed, 4 insertions(+), 2 deletions(-)
diff --git a/CHANGELOG.md b/CHANGELOG.md
index dbb0c03e59..abe718c389 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -90,6 +90,7 @@ Unreleased changes template.
transitions transitioning on the `python_version` flag.
Fixes [#2685](https://github.com/bazel-contrib/rules_python/issues/2685).
* (toolchains) Run the check on the Python interpreter in isolated mode, to ensure it's not affected by userland environment variables, such as `PYTHONPATH`.
+* (toolchains) Ensure temporary `.pyc` and `.pyo` files are also excluded from the interpreters repository files.
{#v0-0-0-added}
### Added
diff --git a/python/private/python_repository.bzl b/python/private/python_repository.bzl
index cfc06452a9..fd86b415cc 100644
--- a/python/private/python_repository.bzl
+++ b/python/private/python_repository.bzl
@@ -193,8 +193,9 @@ def _python_repository_impl(rctx):
# Exclude them from the glob because otherwise between the first time and second time a python toolchain is used,"
# the definition of this filegroup will change, and depending rules will get invalidated."
# See https://github.com/bazel-contrib/rules_python/issues/1008 for unconditionally adding these to toolchains so we can stop ignoring them."
- "**/__pycache__/*.pyc",
- "**/__pycache__/*.pyo",
+ # pyc* is ignored because pyc creation creates temporary .pyc.NNNN files
+ "**/__pycache__/*.pyc*",
+ "**/__pycache__/*.pyo*",
]
if "windows" in platform:
From da0e52f59047ab47bcb561787d42a8f93537dc41 Mon Sep 17 00:00:00 2001
From: Richard Levasseur
Date: Sat, 5 Apr 2025 16:47:44 -0700
Subject: [PATCH 011/156] chore: remove unnecessary DEFAULT_BOOTSTRAP_TEMPLATE
global (#2744)
I think the DEFAULT_BOOTSTRAP_TEMPLATE global was used by something in the
original Bazel impl, but now it's just used in one place. Remove the shared
global and just inline the single usage.
---
python/private/py_runtime_info.bzl | 2 --
python/private/py_runtime_rule.bzl | 4 ++--
2 files changed, 2 insertions(+), 4 deletions(-)
diff --git a/python/private/py_runtime_info.bzl b/python/private/py_runtime_info.bzl
index 19857c9ede..4297391068 100644
--- a/python/private/py_runtime_info.bzl
+++ b/python/private/py_runtime_info.bzl
@@ -17,8 +17,6 @@ load(":util.bzl", "define_bazel_6_provider")
DEFAULT_STUB_SHEBANG = "#!/usr/bin/env python3"
-DEFAULT_BOOTSTRAP_TEMPLATE = Label("//python/private:bootstrap_template")
-
_PYTHON_VERSION_VALUES = ["PY2", "PY3"]
def _optional_int(value):
diff --git a/python/private/py_runtime_rule.bzl b/python/private/py_runtime_rule.bzl
index 3dc00baa12..a85f5b25f2 100644
--- a/python/private/py_runtime_rule.bzl
+++ b/python/private/py_runtime_rule.bzl
@@ -19,7 +19,7 @@ load("@bazel_skylib//rules:common_settings.bzl", "BuildSettingInfo")
load(":attributes.bzl", "NATIVE_RULES_ALLOWLIST_ATTRS")
load(":flags.bzl", "FreeThreadedFlag")
load(":py_internal.bzl", "py_internal")
-load(":py_runtime_info.bzl", "DEFAULT_BOOTSTRAP_TEMPLATE", "DEFAULT_STUB_SHEBANG", "PyRuntimeInfo")
+load(":py_runtime_info.bzl", "DEFAULT_STUB_SHEBANG", "PyRuntimeInfo")
load(":reexports.bzl", "BuiltinPyRuntimeInfo")
load(":util.bzl", "IS_BAZEL_7_OR_HIGHER")
@@ -201,7 +201,7 @@ If not set, then it will be set based on flags.
),
"bootstrap_template": attr.label(
allow_single_file = True,
- default = DEFAULT_BOOTSTRAP_TEMPLATE,
+ default = Label("//python/private:bootstrap_template"),
doc = """
The bootstrap script template file to use. Should have %python_binary%,
%workspace_name%, %main%, and %imports%.
From 996ae2658bffe7163a5abc384eff57ff28d4f409 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Mon, 7 Apr 2025 23:16:24 +0000
Subject: [PATCH 012/156] build(deps): bump jinja2 from 3.1.4 to 3.1.6 in /docs
(#2750)
Bumps [jinja2](https://github.com/pallets/jinja) from 3.1.4 to 3.1.6.
Release notes / changelog (sourced from jinja2's releases and changelog):

3.1.6 (released 2025-03-05) is a security release; it fixes security issues but
does not otherwise change behavior and should not result in breaking changes
compared to the latest feature release.
PyPI: https://pypi.org/project/Jinja2/3.1.6/
Changes: https://jinja.palletsprojects.com/en/stable/changes/#version-3-1-6
- The `|attr` filter does not bypass the environment's attribute lookup,
  allowing the sandbox to apply its checks. GHSA-cpwx-vrp4-4pq7

3.1.5 (released 2024-12-21) is a security fix release; it fixes security issues
and bugs but does not otherwise change behavior and should not result in
breaking changes compared to the latest feature release.
PyPI: https://pypi.org/project/Jinja2/3.1.5/
Changes: https://jinja.palletsprojects.com/changes/#version-3-1-5
Milestone: https://github.com/pallets/jinja/milestone/16?closed=1
- The sandboxed environment handles indirect calls to `str.format`, such as by
  passing a stored reference to a filter that calls its argument.
  GHSA-q2x7-8rv6-6q7h
- Escape template name before formatting it into error messages, to avoid
  issues with names that contain f-string syntax. #1792, GHSA-gmj6-6f8f-6699
- Sandbox does not allow `clear` and `pop` on known mutable sequence types. #2032
- Calling sync `render` for an async template uses `asyncio.run`. #1952
- Avoid unclosed `auto_aiter` warnings; return an `aclose`-able `AsyncGenerator`
  from `Template.generate_async`; avoid leaving `root_render_func()` and async
  generators unclosed in blocks, includes and extends. #1960
- The runtime uses the correct `concat` function for the current environment
  when calling block references. #1701
- Make `|unique` async-aware, allowing it to be used after another async-aware
  filter. #1781
- `|int` filter handles `OverflowError` from scientific notation. #1921
- Make compiling deterministic for tuple unpacking in a `{% set ... %}` call. #2021
- Fix dunder protocol (`copy`/`pickle`/etc) interaction with `Undefined`
  objects, and `copy`/`pickle` support for the internal `missing` object.
  #2025, #2027
- `Environment.overlay(enable_async)` is applied correctly. #2061
- The error message from `FileSystemLoader` includes the paths that were
  searched. #1661
- `PackageLoader` shows a clearer error message when the package does not
  contain the templates directory. #1705
- Improve annotations for methods returning copies. #1880
- `urlize` does not add `mailto:` to values like `@a@b`. #1870
- Tests decorated with `@pass_context` can be used with the `|select`
  filter. #1624
- Using `set` for multiple assignment (`a, b = 1, 2`) does not fail when the
  target is a namespace attribute. #1413
- Using `set` in all branches of `{% if %}{% elif %}{% else %}` blocks does not
  cause the variable to be considered initially undefined. #1253

... (truncated)
Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
docs/requirements.txt | 6 +++---
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/docs/requirements.txt b/docs/requirements.txt
index e838daca8f..0b4909535a 100644
--- a/docs/requirements.txt
+++ b/docs/requirements.txt
@@ -148,9 +148,9 @@ imagesize==1.4.1 \
--hash=sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b \
--hash=sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a
# via sphinx
-jinja2==3.1.4 \
- --hash=sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369 \
- --hash=sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d
+jinja2==3.1.6 \
+ --hash=sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d \
+ --hash=sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67
# via
# myst-parser
# readthedocs-sphinx-ext
From 8bda670add1c490477a3ac9914405c802a087847 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Mon, 7 Apr 2025 23:18:33 +0000
Subject: [PATCH 013/156] build(deps): bump absl-py from 2.1.0 to 2.2.2 in
/docs (#2751)
Bumps [absl-py](https://github.com/abseil/abseil-py) from 2.1.0 to
2.2.2.
Release notes / changelog (sourced from absl-py's releases and changelog), v2.2.2:

Added
- (testing) Added a new method `absltest.TestCase.assertMappingEqual` that
  tests equality of Mapping objects without requiring them to be dicts.
  Similar to `assertSequenceEqual` but for mappings.
- (testing) Added a new method `absltest.assertDictContainsSubset` that checks
  that a dictionary contains a subset of keys and values. Similar to the
  removed method `unittest.assertDictContainsSubset` (existed until Python 3.11).
- Added type annotations that are compliant with MyPy.

Changed
- Removed support for Python 3.7.

Fixed
- (testing) Fixed an issue where the test reporter crashes on exceptions with
  no string representation, starting with Python 3.11.

(The change log also includes changes in 2.2.0 and 2.2.1.)

Commits: 4de3812 (fix a typo in hex regex in logging_functional_test.py),
e889843 (exclude files and bump version to 2.2.2), d45bb4b (bump absl-py
version to 2.2.1 to prepare for a release), 014aa0a (fix the behavior of
assertDictAlmostEqual), 57ea862 (bump absl-py version to 2.2 to prepare for a
release), 214f0ff (change assertMappingEqual to support an arbitrary equality
function), c98852f (avoid double negation in the error message for required
flags), f1cd92d (update string substitution to modern f-string style in
assertMappingEqual), f63fe8d (suppress a misleading pytype warning on Python
3.12), 6609299 (minor improvements of the assertDictContainsSubset method).
Additional commits are viewable in the compare view.
Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
docs/requirements.txt | 6 +++---
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/docs/requirements.txt b/docs/requirements.txt
index 0b4909535a..66d41a963f 100644
--- a/docs/requirements.txt
+++ b/docs/requirements.txt
@@ -2,9 +2,9 @@
# bazel run //docs:requirements.update
--index-url https://pypi.org/simple
-absl-py==2.1.0 \
- --hash=sha256:526a04eadab8b4ee719ce68f204172ead1027549089702d99b9059f129ff1308 \
- --hash=sha256:7820790efbb316739cde8b4e19357243fc3608a152024288513dd968d7d959ff
+absl-py==2.2.2 \
+ --hash=sha256:bf25b2c2eed013ca456918c453d687eab4e8309fba81ee2f4c1a6aa2494175eb \
+ --hash=sha256:e5797bc6abe45f64fd95dc06394ca3f2bedf3b5d895e9da691c9ee3397d70092
# via rules-python-docs (docs/pyproject.toml)
alabaster==1.0.0 \
--hash=sha256:c00dca57bca26fa62a6d7d0a9fcce65f3e026e9bfe33e9c538fd3fbb2144fd9e \
From 23157f96117cc82adb540030e9da737b8811608d Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Tue, 8 Apr 2025 10:46:41 +0900
Subject: [PATCH 014/156] build(deps): bump charset-normalizer from 3.4.0 to
3.4.1 in /tools/publish (#2753)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Bumps [charset-normalizer](https://github.com/jawah/charset_normalizer)
from 3.4.0 to 3.4.1.
Release notes / changelog (sourced from charset-normalizer's releases and
changelog), version 3.4.1 (2024-12-24):

(The release notes also promote Niquests, a Requests drop-in replacement with
native HTTP/2 and HTTP/3 support that makes better use of charset-normalizer.)

Changed
- Project metadata is now stored using `pyproject.toml` instead of `setup.cfg`,
  using setuptools as the build backend.
- Enforce annotation delayed loading for simpler and consistent types in the
  project.
- Optional mypyc compilation upgraded to version 1.14 for Python >= 3.8.

Added
- pre-commit configuration.
- noxfile.

Removed
- `build-requirements.txt`, as per using `pyproject.toml` native build
  configuration.
- `bin/integration.py` and `bin/serve.py` in favor of downstream integration
  tests (see noxfile).
- `setup.cfg` in favor of `pyproject.toml` metadata configuration.
- Unused `utils.range_scan` function.

Fixed
- Converting content to Unicode bytes may insert `utf_8` instead of the
  preferred `utf-8`. (#572)
- Deprecation warning "'count' is passed as positional argument" when
  converting to Unicode bytes on Python 3.13+.

Commits: ffdf7f5 (fix long description content-type inferred as rst instead of
md), c7197b7 (fix changelog entries, #582), c390e1f (merge #581 from
jawah/refresh-part-2), f9d6b8c (add CODEOWNERS), 7ce1ef1 (use ubuntu-22.04 for
cibuildwheel in the continuous deployment workflow), deed205 (update LICENSE
copyright), f11f571 (include noxfile in sdist), 1ec7c06 (update changelog),
14b4649 (output(...) replace declarative mark using non-IANA-compliant
encoding), 1b06bc0 (merge branch 'refresh-part-2'). Additional commits are
viewable in the compare view.
Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
tools/publish/requirements_darwin.txt | 199 +++++++++++------------
tools/publish/requirements_linux.txt | 199 +++++++++++------------
tools/publish/requirements_universal.txt | 199 +++++++++++------------
tools/publish/requirements_windows.txt | 199 +++++++++++------------
4 files changed, 372 insertions(+), 424 deletions(-)
diff --git a/tools/publish/requirements_darwin.txt b/tools/publish/requirements_darwin.txt
index e8ee1e9b89..5f8a33c3f5 100644
--- a/tools/publish/requirements_darwin.txt
+++ b/tools/publish/requirements_darwin.txt
@@ -10,112 +10,99 @@ certifi==2025.1.31 \
--hash=sha256:3d5da6925056f6f18f119200434a4780a94263f10d1c21d032a6f6b2baa20651 \
--hash=sha256:ca78db4565a652026a4db2bcdf68f2fb589ea80d0be70e03929ed730746b84fe
# via requests
-charset-normalizer==3.4.0 \
- --hash=sha256:0099d79bdfcf5c1f0c2c72f91516702ebf8b0b8ddd8905f97a8aecf49712c621 \
- --hash=sha256:0713f3adb9d03d49d365b70b84775d0a0d18e4ab08d12bc46baa6132ba78aaf6 \
- --hash=sha256:07afec21bbbbf8a5cc3651aa96b980afe2526e7f048fdfb7f1014d84acc8b6d8 \
- --hash=sha256:0b309d1747110feb25d7ed6b01afdec269c647d382c857ef4663bbe6ad95a912 \
- --hash=sha256:0d99dd8ff461990f12d6e42c7347fd9ab2532fb70e9621ba520f9e8637161d7c \
- --hash=sha256:0de7b687289d3c1b3e8660d0741874abe7888100efe14bd0f9fd7141bcbda92b \
- --hash=sha256:1110e22af8ca26b90bd6364fe4c763329b0ebf1ee213ba32b68c73de5752323d \
- --hash=sha256:130272c698667a982a5d0e626851ceff662565379baf0ff2cc58067b81d4f11d \
- --hash=sha256:136815f06a3ae311fae551c3df1f998a1ebd01ddd424aa5603a4336997629e95 \
- --hash=sha256:14215b71a762336254351b00ec720a8e85cada43b987da5a042e4ce3e82bd68e \
- --hash=sha256:1db4e7fefefd0f548d73e2e2e041f9df5c59e178b4c72fbac4cc6f535cfb1565 \
- --hash=sha256:1ffd9493de4c922f2a38c2bf62b831dcec90ac673ed1ca182fe11b4d8e9f2a64 \
- --hash=sha256:2006769bd1640bdf4d5641c69a3d63b71b81445473cac5ded39740a226fa88ab \
- --hash=sha256:20587d20f557fe189b7947d8e7ec5afa110ccf72a3128d61a2a387c3313f46be \
- --hash=sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e \
- --hash=sha256:27623ba66c183eca01bf9ff833875b459cad267aeeb044477fedac35e19ba907 \
- --hash=sha256:285e96d9d53422efc0d7a17c60e59f37fbf3dfa942073f666db4ac71e8d726d0 \
- --hash=sha256:2de62e8801ddfff069cd5c504ce3bc9672b23266597d4e4f50eda28846c322f2 \
- --hash=sha256:2f6c34da58ea9c1a9515621f4d9ac379871a8f21168ba1b5e09d74250de5ad62 \
- --hash=sha256:309a7de0a0ff3040acaebb35ec45d18db4b28232f21998851cfa709eeff49d62 \
- --hash=sha256:35c404d74c2926d0287fbd63ed5d27eb911eb9e4a3bb2c6d294f3cfd4a9e0c23 \
- --hash=sha256:3710a9751938947e6327ea9f3ea6332a09bf0ba0c09cae9cb1f250bd1f1549bc \
- --hash=sha256:3d59d125ffbd6d552765510e3f31ed75ebac2c7470c7274195b9161a32350284 \
- --hash=sha256:40d3ff7fc90b98c637bda91c89d51264a3dcf210cade3a2c6f838c7268d7a4ca \
- --hash=sha256:425c5f215d0eecee9a56cdb703203dda90423247421bf0d67125add85d0c4455 \
- --hash=sha256:43193c5cda5d612f247172016c4bb71251c784d7a4d9314677186a838ad34858 \
- --hash=sha256:44aeb140295a2f0659e113b31cfe92c9061622cadbc9e2a2f7b8ef6b1e29ef4b \
- --hash=sha256:47334db71978b23ebcf3c0f9f5ee98b8d65992b65c9c4f2d34c2eaf5bcaf0594 \
- --hash=sha256:4796efc4faf6b53a18e3d46343535caed491776a22af773f366534056c4e1fbc \
- --hash=sha256:4a51b48f42d9358460b78725283f04bddaf44a9358197b889657deba38f329db \
- --hash=sha256:4b67fdab07fdd3c10bb21edab3cbfe8cf5696f453afce75d815d9d7223fbe88b \
- --hash=sha256:4ec9dd88a5b71abfc74e9df5ebe7921c35cbb3b641181a531ca65cdb5e8e4dea \
- --hash=sha256:4f9fc98dad6c2eaa32fc3af1417d95b5e3d08aff968df0cd320066def971f9a6 \
- --hash=sha256:54b6a92d009cbe2fb11054ba694bc9e284dad30a26757b1e372a1fdddaf21920 \
- --hash=sha256:55f56e2ebd4e3bc50442fbc0888c9d8c94e4e06a933804e2af3e89e2f9c1c749 \
- --hash=sha256:5726cf76c982532c1863fb64d8c6dd0e4c90b6ece9feb06c9f202417a31f7dd7 \
- --hash=sha256:5d447056e2ca60382d460a604b6302d8db69476fd2015c81e7c35417cfabe4cd \
- --hash=sha256:5ed2e36c3e9b4f21dd9422f6893dec0abf2cca553af509b10cd630f878d3eb99 \
- --hash=sha256:5ff2ed8194587faf56555927b3aa10e6fb69d931e33953943bc4f837dfee2242 \
- --hash=sha256:62f60aebecfc7f4b82e3f639a7d1433a20ec32824db2199a11ad4f5e146ef5ee \
- --hash=sha256:63bc5c4ae26e4bc6be6469943b8253c0fd4e4186c43ad46e713ea61a0ba49129 \
- --hash=sha256:6b40e8d38afe634559e398cc32b1472f376a4099c75fe6299ae607e404c033b2 \
- --hash=sha256:6b493a043635eb376e50eedf7818f2f322eabbaa974e948bd8bdd29eb7ef2a51 \
- --hash=sha256:6dba5d19c4dfab08e58d5b36304b3f92f3bd5d42c1a3fa37b5ba5cdf6dfcbcee \
- --hash=sha256:6fd30dc99682dc2c603c2b315bded2799019cea829f8bf57dc6b61efde6611c8 \
- --hash=sha256:707b82d19e65c9bd28b81dde95249b07bf9f5b90ebe1ef17d9b57473f8a64b7b \
- --hash=sha256:7706f5850360ac01d80c89bcef1640683cc12ed87f42579dab6c5d3ed6888613 \
- --hash=sha256:7782afc9b6b42200f7362858f9e73b1f8316afb276d316336c0ec3bd73312742 \
- --hash=sha256:79983512b108e4a164b9c8d34de3992f76d48cadc9554c9e60b43f308988aabe \
- --hash=sha256:7f683ddc7eedd742e2889d2bfb96d69573fde1d92fcb811979cdb7165bb9c7d3 \
- --hash=sha256:82357d85de703176b5587dbe6ade8ff67f9f69a41c0733cf2425378b49954de5 \
- --hash=sha256:84450ba661fb96e9fd67629b93d2941c871ca86fc38d835d19d4225ff946a631 \
- --hash=sha256:86f4e8cca779080f66ff4f191a685ced73d2f72d50216f7112185dc02b90b9b7 \
- --hash=sha256:8cda06946eac330cbe6598f77bb54e690b4ca93f593dee1568ad22b04f347c15 \
- --hash=sha256:8ce7fd6767a1cc5a92a639b391891bf1c268b03ec7e021c7d6d902285259685c \
- --hash=sha256:8ff4e7cdfdb1ab5698e675ca622e72d58a6fa2a8aa58195de0c0061288e6e3ea \
- --hash=sha256:9289fd5dddcf57bab41d044f1756550f9e7cf0c8e373b8cdf0ce8773dc4bd417 \
- --hash=sha256:92a7e36b000bf022ef3dbb9c46bfe2d52c047d5e3f3343f43204263c5addc250 \
- --hash=sha256:92db3c28b5b2a273346bebb24857fda45601aef6ae1c011c0a997106581e8a88 \
- --hash=sha256:95c3c157765b031331dd4db3c775e58deaee050a3042fcad72cbc4189d7c8dca \
- --hash=sha256:980b4f289d1d90ca5efcf07958d3eb38ed9c0b7676bf2831a54d4f66f9c27dfa \
- --hash=sha256:9ae4ef0b3f6b41bad6366fb0ea4fc1d7ed051528e113a60fa2a65a9abb5b1d99 \
- --hash=sha256:9c98230f5042f4945f957d006edccc2af1e03ed5e37ce7c373f00a5a4daa6149 \
- --hash=sha256:9fa2566ca27d67c86569e8c85297aaf413ffab85a8960500f12ea34ff98e4c41 \
- --hash=sha256:a14969b8691f7998e74663b77b4c36c0337cb1df552da83d5c9004a93afdb574 \
- --hash=sha256:a8aacce6e2e1edcb6ac625fb0f8c3a9570ccc7bfba1f63419b3769ccf6a00ed0 \
- --hash=sha256:a8e538f46104c815be19c975572d74afb53f29650ea2025bbfaef359d2de2f7f \
- --hash=sha256:aa41e526a5d4a9dfcfbab0716c7e8a1b215abd3f3df5a45cf18a12721d31cb5d \
- --hash=sha256:aa693779a8b50cd97570e5a0f343538a8dbd3e496fa5dcb87e29406ad0299654 \
- --hash=sha256:ab22fbd9765e6954bc0bcff24c25ff71dcbfdb185fcdaca49e81bac68fe724d3 \
- --hash=sha256:ab2e5bef076f5a235c3774b4f4028a680432cded7cad37bba0fd90d64b187d19 \
- --hash=sha256:ab973df98fc99ab39080bfb0eb3a925181454d7c3ac8a1e695fddfae696d9e90 \
- --hash=sha256:af73657b7a68211996527dbfeffbb0864e043d270580c5aef06dc4b659a4b578 \
- --hash=sha256:b197e7094f232959f8f20541ead1d9862ac5ebea1d58e9849c1bf979255dfac9 \
- --hash=sha256:b295729485b06c1a0683af02a9e42d2caa9db04a373dc38a6a58cdd1e8abddf1 \
- --hash=sha256:b8831399554b92b72af5932cdbbd4ddc55c55f631bb13ff8fe4e6536a06c5c51 \
- --hash=sha256:b8dcd239c743aa2f9c22ce674a145e0a25cb1566c495928440a181ca1ccf6719 \
- --hash=sha256:bcb4f8ea87d03bc51ad04add8ceaf9b0f085ac045ab4d74e73bbc2dc033f0236 \
- --hash=sha256:bd7af3717683bea4c87acd8c0d3d5b44d56120b26fd3f8a692bdd2d5260c620a \
- --hash=sha256:bf4475b82be41b07cc5e5ff94810e6a01f276e37c2d55571e3fe175e467a1a1c \
- --hash=sha256:c3e446d253bd88f6377260d07c895816ebf33ffffd56c1c792b13bff9c3e1ade \
- --hash=sha256:c57516e58fd17d03ebe67e181a4e4e2ccab1168f8c2976c6a334d4f819fe5944 \
- --hash=sha256:c94057af19bc953643a33581844649a7fdab902624d2eb739738a30e2b3e60fc \
- --hash=sha256:cab5d0b79d987c67f3b9e9c53f54a61360422a5a0bc075f43cab5621d530c3b6 \
- --hash=sha256:ce031db0408e487fd2775d745ce30a7cd2923667cf3b69d48d219f1d8f5ddeb6 \
- --hash=sha256:cee4373f4d3ad28f1ab6290684d8e2ebdb9e7a1b74fdc39e4c211995f77bec27 \
- --hash=sha256:d5b054862739d276e09928de37c79ddeec42a6e1bfc55863be96a36ba22926f6 \
- --hash=sha256:dbe03226baf438ac4fda9e2d0715022fd579cb641c4cf639fa40d53b2fe6f3e2 \
- --hash=sha256:dc15e99b2d8a656f8e666854404f1ba54765871104e50c8e9813af8a7db07f12 \
- --hash=sha256:dcaf7c1524c0542ee2fc82cc8ec337f7a9f7edee2532421ab200d2b920fc97cf \
- --hash=sha256:dd4eda173a9fcccb5f2e2bd2a9f423d180194b1bf17cf59e3269899235b2a114 \
- --hash=sha256:dd9a8bd8900e65504a305bf8ae6fa9fbc66de94178c420791d0293702fce2df7 \
- --hash=sha256:de7376c29d95d6719048c194a9cf1a1b0393fbe8488a22008610b0361d834ecf \
- --hash=sha256:e7fdd52961feb4c96507aa649550ec2a0d527c086d284749b2f582f2d40a2e0d \
- --hash=sha256:e91f541a85298cf35433bf66f3fab2a4a2cff05c127eeca4af174f6d497f0d4b \
- --hash=sha256:e9e3c4c9e1ed40ea53acf11e2a386383c3304212c965773704e4603d589343ed \
- --hash=sha256:ee803480535c44e7f5ad00788526da7d85525cfefaf8acf8ab9a310000be4b03 \
- --hash=sha256:f09cb5a7bbe1ecae6e87901a2eb23e0256bb524a79ccc53eb0b7629fbe7677c4 \
- --hash=sha256:f19c1585933c82098c2a520f8ec1227f20e339e33aca8fa6f956f6691b784e67 \
- --hash=sha256:f1a2f519ae173b5b6a2c9d5fa3116ce16e48b3462c8b96dfdded11055e3d6365 \
- --hash=sha256:f28f891ccd15c514a0981f3b9db9aa23d62fe1a99997512b0491d2ed323d229a \
- --hash=sha256:f3e73a4255342d4eb26ef6df01e3962e73aa29baa3124a8e824c5d3364a65748 \
- --hash=sha256:f606a1881d2663630ea5b8ce2efe2111740df4b687bd78b34a8131baa007f79b \
- --hash=sha256:fe9f97feb71aa9896b81973a7bbada8c49501dc73e58a10fcef6663af95e5079 \
- --hash=sha256:ffc519621dce0c767e96b9c53f09c5d215578e10b02c285809f76509a3931482
+charset-normalizer==3.4.1 \
+ --hash=sha256:0167ddc8ab6508fe81860a57dd472b2ef4060e8d378f0cc555707126830f2537 \
+ --hash=sha256:01732659ba9b5b873fc117534143e4feefecf3b2078b0a6a2e925271bb6f4cfa \
+ --hash=sha256:01ad647cdd609225c5350561d084b42ddf732f4eeefe6e678765636791e78b9a \
+ --hash=sha256:04432ad9479fa40ec0f387795ddad4437a2b50417c69fa275e212933519ff294 \
+ --hash=sha256:0907f11d019260cdc3f94fbdb23ff9125f6b5d1039b76003b5b0ac9d6a6c9d5b \
+ --hash=sha256:0924e81d3d5e70f8126529951dac65c1010cdf117bb75eb02dd12339b57749dd \
+ --hash=sha256:09b26ae6b1abf0d27570633b2b078a2a20419c99d66fb2823173d73f188ce601 \
+ --hash=sha256:09b5e6733cbd160dcc09589227187e242a30a49ca5cefa5a7edd3f9d19ed53fd \
+ --hash=sha256:0af291f4fe114be0280cdd29d533696a77b5b49cfde5467176ecab32353395c4 \
+ --hash=sha256:0f55e69f030f7163dffe9fd0752b32f070566451afe180f99dbeeb81f511ad8d \
+ --hash=sha256:1a2bc9f351a75ef49d664206d51f8e5ede9da246602dc2d2726837620ea034b2 \
+ --hash=sha256:22e14b5d70560b8dd51ec22863f370d1e595ac3d024cb8ad7d308b4cd95f8313 \
+ --hash=sha256:234ac59ea147c59ee4da87a0c0f098e9c8d169f4dc2a159ef720f1a61bbe27cd \
+ --hash=sha256:2369eea1ee4a7610a860d88f268eb39b95cb588acd7235e02fd5a5601773d4fa \
+ --hash=sha256:237bdbe6159cff53b4f24f397d43c6336c6b0b42affbe857970cefbb620911c8 \
+ --hash=sha256:28bf57629c75e810b6ae989f03c0828d64d6b26a5e205535585f96093e405ed1 \
+ --hash=sha256:2967f74ad52c3b98de4c3b32e1a44e32975e008a9cd2a8cc8966d6a5218c5cb2 \
+ --hash=sha256:2a75d49014d118e4198bcee5ee0a6f25856b29b12dbf7cd012791f8a6cc5c496 \
+ --hash=sha256:2bdfe3ac2e1bbe5b59a1a63721eb3b95fc9b6817ae4a46debbb4e11f6232428d \
+ --hash=sha256:2d074908e1aecee37a7635990b2c6d504cd4766c7bc9fc86d63f9c09af3fa11b \
+ --hash=sha256:2fb9bd477fdea8684f78791a6de97a953c51831ee2981f8e4f583ff3b9d9687e \
+ --hash=sha256:311f30128d7d333eebd7896965bfcfbd0065f1716ec92bd5638d7748eb6f936a \
+ --hash=sha256:329ce159e82018d646c7ac45b01a430369d526569ec08516081727a20e9e4af4 \
+ --hash=sha256:345b0426edd4e18138d6528aed636de7a9ed169b4aaf9d61a8c19e39d26838ca \
+ --hash=sha256:363e2f92b0f0174b2f8238240a1a30142e3db7b957a5dd5689b0e75fb717cc78 \
+ --hash=sha256:3a3bd0dcd373514dcec91c411ddb9632c0d7d92aed7093b8c3bbb6d69ca74408 \
+ --hash=sha256:3bed14e9c89dcb10e8f3a29f9ccac4955aebe93c71ae803af79265c9ca5644c5 \
+ --hash=sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3 \
+ --hash=sha256:44ecbf16649486d4aebafeaa7ec4c9fed8b88101f4dd612dcaf65d5e815f837f \
+ --hash=sha256:4532bff1b8421fd0a320463030c7520f56a79c9024a4e88f01c537316019005a \
+ --hash=sha256:49402233c892a461407c512a19435d1ce275543138294f7ef013f0b63d5d3765 \
+ --hash=sha256:4c0907b1928a36d5a998d72d64d8eaa7244989f7aaaf947500d3a800c83a3fd6 \
+ --hash=sha256:4d86f7aff21ee58f26dcf5ae81a9addbd914115cdebcbb2217e4f0ed8982e146 \
+ --hash=sha256:5777ee0881f9499ed0f71cc82cf873d9a0ca8af166dfa0af8ec4e675b7df48e6 \
+ --hash=sha256:5df196eb874dae23dcfb968c83d4f8fdccb333330fe1fc278ac5ceeb101003a9 \
+ --hash=sha256:619a609aa74ae43d90ed2e89bdd784765de0a25ca761b93e196d938b8fd1dbbd \
+ --hash=sha256:6e27f48bcd0957c6d4cb9d6fa6b61d192d0b13d5ef563e5f2ae35feafc0d179c \
+ --hash=sha256:6ff8a4a60c227ad87030d76e99cd1698345d4491638dfa6673027c48b3cd395f \
+ --hash=sha256:73d94b58ec7fecbc7366247d3b0b10a21681004153238750bb67bd9012414545 \
+ --hash=sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176 \
+ --hash=sha256:75832c08354f595c760a804588b9357d34ec00ba1c940c15e31e96d902093770 \
+ --hash=sha256:7709f51f5f7c853f0fb938bcd3bc59cdfdc5203635ffd18bf354f6967ea0f824 \
+ --hash=sha256:78baa6d91634dfb69ec52a463534bc0df05dbd546209b79a3880a34487f4b84f \
+ --hash=sha256:7974a0b5ecd505609e3b19742b60cee7aa2aa2fb3151bc917e6e2646d7667dcf \
+ --hash=sha256:7a4f97a081603d2050bfaffdefa5b02a9ec823f8348a572e39032caa8404a487 \
+ --hash=sha256:7b1bef6280950ee6c177b326508f86cad7ad4dff12454483b51d8b7d673a2c5d \
+ --hash=sha256:7d053096f67cd1241601111b698f5cad775f97ab25d81567d3f59219b5f1adbd \
+ --hash=sha256:804a4d582ba6e5b747c625bf1255e6b1507465494a40a2130978bda7b932c90b \
+ --hash=sha256:807f52c1f798eef6cf26beb819eeb8819b1622ddfeef9d0977a8502d4db6d534 \
+ --hash=sha256:80ed5e856eb7f30115aaf94e4a08114ccc8813e6ed1b5efa74f9f82e8509858f \
+ --hash=sha256:8417cb1f36cc0bc7eaba8ccb0e04d55f0ee52df06df3ad55259b9a323555fc8b \
+ --hash=sha256:8436c508b408b82d87dc5f62496973a1805cd46727c34440b0d29d8a2f50a6c9 \
+ --hash=sha256:89149166622f4db9b4b6a449256291dc87a99ee53151c74cbd82a53c8c2f6ccd \
+ --hash=sha256:8bfa33f4f2672964266e940dd22a195989ba31669bd84629f05fab3ef4e2d125 \
+ --hash=sha256:8c60ca7339acd497a55b0ea5d506b2a2612afb2826560416f6894e8b5770d4a9 \
+ --hash=sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de \
+ --hash=sha256:955f8851919303c92343d2f66165294848d57e9bba6cf6e3625485a70a038d11 \
+ --hash=sha256:97f68b8d6831127e4787ad15e6757232e14e12060bec17091b85eb1486b91d8d \
+ --hash=sha256:9b23ca7ef998bc739bf6ffc077c2116917eabcc901f88da1b9856b210ef63f35 \
+ --hash=sha256:9f0b8b1c6d84c8034a44893aba5e767bf9c7a211e313a9605d9c617d7083829f \
+ --hash=sha256:aabfa34badd18f1da5ec1bc2715cadc8dca465868a4e73a0173466b688f29dda \
+ --hash=sha256:ab36c8eb7e454e34e60eb55ca5d241a5d18b2c6244f6827a30e451c42410b5f7 \
+ --hash=sha256:b010a7a4fd316c3c484d482922d13044979e78d1861f0e0650423144c616a46a \
+ --hash=sha256:b1ac5992a838106edb89654e0aebfc24f5848ae2547d22c2c3f66454daa11971 \
+ --hash=sha256:b7b2d86dd06bfc2ade3312a83a5c364c7ec2e3498f8734282c6c3d4b07b346b8 \
+ --hash=sha256:b97e690a2118911e39b4042088092771b4ae3fc3aa86518f84b8cf6888dbdb41 \
+ --hash=sha256:bc2722592d8998c870fa4e290c2eec2c1569b87fe58618e67d38b4665dfa680d \
+ --hash=sha256:c0429126cf75e16c4f0ad00ee0eae4242dc652290f940152ca8c75c3a4b6ee8f \
+ --hash=sha256:c30197aa96e8eed02200a83fba2657b4c3acd0f0aa4bdc9f6c1af8e8962e0757 \
+ --hash=sha256:c4c3e6da02df6fa1410a7680bd3f63d4f710232d3139089536310d027950696a \
+ --hash=sha256:c75cb2a3e389853835e84a2d8fb2b81a10645b503eca9bcb98df6b5a43eb8886 \
+ --hash=sha256:c96836c97b1238e9c9e3fe90844c947d5afbf4f4c92762679acfe19927d81d77 \
+ --hash=sha256:d7f50a1f8c450f3925cb367d011448c39239bb3eb4117c36a6d354794de4ce76 \
+ --hash=sha256:d973f03c0cb71c5ed99037b870f2be986c3c05e63622c017ea9816881d2dd247 \
+ --hash=sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85 \
+ --hash=sha256:d9c3cdf5390dcd29aa8056d13e8e99526cda0305acc038b96b30352aff5ff2bb \
+ --hash=sha256:dad3e487649f498dd991eeb901125411559b22e8d7ab25d3aeb1af367df5efd7 \
+ --hash=sha256:dccbe65bd2f7f7ec22c4ff99ed56faa1e9f785482b9bbd7c717e26fd723a1d1e \
+ --hash=sha256:dd78cfcda14a1ef52584dbb008f7ac81c1328c0f58184bf9a84c49c605002da6 \
+ --hash=sha256:e218488cd232553829be0664c2292d3af2eeeb94b32bea483cf79ac6a694e037 \
+ --hash=sha256:e358e64305fe12299a08e08978f51fc21fac060dcfcddd95453eabe5b93ed0e1 \
+ --hash=sha256:ea0d8d539afa5eb2728aa1932a988a9a7af94f18582ffae4bc10b3fbdad0626e \
+ --hash=sha256:eab677309cdb30d047996b36d34caeda1dc91149e4fdca0b1a039b3f79d9a807 \
+ --hash=sha256:eb8178fe3dba6450a3e024e95ac49ed3400e506fd4e9e5c32d30adda88cbd407 \
+ --hash=sha256:ecddf25bee22fe4fe3737a399d0d177d72bc22be6913acfab364b40bce1ba83c \
+ --hash=sha256:eea6ee1db730b3483adf394ea72f808b6e18cf3cb6454b4d86e04fa8c4327a12 \
+ --hash=sha256:f08ff5e948271dc7e18a35641d2f11a4cd8dfd5634f55228b691e62b37125eb3 \
+ --hash=sha256:f30bf9fd9be89ecb2360c7d94a711f00c09b976258846efe40db3d05828e8089 \
+ --hash=sha256:fa88b843d6e211393a37219e6a1c1df99d35e8fd90446f1118f4216e307e48cd \
+ --hash=sha256:fc54db6c8593ef7d4b2a331b58653356cf04f67c960f584edb7c3d8c97e8f39e \
+ --hash=sha256:fd4ec41f914fa74ad1b8304bbc634b3de73d2a0889bd32076342a573e0779e00 \
+ --hash=sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616
# via requests
docutils==0.21.2 \
--hash=sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f \
diff --git a/tools/publish/requirements_linux.txt b/tools/publish/requirements_linux.txt
index 892b8b26b3..90b07d4c97 100644
--- a/tools/publish/requirements_linux.txt
+++ b/tools/publish/requirements_linux.txt
@@ -79,112 +79,99 @@ cffi==1.17.1 \
--hash=sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87 \
--hash=sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b
# via cryptography
-charset-normalizer==3.4.0 \
- --hash=sha256:0099d79bdfcf5c1f0c2c72f91516702ebf8b0b8ddd8905f97a8aecf49712c621 \
- --hash=sha256:0713f3adb9d03d49d365b70b84775d0a0d18e4ab08d12bc46baa6132ba78aaf6 \
- --hash=sha256:07afec21bbbbf8a5cc3651aa96b980afe2526e7f048fdfb7f1014d84acc8b6d8 \
- --hash=sha256:0b309d1747110feb25d7ed6b01afdec269c647d382c857ef4663bbe6ad95a912 \
- --hash=sha256:0d99dd8ff461990f12d6e42c7347fd9ab2532fb70e9621ba520f9e8637161d7c \
- --hash=sha256:0de7b687289d3c1b3e8660d0741874abe7888100efe14bd0f9fd7141bcbda92b \
- --hash=sha256:1110e22af8ca26b90bd6364fe4c763329b0ebf1ee213ba32b68c73de5752323d \
- --hash=sha256:130272c698667a982a5d0e626851ceff662565379baf0ff2cc58067b81d4f11d \
- --hash=sha256:136815f06a3ae311fae551c3df1f998a1ebd01ddd424aa5603a4336997629e95 \
- --hash=sha256:14215b71a762336254351b00ec720a8e85cada43b987da5a042e4ce3e82bd68e \
- --hash=sha256:1db4e7fefefd0f548d73e2e2e041f9df5c59e178b4c72fbac4cc6f535cfb1565 \
- --hash=sha256:1ffd9493de4c922f2a38c2bf62b831dcec90ac673ed1ca182fe11b4d8e9f2a64 \
- --hash=sha256:2006769bd1640bdf4d5641c69a3d63b71b81445473cac5ded39740a226fa88ab \
- --hash=sha256:20587d20f557fe189b7947d8e7ec5afa110ccf72a3128d61a2a387c3313f46be \
- --hash=sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e \
- --hash=sha256:27623ba66c183eca01bf9ff833875b459cad267aeeb044477fedac35e19ba907 \
- --hash=sha256:285e96d9d53422efc0d7a17c60e59f37fbf3dfa942073f666db4ac71e8d726d0 \
- --hash=sha256:2de62e8801ddfff069cd5c504ce3bc9672b23266597d4e4f50eda28846c322f2 \
- --hash=sha256:2f6c34da58ea9c1a9515621f4d9ac379871a8f21168ba1b5e09d74250de5ad62 \
- --hash=sha256:309a7de0a0ff3040acaebb35ec45d18db4b28232f21998851cfa709eeff49d62 \
- --hash=sha256:35c404d74c2926d0287fbd63ed5d27eb911eb9e4a3bb2c6d294f3cfd4a9e0c23 \
- --hash=sha256:3710a9751938947e6327ea9f3ea6332a09bf0ba0c09cae9cb1f250bd1f1549bc \
- --hash=sha256:3d59d125ffbd6d552765510e3f31ed75ebac2c7470c7274195b9161a32350284 \
- --hash=sha256:40d3ff7fc90b98c637bda91c89d51264a3dcf210cade3a2c6f838c7268d7a4ca \
- --hash=sha256:425c5f215d0eecee9a56cdb703203dda90423247421bf0d67125add85d0c4455 \
- --hash=sha256:43193c5cda5d612f247172016c4bb71251c784d7a4d9314677186a838ad34858 \
- --hash=sha256:44aeb140295a2f0659e113b31cfe92c9061622cadbc9e2a2f7b8ef6b1e29ef4b \
- --hash=sha256:47334db71978b23ebcf3c0f9f5ee98b8d65992b65c9c4f2d34c2eaf5bcaf0594 \
- --hash=sha256:4796efc4faf6b53a18e3d46343535caed491776a22af773f366534056c4e1fbc \
- --hash=sha256:4a51b48f42d9358460b78725283f04bddaf44a9358197b889657deba38f329db \
- --hash=sha256:4b67fdab07fdd3c10bb21edab3cbfe8cf5696f453afce75d815d9d7223fbe88b \
- --hash=sha256:4ec9dd88a5b71abfc74e9df5ebe7921c35cbb3b641181a531ca65cdb5e8e4dea \
- --hash=sha256:4f9fc98dad6c2eaa32fc3af1417d95b5e3d08aff968df0cd320066def971f9a6 \
- --hash=sha256:54b6a92d009cbe2fb11054ba694bc9e284dad30a26757b1e372a1fdddaf21920 \
- --hash=sha256:55f56e2ebd4e3bc50442fbc0888c9d8c94e4e06a933804e2af3e89e2f9c1c749 \
- --hash=sha256:5726cf76c982532c1863fb64d8c6dd0e4c90b6ece9feb06c9f202417a31f7dd7 \
- --hash=sha256:5d447056e2ca60382d460a604b6302d8db69476fd2015c81e7c35417cfabe4cd \
- --hash=sha256:5ed2e36c3e9b4f21dd9422f6893dec0abf2cca553af509b10cd630f878d3eb99 \
- --hash=sha256:5ff2ed8194587faf56555927b3aa10e6fb69d931e33953943bc4f837dfee2242 \
- --hash=sha256:62f60aebecfc7f4b82e3f639a7d1433a20ec32824db2199a11ad4f5e146ef5ee \
- --hash=sha256:63bc5c4ae26e4bc6be6469943b8253c0fd4e4186c43ad46e713ea61a0ba49129 \
- --hash=sha256:6b40e8d38afe634559e398cc32b1472f376a4099c75fe6299ae607e404c033b2 \
- --hash=sha256:6b493a043635eb376e50eedf7818f2f322eabbaa974e948bd8bdd29eb7ef2a51 \
- --hash=sha256:6dba5d19c4dfab08e58d5b36304b3f92f3bd5d42c1a3fa37b5ba5cdf6dfcbcee \
- --hash=sha256:6fd30dc99682dc2c603c2b315bded2799019cea829f8bf57dc6b61efde6611c8 \
- --hash=sha256:707b82d19e65c9bd28b81dde95249b07bf9f5b90ebe1ef17d9b57473f8a64b7b \
- --hash=sha256:7706f5850360ac01d80c89bcef1640683cc12ed87f42579dab6c5d3ed6888613 \
- --hash=sha256:7782afc9b6b42200f7362858f9e73b1f8316afb276d316336c0ec3bd73312742 \
- --hash=sha256:79983512b108e4a164b9c8d34de3992f76d48cadc9554c9e60b43f308988aabe \
- --hash=sha256:7f683ddc7eedd742e2889d2bfb96d69573fde1d92fcb811979cdb7165bb9c7d3 \
- --hash=sha256:82357d85de703176b5587dbe6ade8ff67f9f69a41c0733cf2425378b49954de5 \
- --hash=sha256:84450ba661fb96e9fd67629b93d2941c871ca86fc38d835d19d4225ff946a631 \
- --hash=sha256:86f4e8cca779080f66ff4f191a685ced73d2f72d50216f7112185dc02b90b9b7 \
- --hash=sha256:8cda06946eac330cbe6598f77bb54e690b4ca93f593dee1568ad22b04f347c15 \
- --hash=sha256:8ce7fd6767a1cc5a92a639b391891bf1c268b03ec7e021c7d6d902285259685c \
- --hash=sha256:8ff4e7cdfdb1ab5698e675ca622e72d58a6fa2a8aa58195de0c0061288e6e3ea \
- --hash=sha256:9289fd5dddcf57bab41d044f1756550f9e7cf0c8e373b8cdf0ce8773dc4bd417 \
- --hash=sha256:92a7e36b000bf022ef3dbb9c46bfe2d52c047d5e3f3343f43204263c5addc250 \
- --hash=sha256:92db3c28b5b2a273346bebb24857fda45601aef6ae1c011c0a997106581e8a88 \
- --hash=sha256:95c3c157765b031331dd4db3c775e58deaee050a3042fcad72cbc4189d7c8dca \
- --hash=sha256:980b4f289d1d90ca5efcf07958d3eb38ed9c0b7676bf2831a54d4f66f9c27dfa \
- --hash=sha256:9ae4ef0b3f6b41bad6366fb0ea4fc1d7ed051528e113a60fa2a65a9abb5b1d99 \
- --hash=sha256:9c98230f5042f4945f957d006edccc2af1e03ed5e37ce7c373f00a5a4daa6149 \
- --hash=sha256:9fa2566ca27d67c86569e8c85297aaf413ffab85a8960500f12ea34ff98e4c41 \
- --hash=sha256:a14969b8691f7998e74663b77b4c36c0337cb1df552da83d5c9004a93afdb574 \
- --hash=sha256:a8aacce6e2e1edcb6ac625fb0f8c3a9570ccc7bfba1f63419b3769ccf6a00ed0 \
- --hash=sha256:a8e538f46104c815be19c975572d74afb53f29650ea2025bbfaef359d2de2f7f \
- --hash=sha256:aa41e526a5d4a9dfcfbab0716c7e8a1b215abd3f3df5a45cf18a12721d31cb5d \
- --hash=sha256:aa693779a8b50cd97570e5a0f343538a8dbd3e496fa5dcb87e29406ad0299654 \
- --hash=sha256:ab22fbd9765e6954bc0bcff24c25ff71dcbfdb185fcdaca49e81bac68fe724d3 \
- --hash=sha256:ab2e5bef076f5a235c3774b4f4028a680432cded7cad37bba0fd90d64b187d19 \
- --hash=sha256:ab973df98fc99ab39080bfb0eb3a925181454d7c3ac8a1e695fddfae696d9e90 \
- --hash=sha256:af73657b7a68211996527dbfeffbb0864e043d270580c5aef06dc4b659a4b578 \
- --hash=sha256:b197e7094f232959f8f20541ead1d9862ac5ebea1d58e9849c1bf979255dfac9 \
- --hash=sha256:b295729485b06c1a0683af02a9e42d2caa9db04a373dc38a6a58cdd1e8abddf1 \
- --hash=sha256:b8831399554b92b72af5932cdbbd4ddc55c55f631bb13ff8fe4e6536a06c5c51 \
- --hash=sha256:b8dcd239c743aa2f9c22ce674a145e0a25cb1566c495928440a181ca1ccf6719 \
- --hash=sha256:bcb4f8ea87d03bc51ad04add8ceaf9b0f085ac045ab4d74e73bbc2dc033f0236 \
- --hash=sha256:bd7af3717683bea4c87acd8c0d3d5b44d56120b26fd3f8a692bdd2d5260c620a \
- --hash=sha256:bf4475b82be41b07cc5e5ff94810e6a01f276e37c2d55571e3fe175e467a1a1c \
- --hash=sha256:c3e446d253bd88f6377260d07c895816ebf33ffffd56c1c792b13bff9c3e1ade \
- --hash=sha256:c57516e58fd17d03ebe67e181a4e4e2ccab1168f8c2976c6a334d4f819fe5944 \
- --hash=sha256:c94057af19bc953643a33581844649a7fdab902624d2eb739738a30e2b3e60fc \
- --hash=sha256:cab5d0b79d987c67f3b9e9c53f54a61360422a5a0bc075f43cab5621d530c3b6 \
- --hash=sha256:ce031db0408e487fd2775d745ce30a7cd2923667cf3b69d48d219f1d8f5ddeb6 \
- --hash=sha256:cee4373f4d3ad28f1ab6290684d8e2ebdb9e7a1b74fdc39e4c211995f77bec27 \
- --hash=sha256:d5b054862739d276e09928de37c79ddeec42a6e1bfc55863be96a36ba22926f6 \
- --hash=sha256:dbe03226baf438ac4fda9e2d0715022fd579cb641c4cf639fa40d53b2fe6f3e2 \
- --hash=sha256:dc15e99b2d8a656f8e666854404f1ba54765871104e50c8e9813af8a7db07f12 \
- --hash=sha256:dcaf7c1524c0542ee2fc82cc8ec337f7a9f7edee2532421ab200d2b920fc97cf \
- --hash=sha256:dd4eda173a9fcccb5f2e2bd2a9f423d180194b1bf17cf59e3269899235b2a114 \
- --hash=sha256:dd9a8bd8900e65504a305bf8ae6fa9fbc66de94178c420791d0293702fce2df7 \
- --hash=sha256:de7376c29d95d6719048c194a9cf1a1b0393fbe8488a22008610b0361d834ecf \
- --hash=sha256:e7fdd52961feb4c96507aa649550ec2a0d527c086d284749b2f582f2d40a2e0d \
- --hash=sha256:e91f541a85298cf35433bf66f3fab2a4a2cff05c127eeca4af174f6d497f0d4b \
- --hash=sha256:e9e3c4c9e1ed40ea53acf11e2a386383c3304212c965773704e4603d589343ed \
- --hash=sha256:ee803480535c44e7f5ad00788526da7d85525cfefaf8acf8ab9a310000be4b03 \
- --hash=sha256:f09cb5a7bbe1ecae6e87901a2eb23e0256bb524a79ccc53eb0b7629fbe7677c4 \
- --hash=sha256:f19c1585933c82098c2a520f8ec1227f20e339e33aca8fa6f956f6691b784e67 \
- --hash=sha256:f1a2f519ae173b5b6a2c9d5fa3116ce16e48b3462c8b96dfdded11055e3d6365 \
- --hash=sha256:f28f891ccd15c514a0981f3b9db9aa23d62fe1a99997512b0491d2ed323d229a \
- --hash=sha256:f3e73a4255342d4eb26ef6df01e3962e73aa29baa3124a8e824c5d3364a65748 \
- --hash=sha256:f606a1881d2663630ea5b8ce2efe2111740df4b687bd78b34a8131baa007f79b \
- --hash=sha256:fe9f97feb71aa9896b81973a7bbada8c49501dc73e58a10fcef6663af95e5079 \
- --hash=sha256:ffc519621dce0c767e96b9c53f09c5d215578e10b02c285809f76509a3931482
+charset-normalizer==3.4.1 \
+ --hash=sha256:0167ddc8ab6508fe81860a57dd472b2ef4060e8d378f0cc555707126830f2537 \
+ --hash=sha256:01732659ba9b5b873fc117534143e4feefecf3b2078b0a6a2e925271bb6f4cfa \
+ --hash=sha256:01ad647cdd609225c5350561d084b42ddf732f4eeefe6e678765636791e78b9a \
+ --hash=sha256:04432ad9479fa40ec0f387795ddad4437a2b50417c69fa275e212933519ff294 \
+ --hash=sha256:0907f11d019260cdc3f94fbdb23ff9125f6b5d1039b76003b5b0ac9d6a6c9d5b \
+ --hash=sha256:0924e81d3d5e70f8126529951dac65c1010cdf117bb75eb02dd12339b57749dd \
+ --hash=sha256:09b26ae6b1abf0d27570633b2b078a2a20419c99d66fb2823173d73f188ce601 \
+ --hash=sha256:09b5e6733cbd160dcc09589227187e242a30a49ca5cefa5a7edd3f9d19ed53fd \
+ --hash=sha256:0af291f4fe114be0280cdd29d533696a77b5b49cfde5467176ecab32353395c4 \
+ --hash=sha256:0f55e69f030f7163dffe9fd0752b32f070566451afe180f99dbeeb81f511ad8d \
+ --hash=sha256:1a2bc9f351a75ef49d664206d51f8e5ede9da246602dc2d2726837620ea034b2 \
+ --hash=sha256:22e14b5d70560b8dd51ec22863f370d1e595ac3d024cb8ad7d308b4cd95f8313 \
+ --hash=sha256:234ac59ea147c59ee4da87a0c0f098e9c8d169f4dc2a159ef720f1a61bbe27cd \
+ --hash=sha256:2369eea1ee4a7610a860d88f268eb39b95cb588acd7235e02fd5a5601773d4fa \
+ --hash=sha256:237bdbe6159cff53b4f24f397d43c6336c6b0b42affbe857970cefbb620911c8 \
+ --hash=sha256:28bf57629c75e810b6ae989f03c0828d64d6b26a5e205535585f96093e405ed1 \
+ --hash=sha256:2967f74ad52c3b98de4c3b32e1a44e32975e008a9cd2a8cc8966d6a5218c5cb2 \
+ --hash=sha256:2a75d49014d118e4198bcee5ee0a6f25856b29b12dbf7cd012791f8a6cc5c496 \
+ --hash=sha256:2bdfe3ac2e1bbe5b59a1a63721eb3b95fc9b6817ae4a46debbb4e11f6232428d \
+ --hash=sha256:2d074908e1aecee37a7635990b2c6d504cd4766c7bc9fc86d63f9c09af3fa11b \
+ --hash=sha256:2fb9bd477fdea8684f78791a6de97a953c51831ee2981f8e4f583ff3b9d9687e \
+ --hash=sha256:311f30128d7d333eebd7896965bfcfbd0065f1716ec92bd5638d7748eb6f936a \
+ --hash=sha256:329ce159e82018d646c7ac45b01a430369d526569ec08516081727a20e9e4af4 \
+ --hash=sha256:345b0426edd4e18138d6528aed636de7a9ed169b4aaf9d61a8c19e39d26838ca \
+ --hash=sha256:363e2f92b0f0174b2f8238240a1a30142e3db7b957a5dd5689b0e75fb717cc78 \
+ --hash=sha256:3a3bd0dcd373514dcec91c411ddb9632c0d7d92aed7093b8c3bbb6d69ca74408 \
+ --hash=sha256:3bed14e9c89dcb10e8f3a29f9ccac4955aebe93c71ae803af79265c9ca5644c5 \
+ --hash=sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3 \
+ --hash=sha256:44ecbf16649486d4aebafeaa7ec4c9fed8b88101f4dd612dcaf65d5e815f837f \
+ --hash=sha256:4532bff1b8421fd0a320463030c7520f56a79c9024a4e88f01c537316019005a \
+ --hash=sha256:49402233c892a461407c512a19435d1ce275543138294f7ef013f0b63d5d3765 \
+ --hash=sha256:4c0907b1928a36d5a998d72d64d8eaa7244989f7aaaf947500d3a800c83a3fd6 \
+ --hash=sha256:4d86f7aff21ee58f26dcf5ae81a9addbd914115cdebcbb2217e4f0ed8982e146 \
+ --hash=sha256:5777ee0881f9499ed0f71cc82cf873d9a0ca8af166dfa0af8ec4e675b7df48e6 \
+ --hash=sha256:5df196eb874dae23dcfb968c83d4f8fdccb333330fe1fc278ac5ceeb101003a9 \
+ --hash=sha256:619a609aa74ae43d90ed2e89bdd784765de0a25ca761b93e196d938b8fd1dbbd \
+ --hash=sha256:6e27f48bcd0957c6d4cb9d6fa6b61d192d0b13d5ef563e5f2ae35feafc0d179c \
+ --hash=sha256:6ff8a4a60c227ad87030d76e99cd1698345d4491638dfa6673027c48b3cd395f \
+ --hash=sha256:73d94b58ec7fecbc7366247d3b0b10a21681004153238750bb67bd9012414545 \
+ --hash=sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176 \
+ --hash=sha256:75832c08354f595c760a804588b9357d34ec00ba1c940c15e31e96d902093770 \
+ --hash=sha256:7709f51f5f7c853f0fb938bcd3bc59cdfdc5203635ffd18bf354f6967ea0f824 \
+ --hash=sha256:78baa6d91634dfb69ec52a463534bc0df05dbd546209b79a3880a34487f4b84f \
+ --hash=sha256:7974a0b5ecd505609e3b19742b60cee7aa2aa2fb3151bc917e6e2646d7667dcf \
+ --hash=sha256:7a4f97a081603d2050bfaffdefa5b02a9ec823f8348a572e39032caa8404a487 \
+ --hash=sha256:7b1bef6280950ee6c177b326508f86cad7ad4dff12454483b51d8b7d673a2c5d \
+ --hash=sha256:7d053096f67cd1241601111b698f5cad775f97ab25d81567d3f59219b5f1adbd \
+ --hash=sha256:804a4d582ba6e5b747c625bf1255e6b1507465494a40a2130978bda7b932c90b \
+ --hash=sha256:807f52c1f798eef6cf26beb819eeb8819b1622ddfeef9d0977a8502d4db6d534 \
+ --hash=sha256:80ed5e856eb7f30115aaf94e4a08114ccc8813e6ed1b5efa74f9f82e8509858f \
+ --hash=sha256:8417cb1f36cc0bc7eaba8ccb0e04d55f0ee52df06df3ad55259b9a323555fc8b \
+ --hash=sha256:8436c508b408b82d87dc5f62496973a1805cd46727c34440b0d29d8a2f50a6c9 \
+ --hash=sha256:89149166622f4db9b4b6a449256291dc87a99ee53151c74cbd82a53c8c2f6ccd \
+ --hash=sha256:8bfa33f4f2672964266e940dd22a195989ba31669bd84629f05fab3ef4e2d125 \
+ --hash=sha256:8c60ca7339acd497a55b0ea5d506b2a2612afb2826560416f6894e8b5770d4a9 \
+ --hash=sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de \
+ --hash=sha256:955f8851919303c92343d2f66165294848d57e9bba6cf6e3625485a70a038d11 \
+ --hash=sha256:97f68b8d6831127e4787ad15e6757232e14e12060bec17091b85eb1486b91d8d \
+ --hash=sha256:9b23ca7ef998bc739bf6ffc077c2116917eabcc901f88da1b9856b210ef63f35 \
+ --hash=sha256:9f0b8b1c6d84c8034a44893aba5e767bf9c7a211e313a9605d9c617d7083829f \
+ --hash=sha256:aabfa34badd18f1da5ec1bc2715cadc8dca465868a4e73a0173466b688f29dda \
+ --hash=sha256:ab36c8eb7e454e34e60eb55ca5d241a5d18b2c6244f6827a30e451c42410b5f7 \
+ --hash=sha256:b010a7a4fd316c3c484d482922d13044979e78d1861f0e0650423144c616a46a \
+ --hash=sha256:b1ac5992a838106edb89654e0aebfc24f5848ae2547d22c2c3f66454daa11971 \
+ --hash=sha256:b7b2d86dd06bfc2ade3312a83a5c364c7ec2e3498f8734282c6c3d4b07b346b8 \
+ --hash=sha256:b97e690a2118911e39b4042088092771b4ae3fc3aa86518f84b8cf6888dbdb41 \
+ --hash=sha256:bc2722592d8998c870fa4e290c2eec2c1569b87fe58618e67d38b4665dfa680d \
+ --hash=sha256:c0429126cf75e16c4f0ad00ee0eae4242dc652290f940152ca8c75c3a4b6ee8f \
+ --hash=sha256:c30197aa96e8eed02200a83fba2657b4c3acd0f0aa4bdc9f6c1af8e8962e0757 \
+ --hash=sha256:c4c3e6da02df6fa1410a7680bd3f63d4f710232d3139089536310d027950696a \
+ --hash=sha256:c75cb2a3e389853835e84a2d8fb2b81a10645b503eca9bcb98df6b5a43eb8886 \
+ --hash=sha256:c96836c97b1238e9c9e3fe90844c947d5afbf4f4c92762679acfe19927d81d77 \
+ --hash=sha256:d7f50a1f8c450f3925cb367d011448c39239bb3eb4117c36a6d354794de4ce76 \
+ --hash=sha256:d973f03c0cb71c5ed99037b870f2be986c3c05e63622c017ea9816881d2dd247 \
+ --hash=sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85 \
+ --hash=sha256:d9c3cdf5390dcd29aa8056d13e8e99526cda0305acc038b96b30352aff5ff2bb \
+ --hash=sha256:dad3e487649f498dd991eeb901125411559b22e8d7ab25d3aeb1af367df5efd7 \
+ --hash=sha256:dccbe65bd2f7f7ec22c4ff99ed56faa1e9f785482b9bbd7c717e26fd723a1d1e \
+ --hash=sha256:dd78cfcda14a1ef52584dbb008f7ac81c1328c0f58184bf9a84c49c605002da6 \
+ --hash=sha256:e218488cd232553829be0664c2292d3af2eeeb94b32bea483cf79ac6a694e037 \
+ --hash=sha256:e358e64305fe12299a08e08978f51fc21fac060dcfcddd95453eabe5b93ed0e1 \
+ --hash=sha256:ea0d8d539afa5eb2728aa1932a988a9a7af94f18582ffae4bc10b3fbdad0626e \
+ --hash=sha256:eab677309cdb30d047996b36d34caeda1dc91149e4fdca0b1a039b3f79d9a807 \
+ --hash=sha256:eb8178fe3dba6450a3e024e95ac49ed3400e506fd4e9e5c32d30adda88cbd407 \
+ --hash=sha256:ecddf25bee22fe4fe3737a399d0d177d72bc22be6913acfab364b40bce1ba83c \
+ --hash=sha256:eea6ee1db730b3483adf394ea72f808b6e18cf3cb6454b4d86e04fa8c4327a12 \
+ --hash=sha256:f08ff5e948271dc7e18a35641d2f11a4cd8dfd5634f55228b691e62b37125eb3 \
+ --hash=sha256:f30bf9fd9be89ecb2360c7d94a711f00c09b976258846efe40db3d05828e8089 \
+ --hash=sha256:fa88b843d6e211393a37219e6a1c1df99d35e8fd90446f1118f4216e307e48cd \
+ --hash=sha256:fc54db6c8593ef7d4b2a331b58653356cf04f67c960f584edb7c3d8c97e8f39e \
+ --hash=sha256:fd4ec41f914fa74ad1b8304bbc634b3de73d2a0889bd32076342a573e0779e00 \
+ --hash=sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616
# via requests
cryptography==43.0.3 \
--hash=sha256:0c580952eef9bf68c4747774cde7ec1d85a6e61de97281f2dba83c7d2c806362 \
diff --git a/tools/publish/requirements_universal.txt b/tools/publish/requirements_universal.txt
index 337073ac25..9b145fce49 100644
--- a/tools/publish/requirements_universal.txt
+++ b/tools/publish/requirements_universal.txt
@@ -79,112 +79,99 @@ cffi==1.17.1 ; platform_python_implementation != 'PyPy' and sys_platform == 'lin
--hash=sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87 \
--hash=sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b
# via cryptography
-charset-normalizer==3.4.0 \
- --hash=sha256:0099d79bdfcf5c1f0c2c72f91516702ebf8b0b8ddd8905f97a8aecf49712c621 \
- --hash=sha256:0713f3adb9d03d49d365b70b84775d0a0d18e4ab08d12bc46baa6132ba78aaf6 \
- --hash=sha256:07afec21bbbbf8a5cc3651aa96b980afe2526e7f048fdfb7f1014d84acc8b6d8 \
- --hash=sha256:0b309d1747110feb25d7ed6b01afdec269c647d382c857ef4663bbe6ad95a912 \
- --hash=sha256:0d99dd8ff461990f12d6e42c7347fd9ab2532fb70e9621ba520f9e8637161d7c \
- --hash=sha256:0de7b687289d3c1b3e8660d0741874abe7888100efe14bd0f9fd7141bcbda92b \
- --hash=sha256:1110e22af8ca26b90bd6364fe4c763329b0ebf1ee213ba32b68c73de5752323d \
- --hash=sha256:130272c698667a982a5d0e626851ceff662565379baf0ff2cc58067b81d4f11d \
- --hash=sha256:136815f06a3ae311fae551c3df1f998a1ebd01ddd424aa5603a4336997629e95 \
- --hash=sha256:14215b71a762336254351b00ec720a8e85cada43b987da5a042e4ce3e82bd68e \
- --hash=sha256:1db4e7fefefd0f548d73e2e2e041f9df5c59e178b4c72fbac4cc6f535cfb1565 \
- --hash=sha256:1ffd9493de4c922f2a38c2bf62b831dcec90ac673ed1ca182fe11b4d8e9f2a64 \
- --hash=sha256:2006769bd1640bdf4d5641c69a3d63b71b81445473cac5ded39740a226fa88ab \
- --hash=sha256:20587d20f557fe189b7947d8e7ec5afa110ccf72a3128d61a2a387c3313f46be \
- --hash=sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e \
- --hash=sha256:27623ba66c183eca01bf9ff833875b459cad267aeeb044477fedac35e19ba907 \
- --hash=sha256:285e96d9d53422efc0d7a17c60e59f37fbf3dfa942073f666db4ac71e8d726d0 \
- --hash=sha256:2de62e8801ddfff069cd5c504ce3bc9672b23266597d4e4f50eda28846c322f2 \
- --hash=sha256:2f6c34da58ea9c1a9515621f4d9ac379871a8f21168ba1b5e09d74250de5ad62 \
- --hash=sha256:309a7de0a0ff3040acaebb35ec45d18db4b28232f21998851cfa709eeff49d62 \
- --hash=sha256:35c404d74c2926d0287fbd63ed5d27eb911eb9e4a3bb2c6d294f3cfd4a9e0c23 \
- --hash=sha256:3710a9751938947e6327ea9f3ea6332a09bf0ba0c09cae9cb1f250bd1f1549bc \
- --hash=sha256:3d59d125ffbd6d552765510e3f31ed75ebac2c7470c7274195b9161a32350284 \
- --hash=sha256:40d3ff7fc90b98c637bda91c89d51264a3dcf210cade3a2c6f838c7268d7a4ca \
- --hash=sha256:425c5f215d0eecee9a56cdb703203dda90423247421bf0d67125add85d0c4455 \
- --hash=sha256:43193c5cda5d612f247172016c4bb71251c784d7a4d9314677186a838ad34858 \
- --hash=sha256:44aeb140295a2f0659e113b31cfe92c9061622cadbc9e2a2f7b8ef6b1e29ef4b \
- --hash=sha256:47334db71978b23ebcf3c0f9f5ee98b8d65992b65c9c4f2d34c2eaf5bcaf0594 \
- --hash=sha256:4796efc4faf6b53a18e3d46343535caed491776a22af773f366534056c4e1fbc \
- --hash=sha256:4a51b48f42d9358460b78725283f04bddaf44a9358197b889657deba38f329db \
- --hash=sha256:4b67fdab07fdd3c10bb21edab3cbfe8cf5696f453afce75d815d9d7223fbe88b \
- --hash=sha256:4ec9dd88a5b71abfc74e9df5ebe7921c35cbb3b641181a531ca65cdb5e8e4dea \
- --hash=sha256:4f9fc98dad6c2eaa32fc3af1417d95b5e3d08aff968df0cd320066def971f9a6 \
- --hash=sha256:54b6a92d009cbe2fb11054ba694bc9e284dad30a26757b1e372a1fdddaf21920 \
- --hash=sha256:55f56e2ebd4e3bc50442fbc0888c9d8c94e4e06a933804e2af3e89e2f9c1c749 \
- --hash=sha256:5726cf76c982532c1863fb64d8c6dd0e4c90b6ece9feb06c9f202417a31f7dd7 \
- --hash=sha256:5d447056e2ca60382d460a604b6302d8db69476fd2015c81e7c35417cfabe4cd \
- --hash=sha256:5ed2e36c3e9b4f21dd9422f6893dec0abf2cca553af509b10cd630f878d3eb99 \
- --hash=sha256:5ff2ed8194587faf56555927b3aa10e6fb69d931e33953943bc4f837dfee2242 \
- --hash=sha256:62f60aebecfc7f4b82e3f639a7d1433a20ec32824db2199a11ad4f5e146ef5ee \
- --hash=sha256:63bc5c4ae26e4bc6be6469943b8253c0fd4e4186c43ad46e713ea61a0ba49129 \
- --hash=sha256:6b40e8d38afe634559e398cc32b1472f376a4099c75fe6299ae607e404c033b2 \
- --hash=sha256:6b493a043635eb376e50eedf7818f2f322eabbaa974e948bd8bdd29eb7ef2a51 \
- --hash=sha256:6dba5d19c4dfab08e58d5b36304b3f92f3bd5d42c1a3fa37b5ba5cdf6dfcbcee \
- --hash=sha256:6fd30dc99682dc2c603c2b315bded2799019cea829f8bf57dc6b61efde6611c8 \
- --hash=sha256:707b82d19e65c9bd28b81dde95249b07bf9f5b90ebe1ef17d9b57473f8a64b7b \
- --hash=sha256:7706f5850360ac01d80c89bcef1640683cc12ed87f42579dab6c5d3ed6888613 \
- --hash=sha256:7782afc9b6b42200f7362858f9e73b1f8316afb276d316336c0ec3bd73312742 \
- --hash=sha256:79983512b108e4a164b9c8d34de3992f76d48cadc9554c9e60b43f308988aabe \
- --hash=sha256:7f683ddc7eedd742e2889d2bfb96d69573fde1d92fcb811979cdb7165bb9c7d3 \
- --hash=sha256:82357d85de703176b5587dbe6ade8ff67f9f69a41c0733cf2425378b49954de5 \
- --hash=sha256:84450ba661fb96e9fd67629b93d2941c871ca86fc38d835d19d4225ff946a631 \
- --hash=sha256:86f4e8cca779080f66ff4f191a685ced73d2f72d50216f7112185dc02b90b9b7 \
- --hash=sha256:8cda06946eac330cbe6598f77bb54e690b4ca93f593dee1568ad22b04f347c15 \
- --hash=sha256:8ce7fd6767a1cc5a92a639b391891bf1c268b03ec7e021c7d6d902285259685c \
- --hash=sha256:8ff4e7cdfdb1ab5698e675ca622e72d58a6fa2a8aa58195de0c0061288e6e3ea \
- --hash=sha256:9289fd5dddcf57bab41d044f1756550f9e7cf0c8e373b8cdf0ce8773dc4bd417 \
- --hash=sha256:92a7e36b000bf022ef3dbb9c46bfe2d52c047d5e3f3343f43204263c5addc250 \
- --hash=sha256:92db3c28b5b2a273346bebb24857fda45601aef6ae1c011c0a997106581e8a88 \
- --hash=sha256:95c3c157765b031331dd4db3c775e58deaee050a3042fcad72cbc4189d7c8dca \
- --hash=sha256:980b4f289d1d90ca5efcf07958d3eb38ed9c0b7676bf2831a54d4f66f9c27dfa \
- --hash=sha256:9ae4ef0b3f6b41bad6366fb0ea4fc1d7ed051528e113a60fa2a65a9abb5b1d99 \
- --hash=sha256:9c98230f5042f4945f957d006edccc2af1e03ed5e37ce7c373f00a5a4daa6149 \
- --hash=sha256:9fa2566ca27d67c86569e8c85297aaf413ffab85a8960500f12ea34ff98e4c41 \
- --hash=sha256:a14969b8691f7998e74663b77b4c36c0337cb1df552da83d5c9004a93afdb574 \
- --hash=sha256:a8aacce6e2e1edcb6ac625fb0f8c3a9570ccc7bfba1f63419b3769ccf6a00ed0 \
- --hash=sha256:a8e538f46104c815be19c975572d74afb53f29650ea2025bbfaef359d2de2f7f \
- --hash=sha256:aa41e526a5d4a9dfcfbab0716c7e8a1b215abd3f3df5a45cf18a12721d31cb5d \
- --hash=sha256:aa693779a8b50cd97570e5a0f343538a8dbd3e496fa5dcb87e29406ad0299654 \
- --hash=sha256:ab22fbd9765e6954bc0bcff24c25ff71dcbfdb185fcdaca49e81bac68fe724d3 \
- --hash=sha256:ab2e5bef076f5a235c3774b4f4028a680432cded7cad37bba0fd90d64b187d19 \
- --hash=sha256:ab973df98fc99ab39080bfb0eb3a925181454d7c3ac8a1e695fddfae696d9e90 \
- --hash=sha256:af73657b7a68211996527dbfeffbb0864e043d270580c5aef06dc4b659a4b578 \
- --hash=sha256:b197e7094f232959f8f20541ead1d9862ac5ebea1d58e9849c1bf979255dfac9 \
- --hash=sha256:b295729485b06c1a0683af02a9e42d2caa9db04a373dc38a6a58cdd1e8abddf1 \
- --hash=sha256:b8831399554b92b72af5932cdbbd4ddc55c55f631bb13ff8fe4e6536a06c5c51 \
- --hash=sha256:b8dcd239c743aa2f9c22ce674a145e0a25cb1566c495928440a181ca1ccf6719 \
- --hash=sha256:bcb4f8ea87d03bc51ad04add8ceaf9b0f085ac045ab4d74e73bbc2dc033f0236 \
- --hash=sha256:bd7af3717683bea4c87acd8c0d3d5b44d56120b26fd3f8a692bdd2d5260c620a \
- --hash=sha256:bf4475b82be41b07cc5e5ff94810e6a01f276e37c2d55571e3fe175e467a1a1c \
- --hash=sha256:c3e446d253bd88f6377260d07c895816ebf33ffffd56c1c792b13bff9c3e1ade \
- --hash=sha256:c57516e58fd17d03ebe67e181a4e4e2ccab1168f8c2976c6a334d4f819fe5944 \
- --hash=sha256:c94057af19bc953643a33581844649a7fdab902624d2eb739738a30e2b3e60fc \
- --hash=sha256:cab5d0b79d987c67f3b9e9c53f54a61360422a5a0bc075f43cab5621d530c3b6 \
- --hash=sha256:ce031db0408e487fd2775d745ce30a7cd2923667cf3b69d48d219f1d8f5ddeb6 \
- --hash=sha256:cee4373f4d3ad28f1ab6290684d8e2ebdb9e7a1b74fdc39e4c211995f77bec27 \
- --hash=sha256:d5b054862739d276e09928de37c79ddeec42a6e1bfc55863be96a36ba22926f6 \
- --hash=sha256:dbe03226baf438ac4fda9e2d0715022fd579cb641c4cf639fa40d53b2fe6f3e2 \
- --hash=sha256:dc15e99b2d8a656f8e666854404f1ba54765871104e50c8e9813af8a7db07f12 \
- --hash=sha256:dcaf7c1524c0542ee2fc82cc8ec337f7a9f7edee2532421ab200d2b920fc97cf \
- --hash=sha256:dd4eda173a9fcccb5f2e2bd2a9f423d180194b1bf17cf59e3269899235b2a114 \
- --hash=sha256:dd9a8bd8900e65504a305bf8ae6fa9fbc66de94178c420791d0293702fce2df7 \
- --hash=sha256:de7376c29d95d6719048c194a9cf1a1b0393fbe8488a22008610b0361d834ecf \
- --hash=sha256:e7fdd52961feb4c96507aa649550ec2a0d527c086d284749b2f582f2d40a2e0d \
- --hash=sha256:e91f541a85298cf35433bf66f3fab2a4a2cff05c127eeca4af174f6d497f0d4b \
- --hash=sha256:e9e3c4c9e1ed40ea53acf11e2a386383c3304212c965773704e4603d589343ed \
- --hash=sha256:ee803480535c44e7f5ad00788526da7d85525cfefaf8acf8ab9a310000be4b03 \
- --hash=sha256:f09cb5a7bbe1ecae6e87901a2eb23e0256bb524a79ccc53eb0b7629fbe7677c4 \
- --hash=sha256:f19c1585933c82098c2a520f8ec1227f20e339e33aca8fa6f956f6691b784e67 \
- --hash=sha256:f1a2f519ae173b5b6a2c9d5fa3116ce16e48b3462c8b96dfdded11055e3d6365 \
- --hash=sha256:f28f891ccd15c514a0981f3b9db9aa23d62fe1a99997512b0491d2ed323d229a \
- --hash=sha256:f3e73a4255342d4eb26ef6df01e3962e73aa29baa3124a8e824c5d3364a65748 \
- --hash=sha256:f606a1881d2663630ea5b8ce2efe2111740df4b687bd78b34a8131baa007f79b \
- --hash=sha256:fe9f97feb71aa9896b81973a7bbada8c49501dc73e58a10fcef6663af95e5079 \
- --hash=sha256:ffc519621dce0c767e96b9c53f09c5d215578e10b02c285809f76509a3931482
+charset-normalizer==3.4.1 \
+ --hash=sha256:0167ddc8ab6508fe81860a57dd472b2ef4060e8d378f0cc555707126830f2537 \
+ --hash=sha256:01732659ba9b5b873fc117534143e4feefecf3b2078b0a6a2e925271bb6f4cfa \
+ --hash=sha256:01ad647cdd609225c5350561d084b42ddf732f4eeefe6e678765636791e78b9a \
+ --hash=sha256:04432ad9479fa40ec0f387795ddad4437a2b50417c69fa275e212933519ff294 \
+ --hash=sha256:0907f11d019260cdc3f94fbdb23ff9125f6b5d1039b76003b5b0ac9d6a6c9d5b \
+ --hash=sha256:0924e81d3d5e70f8126529951dac65c1010cdf117bb75eb02dd12339b57749dd \
+ --hash=sha256:09b26ae6b1abf0d27570633b2b078a2a20419c99d66fb2823173d73f188ce601 \
+ --hash=sha256:09b5e6733cbd160dcc09589227187e242a30a49ca5cefa5a7edd3f9d19ed53fd \
+ --hash=sha256:0af291f4fe114be0280cdd29d533696a77b5b49cfde5467176ecab32353395c4 \
+ --hash=sha256:0f55e69f030f7163dffe9fd0752b32f070566451afe180f99dbeeb81f511ad8d \
+ --hash=sha256:1a2bc9f351a75ef49d664206d51f8e5ede9da246602dc2d2726837620ea034b2 \
+ --hash=sha256:22e14b5d70560b8dd51ec22863f370d1e595ac3d024cb8ad7d308b4cd95f8313 \
+ --hash=sha256:234ac59ea147c59ee4da87a0c0f098e9c8d169f4dc2a159ef720f1a61bbe27cd \
+ --hash=sha256:2369eea1ee4a7610a860d88f268eb39b95cb588acd7235e02fd5a5601773d4fa \
+ --hash=sha256:237bdbe6159cff53b4f24f397d43c6336c6b0b42affbe857970cefbb620911c8 \
+ --hash=sha256:28bf57629c75e810b6ae989f03c0828d64d6b26a5e205535585f96093e405ed1 \
+ --hash=sha256:2967f74ad52c3b98de4c3b32e1a44e32975e008a9cd2a8cc8966d6a5218c5cb2 \
+ --hash=sha256:2a75d49014d118e4198bcee5ee0a6f25856b29b12dbf7cd012791f8a6cc5c496 \
+ --hash=sha256:2bdfe3ac2e1bbe5b59a1a63721eb3b95fc9b6817ae4a46debbb4e11f6232428d \
+ --hash=sha256:2d074908e1aecee37a7635990b2c6d504cd4766c7bc9fc86d63f9c09af3fa11b \
+ --hash=sha256:2fb9bd477fdea8684f78791a6de97a953c51831ee2981f8e4f583ff3b9d9687e \
+ --hash=sha256:311f30128d7d333eebd7896965bfcfbd0065f1716ec92bd5638d7748eb6f936a \
+ --hash=sha256:329ce159e82018d646c7ac45b01a430369d526569ec08516081727a20e9e4af4 \
+ --hash=sha256:345b0426edd4e18138d6528aed636de7a9ed169b4aaf9d61a8c19e39d26838ca \
+ --hash=sha256:363e2f92b0f0174b2f8238240a1a30142e3db7b957a5dd5689b0e75fb717cc78 \
+ --hash=sha256:3a3bd0dcd373514dcec91c411ddb9632c0d7d92aed7093b8c3bbb6d69ca74408 \
+ --hash=sha256:3bed14e9c89dcb10e8f3a29f9ccac4955aebe93c71ae803af79265c9ca5644c5 \
+ --hash=sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3 \
+ --hash=sha256:44ecbf16649486d4aebafeaa7ec4c9fed8b88101f4dd612dcaf65d5e815f837f \
+ --hash=sha256:4532bff1b8421fd0a320463030c7520f56a79c9024a4e88f01c537316019005a \
+ --hash=sha256:49402233c892a461407c512a19435d1ce275543138294f7ef013f0b63d5d3765 \
+ --hash=sha256:4c0907b1928a36d5a998d72d64d8eaa7244989f7aaaf947500d3a800c83a3fd6 \
+ --hash=sha256:4d86f7aff21ee58f26dcf5ae81a9addbd914115cdebcbb2217e4f0ed8982e146 \
+ --hash=sha256:5777ee0881f9499ed0f71cc82cf873d9a0ca8af166dfa0af8ec4e675b7df48e6 \
+ --hash=sha256:5df196eb874dae23dcfb968c83d4f8fdccb333330fe1fc278ac5ceeb101003a9 \
+ --hash=sha256:619a609aa74ae43d90ed2e89bdd784765de0a25ca761b93e196d938b8fd1dbbd \
+ --hash=sha256:6e27f48bcd0957c6d4cb9d6fa6b61d192d0b13d5ef563e5f2ae35feafc0d179c \
+ --hash=sha256:6ff8a4a60c227ad87030d76e99cd1698345d4491638dfa6673027c48b3cd395f \
+ --hash=sha256:73d94b58ec7fecbc7366247d3b0b10a21681004153238750bb67bd9012414545 \
+ --hash=sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176 \
+ --hash=sha256:75832c08354f595c760a804588b9357d34ec00ba1c940c15e31e96d902093770 \
+ --hash=sha256:7709f51f5f7c853f0fb938bcd3bc59cdfdc5203635ffd18bf354f6967ea0f824 \
+ --hash=sha256:78baa6d91634dfb69ec52a463534bc0df05dbd546209b79a3880a34487f4b84f \
+ --hash=sha256:7974a0b5ecd505609e3b19742b60cee7aa2aa2fb3151bc917e6e2646d7667dcf \
+ --hash=sha256:7a4f97a081603d2050bfaffdefa5b02a9ec823f8348a572e39032caa8404a487 \
+ --hash=sha256:7b1bef6280950ee6c177b326508f86cad7ad4dff12454483b51d8b7d673a2c5d \
+ --hash=sha256:7d053096f67cd1241601111b698f5cad775f97ab25d81567d3f59219b5f1adbd \
+ --hash=sha256:804a4d582ba6e5b747c625bf1255e6b1507465494a40a2130978bda7b932c90b \
+ --hash=sha256:807f52c1f798eef6cf26beb819eeb8819b1622ddfeef9d0977a8502d4db6d534 \
+ --hash=sha256:80ed5e856eb7f30115aaf94e4a08114ccc8813e6ed1b5efa74f9f82e8509858f \
+ --hash=sha256:8417cb1f36cc0bc7eaba8ccb0e04d55f0ee52df06df3ad55259b9a323555fc8b \
+ --hash=sha256:8436c508b408b82d87dc5f62496973a1805cd46727c34440b0d29d8a2f50a6c9 \
+ --hash=sha256:89149166622f4db9b4b6a449256291dc87a99ee53151c74cbd82a53c8c2f6ccd \
+ --hash=sha256:8bfa33f4f2672964266e940dd22a195989ba31669bd84629f05fab3ef4e2d125 \
+ --hash=sha256:8c60ca7339acd497a55b0ea5d506b2a2612afb2826560416f6894e8b5770d4a9 \
+ --hash=sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de \
+ --hash=sha256:955f8851919303c92343d2f66165294848d57e9bba6cf6e3625485a70a038d11 \
+ --hash=sha256:97f68b8d6831127e4787ad15e6757232e14e12060bec17091b85eb1486b91d8d \
+ --hash=sha256:9b23ca7ef998bc739bf6ffc077c2116917eabcc901f88da1b9856b210ef63f35 \
+ --hash=sha256:9f0b8b1c6d84c8034a44893aba5e767bf9c7a211e313a9605d9c617d7083829f \
+ --hash=sha256:aabfa34badd18f1da5ec1bc2715cadc8dca465868a4e73a0173466b688f29dda \
+ --hash=sha256:ab36c8eb7e454e34e60eb55ca5d241a5d18b2c6244f6827a30e451c42410b5f7 \
+ --hash=sha256:b010a7a4fd316c3c484d482922d13044979e78d1861f0e0650423144c616a46a \
+ --hash=sha256:b1ac5992a838106edb89654e0aebfc24f5848ae2547d22c2c3f66454daa11971 \
+ --hash=sha256:b7b2d86dd06bfc2ade3312a83a5c364c7ec2e3498f8734282c6c3d4b07b346b8 \
+ --hash=sha256:b97e690a2118911e39b4042088092771b4ae3fc3aa86518f84b8cf6888dbdb41 \
+ --hash=sha256:bc2722592d8998c870fa4e290c2eec2c1569b87fe58618e67d38b4665dfa680d \
+ --hash=sha256:c0429126cf75e16c4f0ad00ee0eae4242dc652290f940152ca8c75c3a4b6ee8f \
+ --hash=sha256:c30197aa96e8eed02200a83fba2657b4c3acd0f0aa4bdc9f6c1af8e8962e0757 \
+ --hash=sha256:c4c3e6da02df6fa1410a7680bd3f63d4f710232d3139089536310d027950696a \
+ --hash=sha256:c75cb2a3e389853835e84a2d8fb2b81a10645b503eca9bcb98df6b5a43eb8886 \
+ --hash=sha256:c96836c97b1238e9c9e3fe90844c947d5afbf4f4c92762679acfe19927d81d77 \
+ --hash=sha256:d7f50a1f8c450f3925cb367d011448c39239bb3eb4117c36a6d354794de4ce76 \
+ --hash=sha256:d973f03c0cb71c5ed99037b870f2be986c3c05e63622c017ea9816881d2dd247 \
+ --hash=sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85 \
+ --hash=sha256:d9c3cdf5390dcd29aa8056d13e8e99526cda0305acc038b96b30352aff5ff2bb \
+ --hash=sha256:dad3e487649f498dd991eeb901125411559b22e8d7ab25d3aeb1af367df5efd7 \
+ --hash=sha256:dccbe65bd2f7f7ec22c4ff99ed56faa1e9f785482b9bbd7c717e26fd723a1d1e \
+ --hash=sha256:dd78cfcda14a1ef52584dbb008f7ac81c1328c0f58184bf9a84c49c605002da6 \
+ --hash=sha256:e218488cd232553829be0664c2292d3af2eeeb94b32bea483cf79ac6a694e037 \
+ --hash=sha256:e358e64305fe12299a08e08978f51fc21fac060dcfcddd95453eabe5b93ed0e1 \
+ --hash=sha256:ea0d8d539afa5eb2728aa1932a988a9a7af94f18582ffae4bc10b3fbdad0626e \
+ --hash=sha256:eab677309cdb30d047996b36d34caeda1dc91149e4fdca0b1a039b3f79d9a807 \
+ --hash=sha256:eb8178fe3dba6450a3e024e95ac49ed3400e506fd4e9e5c32d30adda88cbd407 \
+ --hash=sha256:ecddf25bee22fe4fe3737a399d0d177d72bc22be6913acfab364b40bce1ba83c \
+ --hash=sha256:eea6ee1db730b3483adf394ea72f808b6e18cf3cb6454b4d86e04fa8c4327a12 \
+ --hash=sha256:f08ff5e948271dc7e18a35641d2f11a4cd8dfd5634f55228b691e62b37125eb3 \
+ --hash=sha256:f30bf9fd9be89ecb2360c7d94a711f00c09b976258846efe40db3d05828e8089 \
+ --hash=sha256:fa88b843d6e211393a37219e6a1c1df99d35e8fd90446f1118f4216e307e48cd \
+ --hash=sha256:fc54db6c8593ef7d4b2a331b58653356cf04f67c960f584edb7c3d8c97e8f39e \
+ --hash=sha256:fd4ec41f914fa74ad1b8304bbc634b3de73d2a0889bd32076342a573e0779e00 \
+ --hash=sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616
# via requests
cryptography==43.0.3 ; sys_platform == 'linux' \
--hash=sha256:0c580952eef9bf68c4747774cde7ec1d85a6e61de97281f2dba83c7d2c806362 \
diff --git a/tools/publish/requirements_windows.txt b/tools/publish/requirements_windows.txt
index 1c6b9808fb..1980812d15 100644
--- a/tools/publish/requirements_windows.txt
+++ b/tools/publish/requirements_windows.txt
@@ -10,112 +10,99 @@ certifi==2025.1.31 \
--hash=sha256:3d5da6925056f6f18f119200434a4780a94263f10d1c21d032a6f6b2baa20651 \
--hash=sha256:ca78db4565a652026a4db2bcdf68f2fb589ea80d0be70e03929ed730746b84fe
# via requests
-charset-normalizer==3.4.0 \
- --hash=sha256:0099d79bdfcf5c1f0c2c72f91516702ebf8b0b8ddd8905f97a8aecf49712c621 \
- --hash=sha256:0713f3adb9d03d49d365b70b84775d0a0d18e4ab08d12bc46baa6132ba78aaf6 \
- --hash=sha256:07afec21bbbbf8a5cc3651aa96b980afe2526e7f048fdfb7f1014d84acc8b6d8 \
- --hash=sha256:0b309d1747110feb25d7ed6b01afdec269c647d382c857ef4663bbe6ad95a912 \
- --hash=sha256:0d99dd8ff461990f12d6e42c7347fd9ab2532fb70e9621ba520f9e8637161d7c \
- --hash=sha256:0de7b687289d3c1b3e8660d0741874abe7888100efe14bd0f9fd7141bcbda92b \
- --hash=sha256:1110e22af8ca26b90bd6364fe4c763329b0ebf1ee213ba32b68c73de5752323d \
- --hash=sha256:130272c698667a982a5d0e626851ceff662565379baf0ff2cc58067b81d4f11d \
- --hash=sha256:136815f06a3ae311fae551c3df1f998a1ebd01ddd424aa5603a4336997629e95 \
- --hash=sha256:14215b71a762336254351b00ec720a8e85cada43b987da5a042e4ce3e82bd68e \
- --hash=sha256:1db4e7fefefd0f548d73e2e2e041f9df5c59e178b4c72fbac4cc6f535cfb1565 \
- --hash=sha256:1ffd9493de4c922f2a38c2bf62b831dcec90ac673ed1ca182fe11b4d8e9f2a64 \
- --hash=sha256:2006769bd1640bdf4d5641c69a3d63b71b81445473cac5ded39740a226fa88ab \
- --hash=sha256:20587d20f557fe189b7947d8e7ec5afa110ccf72a3128d61a2a387c3313f46be \
- --hash=sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e \
- --hash=sha256:27623ba66c183eca01bf9ff833875b459cad267aeeb044477fedac35e19ba907 \
- --hash=sha256:285e96d9d53422efc0d7a17c60e59f37fbf3dfa942073f666db4ac71e8d726d0 \
- --hash=sha256:2de62e8801ddfff069cd5c504ce3bc9672b23266597d4e4f50eda28846c322f2 \
- --hash=sha256:2f6c34da58ea9c1a9515621f4d9ac379871a8f21168ba1b5e09d74250de5ad62 \
- --hash=sha256:309a7de0a0ff3040acaebb35ec45d18db4b28232f21998851cfa709eeff49d62 \
- --hash=sha256:35c404d74c2926d0287fbd63ed5d27eb911eb9e4a3bb2c6d294f3cfd4a9e0c23 \
- --hash=sha256:3710a9751938947e6327ea9f3ea6332a09bf0ba0c09cae9cb1f250bd1f1549bc \
- --hash=sha256:3d59d125ffbd6d552765510e3f31ed75ebac2c7470c7274195b9161a32350284 \
- --hash=sha256:40d3ff7fc90b98c637bda91c89d51264a3dcf210cade3a2c6f838c7268d7a4ca \
- --hash=sha256:425c5f215d0eecee9a56cdb703203dda90423247421bf0d67125add85d0c4455 \
- --hash=sha256:43193c5cda5d612f247172016c4bb71251c784d7a4d9314677186a838ad34858 \
- --hash=sha256:44aeb140295a2f0659e113b31cfe92c9061622cadbc9e2a2f7b8ef6b1e29ef4b \
- --hash=sha256:47334db71978b23ebcf3c0f9f5ee98b8d65992b65c9c4f2d34c2eaf5bcaf0594 \
- --hash=sha256:4796efc4faf6b53a18e3d46343535caed491776a22af773f366534056c4e1fbc \
- --hash=sha256:4a51b48f42d9358460b78725283f04bddaf44a9358197b889657deba38f329db \
- --hash=sha256:4b67fdab07fdd3c10bb21edab3cbfe8cf5696f453afce75d815d9d7223fbe88b \
- --hash=sha256:4ec9dd88a5b71abfc74e9df5ebe7921c35cbb3b641181a531ca65cdb5e8e4dea \
- --hash=sha256:4f9fc98dad6c2eaa32fc3af1417d95b5e3d08aff968df0cd320066def971f9a6 \
- --hash=sha256:54b6a92d009cbe2fb11054ba694bc9e284dad30a26757b1e372a1fdddaf21920 \
- --hash=sha256:55f56e2ebd4e3bc50442fbc0888c9d8c94e4e06a933804e2af3e89e2f9c1c749 \
- --hash=sha256:5726cf76c982532c1863fb64d8c6dd0e4c90b6ece9feb06c9f202417a31f7dd7 \
- --hash=sha256:5d447056e2ca60382d460a604b6302d8db69476fd2015c81e7c35417cfabe4cd \
- --hash=sha256:5ed2e36c3e9b4f21dd9422f6893dec0abf2cca553af509b10cd630f878d3eb99 \
- --hash=sha256:5ff2ed8194587faf56555927b3aa10e6fb69d931e33953943bc4f837dfee2242 \
- --hash=sha256:62f60aebecfc7f4b82e3f639a7d1433a20ec32824db2199a11ad4f5e146ef5ee \
- --hash=sha256:63bc5c4ae26e4bc6be6469943b8253c0fd4e4186c43ad46e713ea61a0ba49129 \
- --hash=sha256:6b40e8d38afe634559e398cc32b1472f376a4099c75fe6299ae607e404c033b2 \
- --hash=sha256:6b493a043635eb376e50eedf7818f2f322eabbaa974e948bd8bdd29eb7ef2a51 \
- --hash=sha256:6dba5d19c4dfab08e58d5b36304b3f92f3bd5d42c1a3fa37b5ba5cdf6dfcbcee \
- --hash=sha256:6fd30dc99682dc2c603c2b315bded2799019cea829f8bf57dc6b61efde6611c8 \
- --hash=sha256:707b82d19e65c9bd28b81dde95249b07bf9f5b90ebe1ef17d9b57473f8a64b7b \
- --hash=sha256:7706f5850360ac01d80c89bcef1640683cc12ed87f42579dab6c5d3ed6888613 \
- --hash=sha256:7782afc9b6b42200f7362858f9e73b1f8316afb276d316336c0ec3bd73312742 \
- --hash=sha256:79983512b108e4a164b9c8d34de3992f76d48cadc9554c9e60b43f308988aabe \
- --hash=sha256:7f683ddc7eedd742e2889d2bfb96d69573fde1d92fcb811979cdb7165bb9c7d3 \
- --hash=sha256:82357d85de703176b5587dbe6ade8ff67f9f69a41c0733cf2425378b49954de5 \
- --hash=sha256:84450ba661fb96e9fd67629b93d2941c871ca86fc38d835d19d4225ff946a631 \
- --hash=sha256:86f4e8cca779080f66ff4f191a685ced73d2f72d50216f7112185dc02b90b9b7 \
- --hash=sha256:8cda06946eac330cbe6598f77bb54e690b4ca93f593dee1568ad22b04f347c15 \
- --hash=sha256:8ce7fd6767a1cc5a92a639b391891bf1c268b03ec7e021c7d6d902285259685c \
- --hash=sha256:8ff4e7cdfdb1ab5698e675ca622e72d58a6fa2a8aa58195de0c0061288e6e3ea \
- --hash=sha256:9289fd5dddcf57bab41d044f1756550f9e7cf0c8e373b8cdf0ce8773dc4bd417 \
- --hash=sha256:92a7e36b000bf022ef3dbb9c46bfe2d52c047d5e3f3343f43204263c5addc250 \
- --hash=sha256:92db3c28b5b2a273346bebb24857fda45601aef6ae1c011c0a997106581e8a88 \
- --hash=sha256:95c3c157765b031331dd4db3c775e58deaee050a3042fcad72cbc4189d7c8dca \
- --hash=sha256:980b4f289d1d90ca5efcf07958d3eb38ed9c0b7676bf2831a54d4f66f9c27dfa \
- --hash=sha256:9ae4ef0b3f6b41bad6366fb0ea4fc1d7ed051528e113a60fa2a65a9abb5b1d99 \
- --hash=sha256:9c98230f5042f4945f957d006edccc2af1e03ed5e37ce7c373f00a5a4daa6149 \
- --hash=sha256:9fa2566ca27d67c86569e8c85297aaf413ffab85a8960500f12ea34ff98e4c41 \
- --hash=sha256:a14969b8691f7998e74663b77b4c36c0337cb1df552da83d5c9004a93afdb574 \
- --hash=sha256:a8aacce6e2e1edcb6ac625fb0f8c3a9570ccc7bfba1f63419b3769ccf6a00ed0 \
- --hash=sha256:a8e538f46104c815be19c975572d74afb53f29650ea2025bbfaef359d2de2f7f \
- --hash=sha256:aa41e526a5d4a9dfcfbab0716c7e8a1b215abd3f3df5a45cf18a12721d31cb5d \
- --hash=sha256:aa693779a8b50cd97570e5a0f343538a8dbd3e496fa5dcb87e29406ad0299654 \
- --hash=sha256:ab22fbd9765e6954bc0bcff24c25ff71dcbfdb185fcdaca49e81bac68fe724d3 \
- --hash=sha256:ab2e5bef076f5a235c3774b4f4028a680432cded7cad37bba0fd90d64b187d19 \
- --hash=sha256:ab973df98fc99ab39080bfb0eb3a925181454d7c3ac8a1e695fddfae696d9e90 \
- --hash=sha256:af73657b7a68211996527dbfeffbb0864e043d270580c5aef06dc4b659a4b578 \
- --hash=sha256:b197e7094f232959f8f20541ead1d9862ac5ebea1d58e9849c1bf979255dfac9 \
- --hash=sha256:b295729485b06c1a0683af02a9e42d2caa9db04a373dc38a6a58cdd1e8abddf1 \
- --hash=sha256:b8831399554b92b72af5932cdbbd4ddc55c55f631bb13ff8fe4e6536a06c5c51 \
- --hash=sha256:b8dcd239c743aa2f9c22ce674a145e0a25cb1566c495928440a181ca1ccf6719 \
- --hash=sha256:bcb4f8ea87d03bc51ad04add8ceaf9b0f085ac045ab4d74e73bbc2dc033f0236 \
- --hash=sha256:bd7af3717683bea4c87acd8c0d3d5b44d56120b26fd3f8a692bdd2d5260c620a \
- --hash=sha256:bf4475b82be41b07cc5e5ff94810e6a01f276e37c2d55571e3fe175e467a1a1c \
- --hash=sha256:c3e446d253bd88f6377260d07c895816ebf33ffffd56c1c792b13bff9c3e1ade \
- --hash=sha256:c57516e58fd17d03ebe67e181a4e4e2ccab1168f8c2976c6a334d4f819fe5944 \
- --hash=sha256:c94057af19bc953643a33581844649a7fdab902624d2eb739738a30e2b3e60fc \
- --hash=sha256:cab5d0b79d987c67f3b9e9c53f54a61360422a5a0bc075f43cab5621d530c3b6 \
- --hash=sha256:ce031db0408e487fd2775d745ce30a7cd2923667cf3b69d48d219f1d8f5ddeb6 \
- --hash=sha256:cee4373f4d3ad28f1ab6290684d8e2ebdb9e7a1b74fdc39e4c211995f77bec27 \
- --hash=sha256:d5b054862739d276e09928de37c79ddeec42a6e1bfc55863be96a36ba22926f6 \
- --hash=sha256:dbe03226baf438ac4fda9e2d0715022fd579cb641c4cf639fa40d53b2fe6f3e2 \
- --hash=sha256:dc15e99b2d8a656f8e666854404f1ba54765871104e50c8e9813af8a7db07f12 \
- --hash=sha256:dcaf7c1524c0542ee2fc82cc8ec337f7a9f7edee2532421ab200d2b920fc97cf \
- --hash=sha256:dd4eda173a9fcccb5f2e2bd2a9f423d180194b1bf17cf59e3269899235b2a114 \
- --hash=sha256:dd9a8bd8900e65504a305bf8ae6fa9fbc66de94178c420791d0293702fce2df7 \
- --hash=sha256:de7376c29d95d6719048c194a9cf1a1b0393fbe8488a22008610b0361d834ecf \
- --hash=sha256:e7fdd52961feb4c96507aa649550ec2a0d527c086d284749b2f582f2d40a2e0d \
- --hash=sha256:e91f541a85298cf35433bf66f3fab2a4a2cff05c127eeca4af174f6d497f0d4b \
- --hash=sha256:e9e3c4c9e1ed40ea53acf11e2a386383c3304212c965773704e4603d589343ed \
- --hash=sha256:ee803480535c44e7f5ad00788526da7d85525cfefaf8acf8ab9a310000be4b03 \
- --hash=sha256:f09cb5a7bbe1ecae6e87901a2eb23e0256bb524a79ccc53eb0b7629fbe7677c4 \
- --hash=sha256:f19c1585933c82098c2a520f8ec1227f20e339e33aca8fa6f956f6691b784e67 \
- --hash=sha256:f1a2f519ae173b5b6a2c9d5fa3116ce16e48b3462c8b96dfdded11055e3d6365 \
- --hash=sha256:f28f891ccd15c514a0981f3b9db9aa23d62fe1a99997512b0491d2ed323d229a \
- --hash=sha256:f3e73a4255342d4eb26ef6df01e3962e73aa29baa3124a8e824c5d3364a65748 \
- --hash=sha256:f606a1881d2663630ea5b8ce2efe2111740df4b687bd78b34a8131baa007f79b \
- --hash=sha256:fe9f97feb71aa9896b81973a7bbada8c49501dc73e58a10fcef6663af95e5079 \
- --hash=sha256:ffc519621dce0c767e96b9c53f09c5d215578e10b02c285809f76509a3931482
+charset-normalizer==3.4.1 \
+ --hash=sha256:0167ddc8ab6508fe81860a57dd472b2ef4060e8d378f0cc555707126830f2537 \
+ --hash=sha256:01732659ba9b5b873fc117534143e4feefecf3b2078b0a6a2e925271bb6f4cfa \
+ --hash=sha256:01ad647cdd609225c5350561d084b42ddf732f4eeefe6e678765636791e78b9a \
+ --hash=sha256:04432ad9479fa40ec0f387795ddad4437a2b50417c69fa275e212933519ff294 \
+ --hash=sha256:0907f11d019260cdc3f94fbdb23ff9125f6b5d1039b76003b5b0ac9d6a6c9d5b \
+ --hash=sha256:0924e81d3d5e70f8126529951dac65c1010cdf117bb75eb02dd12339b57749dd \
+ --hash=sha256:09b26ae6b1abf0d27570633b2b078a2a20419c99d66fb2823173d73f188ce601 \
+ --hash=sha256:09b5e6733cbd160dcc09589227187e242a30a49ca5cefa5a7edd3f9d19ed53fd \
+ --hash=sha256:0af291f4fe114be0280cdd29d533696a77b5b49cfde5467176ecab32353395c4 \
+ --hash=sha256:0f55e69f030f7163dffe9fd0752b32f070566451afe180f99dbeeb81f511ad8d \
+ --hash=sha256:1a2bc9f351a75ef49d664206d51f8e5ede9da246602dc2d2726837620ea034b2 \
+ --hash=sha256:22e14b5d70560b8dd51ec22863f370d1e595ac3d024cb8ad7d308b4cd95f8313 \
+ --hash=sha256:234ac59ea147c59ee4da87a0c0f098e9c8d169f4dc2a159ef720f1a61bbe27cd \
+ --hash=sha256:2369eea1ee4a7610a860d88f268eb39b95cb588acd7235e02fd5a5601773d4fa \
+ --hash=sha256:237bdbe6159cff53b4f24f397d43c6336c6b0b42affbe857970cefbb620911c8 \
+ --hash=sha256:28bf57629c75e810b6ae989f03c0828d64d6b26a5e205535585f96093e405ed1 \
+ --hash=sha256:2967f74ad52c3b98de4c3b32e1a44e32975e008a9cd2a8cc8966d6a5218c5cb2 \
+ --hash=sha256:2a75d49014d118e4198bcee5ee0a6f25856b29b12dbf7cd012791f8a6cc5c496 \
+ --hash=sha256:2bdfe3ac2e1bbe5b59a1a63721eb3b95fc9b6817ae4a46debbb4e11f6232428d \
+ --hash=sha256:2d074908e1aecee37a7635990b2c6d504cd4766c7bc9fc86d63f9c09af3fa11b \
+ --hash=sha256:2fb9bd477fdea8684f78791a6de97a953c51831ee2981f8e4f583ff3b9d9687e \
+ --hash=sha256:311f30128d7d333eebd7896965bfcfbd0065f1716ec92bd5638d7748eb6f936a \
+ --hash=sha256:329ce159e82018d646c7ac45b01a430369d526569ec08516081727a20e9e4af4 \
+ --hash=sha256:345b0426edd4e18138d6528aed636de7a9ed169b4aaf9d61a8c19e39d26838ca \
+ --hash=sha256:363e2f92b0f0174b2f8238240a1a30142e3db7b957a5dd5689b0e75fb717cc78 \
+ --hash=sha256:3a3bd0dcd373514dcec91c411ddb9632c0d7d92aed7093b8c3bbb6d69ca74408 \
+ --hash=sha256:3bed14e9c89dcb10e8f3a29f9ccac4955aebe93c71ae803af79265c9ca5644c5 \
+ --hash=sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3 \
+ --hash=sha256:44ecbf16649486d4aebafeaa7ec4c9fed8b88101f4dd612dcaf65d5e815f837f \
+ --hash=sha256:4532bff1b8421fd0a320463030c7520f56a79c9024a4e88f01c537316019005a \
+ --hash=sha256:49402233c892a461407c512a19435d1ce275543138294f7ef013f0b63d5d3765 \
+ --hash=sha256:4c0907b1928a36d5a998d72d64d8eaa7244989f7aaaf947500d3a800c83a3fd6 \
+ --hash=sha256:4d86f7aff21ee58f26dcf5ae81a9addbd914115cdebcbb2217e4f0ed8982e146 \
+ --hash=sha256:5777ee0881f9499ed0f71cc82cf873d9a0ca8af166dfa0af8ec4e675b7df48e6 \
+ --hash=sha256:5df196eb874dae23dcfb968c83d4f8fdccb333330fe1fc278ac5ceeb101003a9 \
+ --hash=sha256:619a609aa74ae43d90ed2e89bdd784765de0a25ca761b93e196d938b8fd1dbbd \
+ --hash=sha256:6e27f48bcd0957c6d4cb9d6fa6b61d192d0b13d5ef563e5f2ae35feafc0d179c \
+ --hash=sha256:6ff8a4a60c227ad87030d76e99cd1698345d4491638dfa6673027c48b3cd395f \
+ --hash=sha256:73d94b58ec7fecbc7366247d3b0b10a21681004153238750bb67bd9012414545 \
+ --hash=sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176 \
+ --hash=sha256:75832c08354f595c760a804588b9357d34ec00ba1c940c15e31e96d902093770 \
+ --hash=sha256:7709f51f5f7c853f0fb938bcd3bc59cdfdc5203635ffd18bf354f6967ea0f824 \
+ --hash=sha256:78baa6d91634dfb69ec52a463534bc0df05dbd546209b79a3880a34487f4b84f \
+ --hash=sha256:7974a0b5ecd505609e3b19742b60cee7aa2aa2fb3151bc917e6e2646d7667dcf \
+ --hash=sha256:7a4f97a081603d2050bfaffdefa5b02a9ec823f8348a572e39032caa8404a487 \
+ --hash=sha256:7b1bef6280950ee6c177b326508f86cad7ad4dff12454483b51d8b7d673a2c5d \
+ --hash=sha256:7d053096f67cd1241601111b698f5cad775f97ab25d81567d3f59219b5f1adbd \
+ --hash=sha256:804a4d582ba6e5b747c625bf1255e6b1507465494a40a2130978bda7b932c90b \
+ --hash=sha256:807f52c1f798eef6cf26beb819eeb8819b1622ddfeef9d0977a8502d4db6d534 \
+ --hash=sha256:80ed5e856eb7f30115aaf94e4a08114ccc8813e6ed1b5efa74f9f82e8509858f \
+ --hash=sha256:8417cb1f36cc0bc7eaba8ccb0e04d55f0ee52df06df3ad55259b9a323555fc8b \
+ --hash=sha256:8436c508b408b82d87dc5f62496973a1805cd46727c34440b0d29d8a2f50a6c9 \
+ --hash=sha256:89149166622f4db9b4b6a449256291dc87a99ee53151c74cbd82a53c8c2f6ccd \
+ --hash=sha256:8bfa33f4f2672964266e940dd22a195989ba31669bd84629f05fab3ef4e2d125 \
+ --hash=sha256:8c60ca7339acd497a55b0ea5d506b2a2612afb2826560416f6894e8b5770d4a9 \
+ --hash=sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de \
+ --hash=sha256:955f8851919303c92343d2f66165294848d57e9bba6cf6e3625485a70a038d11 \
+ --hash=sha256:97f68b8d6831127e4787ad15e6757232e14e12060bec17091b85eb1486b91d8d \
+ --hash=sha256:9b23ca7ef998bc739bf6ffc077c2116917eabcc901f88da1b9856b210ef63f35 \
+ --hash=sha256:9f0b8b1c6d84c8034a44893aba5e767bf9c7a211e313a9605d9c617d7083829f \
+ --hash=sha256:aabfa34badd18f1da5ec1bc2715cadc8dca465868a4e73a0173466b688f29dda \
+ --hash=sha256:ab36c8eb7e454e34e60eb55ca5d241a5d18b2c6244f6827a30e451c42410b5f7 \
+ --hash=sha256:b010a7a4fd316c3c484d482922d13044979e78d1861f0e0650423144c616a46a \
+ --hash=sha256:b1ac5992a838106edb89654e0aebfc24f5848ae2547d22c2c3f66454daa11971 \
+ --hash=sha256:b7b2d86dd06bfc2ade3312a83a5c364c7ec2e3498f8734282c6c3d4b07b346b8 \
+ --hash=sha256:b97e690a2118911e39b4042088092771b4ae3fc3aa86518f84b8cf6888dbdb41 \
+ --hash=sha256:bc2722592d8998c870fa4e290c2eec2c1569b87fe58618e67d38b4665dfa680d \
+ --hash=sha256:c0429126cf75e16c4f0ad00ee0eae4242dc652290f940152ca8c75c3a4b6ee8f \
+ --hash=sha256:c30197aa96e8eed02200a83fba2657b4c3acd0f0aa4bdc9f6c1af8e8962e0757 \
+ --hash=sha256:c4c3e6da02df6fa1410a7680bd3f63d4f710232d3139089536310d027950696a \
+ --hash=sha256:c75cb2a3e389853835e84a2d8fb2b81a10645b503eca9bcb98df6b5a43eb8886 \
+ --hash=sha256:c96836c97b1238e9c9e3fe90844c947d5afbf4f4c92762679acfe19927d81d77 \
+ --hash=sha256:d7f50a1f8c450f3925cb367d011448c39239bb3eb4117c36a6d354794de4ce76 \
+ --hash=sha256:d973f03c0cb71c5ed99037b870f2be986c3c05e63622c017ea9816881d2dd247 \
+ --hash=sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85 \
+ --hash=sha256:d9c3cdf5390dcd29aa8056d13e8e99526cda0305acc038b96b30352aff5ff2bb \
+ --hash=sha256:dad3e487649f498dd991eeb901125411559b22e8d7ab25d3aeb1af367df5efd7 \
+ --hash=sha256:dccbe65bd2f7f7ec22c4ff99ed56faa1e9f785482b9bbd7c717e26fd723a1d1e \
+ --hash=sha256:dd78cfcda14a1ef52584dbb008f7ac81c1328c0f58184bf9a84c49c605002da6 \
+ --hash=sha256:e218488cd232553829be0664c2292d3af2eeeb94b32bea483cf79ac6a694e037 \
+ --hash=sha256:e358e64305fe12299a08e08978f51fc21fac060dcfcddd95453eabe5b93ed0e1 \
+ --hash=sha256:ea0d8d539afa5eb2728aa1932a988a9a7af94f18582ffae4bc10b3fbdad0626e \
+ --hash=sha256:eab677309cdb30d047996b36d34caeda1dc91149e4fdca0b1a039b3f79d9a807 \
+ --hash=sha256:eb8178fe3dba6450a3e024e95ac49ed3400e506fd4e9e5c32d30adda88cbd407 \
+ --hash=sha256:ecddf25bee22fe4fe3737a399d0d177d72bc22be6913acfab364b40bce1ba83c \
+ --hash=sha256:eea6ee1db730b3483adf394ea72f808b6e18cf3cb6454b4d86e04fa8c4327a12 \
+ --hash=sha256:f08ff5e948271dc7e18a35641d2f11a4cd8dfd5634f55228b691e62b37125eb3 \
+ --hash=sha256:f30bf9fd9be89ecb2360c7d94a711f00c09b976258846efe40db3d05828e8089 \
+ --hash=sha256:fa88b843d6e211393a37219e6a1c1df99d35e8fd90446f1118f4216e307e48cd \
+ --hash=sha256:fc54db6c8593ef7d4b2a331b58653356cf04f67c960f584edb7c3d8c97e8f39e \
+ --hash=sha256:fd4ec41f914fa74ad1b8304bbc634b3de73d2a0889bd32076342a573e0779e00 \
+ --hash=sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616
# via requests
docutils==0.21.2 \
--hash=sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f \
From 97637d2451647561205b10494b410f3b6edc3f83 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Tue, 8 Apr 2025 10:46:55 +0900
Subject: [PATCH 015/156] build(deps): bump charset-normalizer from 3.4.0 to
3.4.1 in /docs (#2752)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Bumps [charset-normalizer](https://github.com/jawah/charset_normalizer)
from 3.4.0 to 3.4.1.
Release notes (sourced from charset-normalizer's releases):
Version 3.4.1
🚀 We're still raising awareness around HTTP/2 and HTTP/3!
Did you know that Internet Explorer 11 shipped with optional HTTP/2 support back in 2013? libcurl also shipped it in 2014 [...]
Using Requests today is the rough equivalent of using EOL Windows 8! We promptly invite Python developers to look at the first drop-in replacement for Requests, namely Niquests. It ships with native WebSocket, SSE, Happy Eyeballs, DNS over HTTPS, and so on [...], all while remaining compatible with all prior Requests plug-ins / add-ons.
It leverages charset-normalizer in a better way! Check it out: you can be up to 3X faster and get real, respectable support with it.
3.4.1 (2024-12-24)
Changed
- Project metadata are now stored using `pyproject.toml` instead of `setup.cfg`, using setuptools as the build backend.
- Enforce annotation delayed loading for simpler and consistent types in the project.
- Optional mypyc compilation upgraded to version 1.14 for Python >= 3.8.
Added
- pre-commit configuration.
- noxfile.
Removed
- `build-requirements.txt`, as per using `pyproject.toml` native build configuration.
- `bin/integration.py` and `bin/serve.py` in favor of downstream integration tests (see noxfile).
- `setup.cfg` in favor of `pyproject.toml` metadata configuration.
- Unused `utils.range_scan` function.
Fixed
- Converting content to Unicode bytes may insert `utf_8` instead of the preferred `utf-8`. (#572)
- Deprecation warning "'count' is passed as positional argument" when converting to Unicode bytes on Python 3.13+.
Commits
- ffdf7f5 :wrench: fix long description content-type inferred as rst instead of md
- c7197b7 :pencil: fix changelog entries (#582)
- c390e1f Merge pull request #581 from jawah/refresh-part-2
- f9d6b8c :lock: add CODEOWNERS
- 7ce1ef1 :wrench: use ubuntu-22.04 for cibuildwheel in continuous deployment workflow
- deed205 :wrench: update LICENSE copyright
- f11f571 :wrench: include noxfile in sdist
- 1ec7c06 :wrench: update changelog
- 14b4649 :bug: output(...) replace declarative mark using non iana compliant encoding ...
- 1b06bc0 Merge branch 'refresh-part-2' of github.com:jawah/charset_normalizer into ref...
- Additional commits viewable in compare view
Dependabot will resolve any conflicts with this PR as long as you don't
alter it yourself. You can also trigger a rebase manually by commenting
`@dependabot rebase`.
[//]: # (dependabot-automerge-start)
[//]: # (dependabot-automerge-end)
---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR:
- `@dependabot rebase` will rebase this PR
- `@dependabot recreate` will recreate this PR, overwriting any edits
that have been made to it
- `@dependabot merge` will merge this PR after your CI passes on it
- `@dependabot squash and merge` will squash and merge this PR after
your CI passes on it
- `@dependabot cancel merge` will cancel a previously requested merge
and block automerging
- `@dependabot reopen` will reopen this PR if it is closed
- `@dependabot close` will close this PR and stop Dependabot recreating
it. You can achieve the same result by closing it manually
- `@dependabot show ignore conditions` will show all
of the ignore conditions of the specified dependency
- `@dependabot ignore this major version` will close this PR and stop
Dependabot creating any more for this major version (unless you reopen
the PR or upgrade to it yourself)
- `@dependabot ignore this minor version` will close this PR and stop
Dependabot creating any more for this minor version (unless you reopen
the PR or upgrade to it yourself)
- `@dependabot ignore this dependency` will close this PR and stop
Dependabot creating any more for this dependency (unless you reopen the
PR or upgrade to it yourself)
Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
docs/requirements.txt | 199 ++++++++++++++++++++----------------------
1 file changed, 93 insertions(+), 106 deletions(-)
diff --git a/docs/requirements.txt b/docs/requirements.txt
index 66d41a963f..8d1cbabffc 100644
--- a/docs/requirements.txt
+++ b/docs/requirements.txt
@@ -22,112 +22,99 @@ certifi==2025.1.31 \
--hash=sha256:3d5da6925056f6f18f119200434a4780a94263f10d1c21d032a6f6b2baa20651 \
--hash=sha256:ca78db4565a652026a4db2bcdf68f2fb589ea80d0be70e03929ed730746b84fe
# via requests
-charset-normalizer==3.4.0 \
- --hash=sha256:0099d79bdfcf5c1f0c2c72f91516702ebf8b0b8ddd8905f97a8aecf49712c621 \
- --hash=sha256:0713f3adb9d03d49d365b70b84775d0a0d18e4ab08d12bc46baa6132ba78aaf6 \
- --hash=sha256:07afec21bbbbf8a5cc3651aa96b980afe2526e7f048fdfb7f1014d84acc8b6d8 \
- --hash=sha256:0b309d1747110feb25d7ed6b01afdec269c647d382c857ef4663bbe6ad95a912 \
- --hash=sha256:0d99dd8ff461990f12d6e42c7347fd9ab2532fb70e9621ba520f9e8637161d7c \
- --hash=sha256:0de7b687289d3c1b3e8660d0741874abe7888100efe14bd0f9fd7141bcbda92b \
- --hash=sha256:1110e22af8ca26b90bd6364fe4c763329b0ebf1ee213ba32b68c73de5752323d \
- --hash=sha256:130272c698667a982a5d0e626851ceff662565379baf0ff2cc58067b81d4f11d \
- --hash=sha256:136815f06a3ae311fae551c3df1f998a1ebd01ddd424aa5603a4336997629e95 \
- --hash=sha256:14215b71a762336254351b00ec720a8e85cada43b987da5a042e4ce3e82bd68e \
- --hash=sha256:1db4e7fefefd0f548d73e2e2e041f9df5c59e178b4c72fbac4cc6f535cfb1565 \
- --hash=sha256:1ffd9493de4c922f2a38c2bf62b831dcec90ac673ed1ca182fe11b4d8e9f2a64 \
- --hash=sha256:2006769bd1640bdf4d5641c69a3d63b71b81445473cac5ded39740a226fa88ab \
- --hash=sha256:20587d20f557fe189b7947d8e7ec5afa110ccf72a3128d61a2a387c3313f46be \
- --hash=sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e \
- --hash=sha256:27623ba66c183eca01bf9ff833875b459cad267aeeb044477fedac35e19ba907 \
- --hash=sha256:285e96d9d53422efc0d7a17c60e59f37fbf3dfa942073f666db4ac71e8d726d0 \
- --hash=sha256:2de62e8801ddfff069cd5c504ce3bc9672b23266597d4e4f50eda28846c322f2 \
- --hash=sha256:2f6c34da58ea9c1a9515621f4d9ac379871a8f21168ba1b5e09d74250de5ad62 \
- --hash=sha256:309a7de0a0ff3040acaebb35ec45d18db4b28232f21998851cfa709eeff49d62 \
- --hash=sha256:35c404d74c2926d0287fbd63ed5d27eb911eb9e4a3bb2c6d294f3cfd4a9e0c23 \
- --hash=sha256:3710a9751938947e6327ea9f3ea6332a09bf0ba0c09cae9cb1f250bd1f1549bc \
- --hash=sha256:3d59d125ffbd6d552765510e3f31ed75ebac2c7470c7274195b9161a32350284 \
- --hash=sha256:40d3ff7fc90b98c637bda91c89d51264a3dcf210cade3a2c6f838c7268d7a4ca \
- --hash=sha256:425c5f215d0eecee9a56cdb703203dda90423247421bf0d67125add85d0c4455 \
- --hash=sha256:43193c5cda5d612f247172016c4bb71251c784d7a4d9314677186a838ad34858 \
- --hash=sha256:44aeb140295a2f0659e113b31cfe92c9061622cadbc9e2a2f7b8ef6b1e29ef4b \
- --hash=sha256:47334db71978b23ebcf3c0f9f5ee98b8d65992b65c9c4f2d34c2eaf5bcaf0594 \
- --hash=sha256:4796efc4faf6b53a18e3d46343535caed491776a22af773f366534056c4e1fbc \
- --hash=sha256:4a51b48f42d9358460b78725283f04bddaf44a9358197b889657deba38f329db \
- --hash=sha256:4b67fdab07fdd3c10bb21edab3cbfe8cf5696f453afce75d815d9d7223fbe88b \
- --hash=sha256:4ec9dd88a5b71abfc74e9df5ebe7921c35cbb3b641181a531ca65cdb5e8e4dea \
- --hash=sha256:4f9fc98dad6c2eaa32fc3af1417d95b5e3d08aff968df0cd320066def971f9a6 \
- --hash=sha256:54b6a92d009cbe2fb11054ba694bc9e284dad30a26757b1e372a1fdddaf21920 \
- --hash=sha256:55f56e2ebd4e3bc50442fbc0888c9d8c94e4e06a933804e2af3e89e2f9c1c749 \
- --hash=sha256:5726cf76c982532c1863fb64d8c6dd0e4c90b6ece9feb06c9f202417a31f7dd7 \
- --hash=sha256:5d447056e2ca60382d460a604b6302d8db69476fd2015c81e7c35417cfabe4cd \
- --hash=sha256:5ed2e36c3e9b4f21dd9422f6893dec0abf2cca553af509b10cd630f878d3eb99 \
- --hash=sha256:5ff2ed8194587faf56555927b3aa10e6fb69d931e33953943bc4f837dfee2242 \
- --hash=sha256:62f60aebecfc7f4b82e3f639a7d1433a20ec32824db2199a11ad4f5e146ef5ee \
- --hash=sha256:63bc5c4ae26e4bc6be6469943b8253c0fd4e4186c43ad46e713ea61a0ba49129 \
- --hash=sha256:6b40e8d38afe634559e398cc32b1472f376a4099c75fe6299ae607e404c033b2 \
- --hash=sha256:6b493a043635eb376e50eedf7818f2f322eabbaa974e948bd8bdd29eb7ef2a51 \
- --hash=sha256:6dba5d19c4dfab08e58d5b36304b3f92f3bd5d42c1a3fa37b5ba5cdf6dfcbcee \
- --hash=sha256:6fd30dc99682dc2c603c2b315bded2799019cea829f8bf57dc6b61efde6611c8 \
- --hash=sha256:707b82d19e65c9bd28b81dde95249b07bf9f5b90ebe1ef17d9b57473f8a64b7b \
- --hash=sha256:7706f5850360ac01d80c89bcef1640683cc12ed87f42579dab6c5d3ed6888613 \
- --hash=sha256:7782afc9b6b42200f7362858f9e73b1f8316afb276d316336c0ec3bd73312742 \
- --hash=sha256:79983512b108e4a164b9c8d34de3992f76d48cadc9554c9e60b43f308988aabe \
- --hash=sha256:7f683ddc7eedd742e2889d2bfb96d69573fde1d92fcb811979cdb7165bb9c7d3 \
- --hash=sha256:82357d85de703176b5587dbe6ade8ff67f9f69a41c0733cf2425378b49954de5 \
- --hash=sha256:84450ba661fb96e9fd67629b93d2941c871ca86fc38d835d19d4225ff946a631 \
- --hash=sha256:86f4e8cca779080f66ff4f191a685ced73d2f72d50216f7112185dc02b90b9b7 \
- --hash=sha256:8cda06946eac330cbe6598f77bb54e690b4ca93f593dee1568ad22b04f347c15 \
- --hash=sha256:8ce7fd6767a1cc5a92a639b391891bf1c268b03ec7e021c7d6d902285259685c \
- --hash=sha256:8ff4e7cdfdb1ab5698e675ca622e72d58a6fa2a8aa58195de0c0061288e6e3ea \
- --hash=sha256:9289fd5dddcf57bab41d044f1756550f9e7cf0c8e373b8cdf0ce8773dc4bd417 \
- --hash=sha256:92a7e36b000bf022ef3dbb9c46bfe2d52c047d5e3f3343f43204263c5addc250 \
- --hash=sha256:92db3c28b5b2a273346bebb24857fda45601aef6ae1c011c0a997106581e8a88 \
- --hash=sha256:95c3c157765b031331dd4db3c775e58deaee050a3042fcad72cbc4189d7c8dca \
- --hash=sha256:980b4f289d1d90ca5efcf07958d3eb38ed9c0b7676bf2831a54d4f66f9c27dfa \
- --hash=sha256:9ae4ef0b3f6b41bad6366fb0ea4fc1d7ed051528e113a60fa2a65a9abb5b1d99 \
- --hash=sha256:9c98230f5042f4945f957d006edccc2af1e03ed5e37ce7c373f00a5a4daa6149 \
- --hash=sha256:9fa2566ca27d67c86569e8c85297aaf413ffab85a8960500f12ea34ff98e4c41 \
- --hash=sha256:a14969b8691f7998e74663b77b4c36c0337cb1df552da83d5c9004a93afdb574 \
- --hash=sha256:a8aacce6e2e1edcb6ac625fb0f8c3a9570ccc7bfba1f63419b3769ccf6a00ed0 \
- --hash=sha256:a8e538f46104c815be19c975572d74afb53f29650ea2025bbfaef359d2de2f7f \
- --hash=sha256:aa41e526a5d4a9dfcfbab0716c7e8a1b215abd3f3df5a45cf18a12721d31cb5d \
- --hash=sha256:aa693779a8b50cd97570e5a0f343538a8dbd3e496fa5dcb87e29406ad0299654 \
- --hash=sha256:ab22fbd9765e6954bc0bcff24c25ff71dcbfdb185fcdaca49e81bac68fe724d3 \
- --hash=sha256:ab2e5bef076f5a235c3774b4f4028a680432cded7cad37bba0fd90d64b187d19 \
- --hash=sha256:ab973df98fc99ab39080bfb0eb3a925181454d7c3ac8a1e695fddfae696d9e90 \
- --hash=sha256:af73657b7a68211996527dbfeffbb0864e043d270580c5aef06dc4b659a4b578 \
- --hash=sha256:b197e7094f232959f8f20541ead1d9862ac5ebea1d58e9849c1bf979255dfac9 \
- --hash=sha256:b295729485b06c1a0683af02a9e42d2caa9db04a373dc38a6a58cdd1e8abddf1 \
- --hash=sha256:b8831399554b92b72af5932cdbbd4ddc55c55f631bb13ff8fe4e6536a06c5c51 \
- --hash=sha256:b8dcd239c743aa2f9c22ce674a145e0a25cb1566c495928440a181ca1ccf6719 \
- --hash=sha256:bcb4f8ea87d03bc51ad04add8ceaf9b0f085ac045ab4d74e73bbc2dc033f0236 \
- --hash=sha256:bd7af3717683bea4c87acd8c0d3d5b44d56120b26fd3f8a692bdd2d5260c620a \
- --hash=sha256:bf4475b82be41b07cc5e5ff94810e6a01f276e37c2d55571e3fe175e467a1a1c \
- --hash=sha256:c3e446d253bd88f6377260d07c895816ebf33ffffd56c1c792b13bff9c3e1ade \
- --hash=sha256:c57516e58fd17d03ebe67e181a4e4e2ccab1168f8c2976c6a334d4f819fe5944 \
- --hash=sha256:c94057af19bc953643a33581844649a7fdab902624d2eb739738a30e2b3e60fc \
- --hash=sha256:cab5d0b79d987c67f3b9e9c53f54a61360422a5a0bc075f43cab5621d530c3b6 \
- --hash=sha256:ce031db0408e487fd2775d745ce30a7cd2923667cf3b69d48d219f1d8f5ddeb6 \
- --hash=sha256:cee4373f4d3ad28f1ab6290684d8e2ebdb9e7a1b74fdc39e4c211995f77bec27 \
- --hash=sha256:d5b054862739d276e09928de37c79ddeec42a6e1bfc55863be96a36ba22926f6 \
- --hash=sha256:dbe03226baf438ac4fda9e2d0715022fd579cb641c4cf639fa40d53b2fe6f3e2 \
- --hash=sha256:dc15e99b2d8a656f8e666854404f1ba54765871104e50c8e9813af8a7db07f12 \
- --hash=sha256:dcaf7c1524c0542ee2fc82cc8ec337f7a9f7edee2532421ab200d2b920fc97cf \
- --hash=sha256:dd4eda173a9fcccb5f2e2bd2a9f423d180194b1bf17cf59e3269899235b2a114 \
- --hash=sha256:dd9a8bd8900e65504a305bf8ae6fa9fbc66de94178c420791d0293702fce2df7 \
- --hash=sha256:de7376c29d95d6719048c194a9cf1a1b0393fbe8488a22008610b0361d834ecf \
- --hash=sha256:e7fdd52961feb4c96507aa649550ec2a0d527c086d284749b2f582f2d40a2e0d \
- --hash=sha256:e91f541a85298cf35433bf66f3fab2a4a2cff05c127eeca4af174f6d497f0d4b \
- --hash=sha256:e9e3c4c9e1ed40ea53acf11e2a386383c3304212c965773704e4603d589343ed \
- --hash=sha256:ee803480535c44e7f5ad00788526da7d85525cfefaf8acf8ab9a310000be4b03 \
- --hash=sha256:f09cb5a7bbe1ecae6e87901a2eb23e0256bb524a79ccc53eb0b7629fbe7677c4 \
- --hash=sha256:f19c1585933c82098c2a520f8ec1227f20e339e33aca8fa6f956f6691b784e67 \
- --hash=sha256:f1a2f519ae173b5b6a2c9d5fa3116ce16e48b3462c8b96dfdded11055e3d6365 \
- --hash=sha256:f28f891ccd15c514a0981f3b9db9aa23d62fe1a99997512b0491d2ed323d229a \
- --hash=sha256:f3e73a4255342d4eb26ef6df01e3962e73aa29baa3124a8e824c5d3364a65748 \
- --hash=sha256:f606a1881d2663630ea5b8ce2efe2111740df4b687bd78b34a8131baa007f79b \
- --hash=sha256:fe9f97feb71aa9896b81973a7bbada8c49501dc73e58a10fcef6663af95e5079 \
- --hash=sha256:ffc519621dce0c767e96b9c53f09c5d215578e10b02c285809f76509a3931482
+charset-normalizer==3.4.1 \
+ --hash=sha256:0167ddc8ab6508fe81860a57dd472b2ef4060e8d378f0cc555707126830f2537 \
+ --hash=sha256:01732659ba9b5b873fc117534143e4feefecf3b2078b0a6a2e925271bb6f4cfa \
+ --hash=sha256:01ad647cdd609225c5350561d084b42ddf732f4eeefe6e678765636791e78b9a \
+ --hash=sha256:04432ad9479fa40ec0f387795ddad4437a2b50417c69fa275e212933519ff294 \
+ --hash=sha256:0907f11d019260cdc3f94fbdb23ff9125f6b5d1039b76003b5b0ac9d6a6c9d5b \
+ --hash=sha256:0924e81d3d5e70f8126529951dac65c1010cdf117bb75eb02dd12339b57749dd \
+ --hash=sha256:09b26ae6b1abf0d27570633b2b078a2a20419c99d66fb2823173d73f188ce601 \
+ --hash=sha256:09b5e6733cbd160dcc09589227187e242a30a49ca5cefa5a7edd3f9d19ed53fd \
+ --hash=sha256:0af291f4fe114be0280cdd29d533696a77b5b49cfde5467176ecab32353395c4 \
+ --hash=sha256:0f55e69f030f7163dffe9fd0752b32f070566451afe180f99dbeeb81f511ad8d \
+ --hash=sha256:1a2bc9f351a75ef49d664206d51f8e5ede9da246602dc2d2726837620ea034b2 \
+ --hash=sha256:22e14b5d70560b8dd51ec22863f370d1e595ac3d024cb8ad7d308b4cd95f8313 \
+ --hash=sha256:234ac59ea147c59ee4da87a0c0f098e9c8d169f4dc2a159ef720f1a61bbe27cd \
+ --hash=sha256:2369eea1ee4a7610a860d88f268eb39b95cb588acd7235e02fd5a5601773d4fa \
+ --hash=sha256:237bdbe6159cff53b4f24f397d43c6336c6b0b42affbe857970cefbb620911c8 \
+ --hash=sha256:28bf57629c75e810b6ae989f03c0828d64d6b26a5e205535585f96093e405ed1 \
+ --hash=sha256:2967f74ad52c3b98de4c3b32e1a44e32975e008a9cd2a8cc8966d6a5218c5cb2 \
+ --hash=sha256:2a75d49014d118e4198bcee5ee0a6f25856b29b12dbf7cd012791f8a6cc5c496 \
+ --hash=sha256:2bdfe3ac2e1bbe5b59a1a63721eb3b95fc9b6817ae4a46debbb4e11f6232428d \
+ --hash=sha256:2d074908e1aecee37a7635990b2c6d504cd4766c7bc9fc86d63f9c09af3fa11b \
+ --hash=sha256:2fb9bd477fdea8684f78791a6de97a953c51831ee2981f8e4f583ff3b9d9687e \
+ --hash=sha256:311f30128d7d333eebd7896965bfcfbd0065f1716ec92bd5638d7748eb6f936a \
+ --hash=sha256:329ce159e82018d646c7ac45b01a430369d526569ec08516081727a20e9e4af4 \
+ --hash=sha256:345b0426edd4e18138d6528aed636de7a9ed169b4aaf9d61a8c19e39d26838ca \
+ --hash=sha256:363e2f92b0f0174b2f8238240a1a30142e3db7b957a5dd5689b0e75fb717cc78 \
+ --hash=sha256:3a3bd0dcd373514dcec91c411ddb9632c0d7d92aed7093b8c3bbb6d69ca74408 \
+ --hash=sha256:3bed14e9c89dcb10e8f3a29f9ccac4955aebe93c71ae803af79265c9ca5644c5 \
+ --hash=sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3 \
+ --hash=sha256:44ecbf16649486d4aebafeaa7ec4c9fed8b88101f4dd612dcaf65d5e815f837f \
+ --hash=sha256:4532bff1b8421fd0a320463030c7520f56a79c9024a4e88f01c537316019005a \
+ --hash=sha256:49402233c892a461407c512a19435d1ce275543138294f7ef013f0b63d5d3765 \
+ --hash=sha256:4c0907b1928a36d5a998d72d64d8eaa7244989f7aaaf947500d3a800c83a3fd6 \
+ --hash=sha256:4d86f7aff21ee58f26dcf5ae81a9addbd914115cdebcbb2217e4f0ed8982e146 \
+ --hash=sha256:5777ee0881f9499ed0f71cc82cf873d9a0ca8af166dfa0af8ec4e675b7df48e6 \
+ --hash=sha256:5df196eb874dae23dcfb968c83d4f8fdccb333330fe1fc278ac5ceeb101003a9 \
+ --hash=sha256:619a609aa74ae43d90ed2e89bdd784765de0a25ca761b93e196d938b8fd1dbbd \
+ --hash=sha256:6e27f48bcd0957c6d4cb9d6fa6b61d192d0b13d5ef563e5f2ae35feafc0d179c \
+ --hash=sha256:6ff8a4a60c227ad87030d76e99cd1698345d4491638dfa6673027c48b3cd395f \
+ --hash=sha256:73d94b58ec7fecbc7366247d3b0b10a21681004153238750bb67bd9012414545 \
+ --hash=sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176 \
+ --hash=sha256:75832c08354f595c760a804588b9357d34ec00ba1c940c15e31e96d902093770 \
+ --hash=sha256:7709f51f5f7c853f0fb938bcd3bc59cdfdc5203635ffd18bf354f6967ea0f824 \
+ --hash=sha256:78baa6d91634dfb69ec52a463534bc0df05dbd546209b79a3880a34487f4b84f \
+ --hash=sha256:7974a0b5ecd505609e3b19742b60cee7aa2aa2fb3151bc917e6e2646d7667dcf \
+ --hash=sha256:7a4f97a081603d2050bfaffdefa5b02a9ec823f8348a572e39032caa8404a487 \
+ --hash=sha256:7b1bef6280950ee6c177b326508f86cad7ad4dff12454483b51d8b7d673a2c5d \
+ --hash=sha256:7d053096f67cd1241601111b698f5cad775f97ab25d81567d3f59219b5f1adbd \
+ --hash=sha256:804a4d582ba6e5b747c625bf1255e6b1507465494a40a2130978bda7b932c90b \
+ --hash=sha256:807f52c1f798eef6cf26beb819eeb8819b1622ddfeef9d0977a8502d4db6d534 \
+ --hash=sha256:80ed5e856eb7f30115aaf94e4a08114ccc8813e6ed1b5efa74f9f82e8509858f \
+ --hash=sha256:8417cb1f36cc0bc7eaba8ccb0e04d55f0ee52df06df3ad55259b9a323555fc8b \
+ --hash=sha256:8436c508b408b82d87dc5f62496973a1805cd46727c34440b0d29d8a2f50a6c9 \
+ --hash=sha256:89149166622f4db9b4b6a449256291dc87a99ee53151c74cbd82a53c8c2f6ccd \
+ --hash=sha256:8bfa33f4f2672964266e940dd22a195989ba31669bd84629f05fab3ef4e2d125 \
+ --hash=sha256:8c60ca7339acd497a55b0ea5d506b2a2612afb2826560416f6894e8b5770d4a9 \
+ --hash=sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de \
+ --hash=sha256:955f8851919303c92343d2f66165294848d57e9bba6cf6e3625485a70a038d11 \
+ --hash=sha256:97f68b8d6831127e4787ad15e6757232e14e12060bec17091b85eb1486b91d8d \
+ --hash=sha256:9b23ca7ef998bc739bf6ffc077c2116917eabcc901f88da1b9856b210ef63f35 \
+ --hash=sha256:9f0b8b1c6d84c8034a44893aba5e767bf9c7a211e313a9605d9c617d7083829f \
+ --hash=sha256:aabfa34badd18f1da5ec1bc2715cadc8dca465868a4e73a0173466b688f29dda \
+ --hash=sha256:ab36c8eb7e454e34e60eb55ca5d241a5d18b2c6244f6827a30e451c42410b5f7 \
+ --hash=sha256:b010a7a4fd316c3c484d482922d13044979e78d1861f0e0650423144c616a46a \
+ --hash=sha256:b1ac5992a838106edb89654e0aebfc24f5848ae2547d22c2c3f66454daa11971 \
+ --hash=sha256:b7b2d86dd06bfc2ade3312a83a5c364c7ec2e3498f8734282c6c3d4b07b346b8 \
+ --hash=sha256:b97e690a2118911e39b4042088092771b4ae3fc3aa86518f84b8cf6888dbdb41 \
+ --hash=sha256:bc2722592d8998c870fa4e290c2eec2c1569b87fe58618e67d38b4665dfa680d \
+ --hash=sha256:c0429126cf75e16c4f0ad00ee0eae4242dc652290f940152ca8c75c3a4b6ee8f \
+ --hash=sha256:c30197aa96e8eed02200a83fba2657b4c3acd0f0aa4bdc9f6c1af8e8962e0757 \
+ --hash=sha256:c4c3e6da02df6fa1410a7680bd3f63d4f710232d3139089536310d027950696a \
+ --hash=sha256:c75cb2a3e389853835e84a2d8fb2b81a10645b503eca9bcb98df6b5a43eb8886 \
+ --hash=sha256:c96836c97b1238e9c9e3fe90844c947d5afbf4f4c92762679acfe19927d81d77 \
+ --hash=sha256:d7f50a1f8c450f3925cb367d011448c39239bb3eb4117c36a6d354794de4ce76 \
+ --hash=sha256:d973f03c0cb71c5ed99037b870f2be986c3c05e63622c017ea9816881d2dd247 \
+ --hash=sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85 \
+ --hash=sha256:d9c3cdf5390dcd29aa8056d13e8e99526cda0305acc038b96b30352aff5ff2bb \
+ --hash=sha256:dad3e487649f498dd991eeb901125411559b22e8d7ab25d3aeb1af367df5efd7 \
+ --hash=sha256:dccbe65bd2f7f7ec22c4ff99ed56faa1e9f785482b9bbd7c717e26fd723a1d1e \
+ --hash=sha256:dd78cfcda14a1ef52584dbb008f7ac81c1328c0f58184bf9a84c49c605002da6 \
+ --hash=sha256:e218488cd232553829be0664c2292d3af2eeeb94b32bea483cf79ac6a694e037 \
+ --hash=sha256:e358e64305fe12299a08e08978f51fc21fac060dcfcddd95453eabe5b93ed0e1 \
+ --hash=sha256:ea0d8d539afa5eb2728aa1932a988a9a7af94f18582ffae4bc10b3fbdad0626e \
+ --hash=sha256:eab677309cdb30d047996b36d34caeda1dc91149e4fdca0b1a039b3f79d9a807 \
+ --hash=sha256:eb8178fe3dba6450a3e024e95ac49ed3400e506fd4e9e5c32d30adda88cbd407 \
+ --hash=sha256:ecddf25bee22fe4fe3737a399d0d177d72bc22be6913acfab364b40bce1ba83c \
+ --hash=sha256:eea6ee1db730b3483adf394ea72f808b6e18cf3cb6454b4d86e04fa8c4327a12 \
+ --hash=sha256:f08ff5e948271dc7e18a35641d2f11a4cd8dfd5634f55228b691e62b37125eb3 \
+ --hash=sha256:f30bf9fd9be89ecb2360c7d94a711f00c09b976258846efe40db3d05828e8089 \
+ --hash=sha256:fa88b843d6e211393a37219e6a1c1df99d35e8fd90446f1118f4216e307e48cd \
+ --hash=sha256:fc54db6c8593ef7d4b2a331b58653356cf04f67c960f584edb7c3d8c97e8f39e \
+ --hash=sha256:fd4ec41f914fa74ad1b8304bbc634b3de73d2a0889bd32076342a573e0779e00 \
+ --hash=sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616
# via requests
colorama==0.4.6 ; sys_platform == 'win32' \
--hash=sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44 \
From 2a710f07c2eafd5c6d32d4721ee4403a34769361 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Tue, 8 Apr 2025 10:47:08 +0900
Subject: [PATCH 016/156] build(deps): bump jinja2 from 3.1.4 to 3.1.6 in
/examples/pip_parse (#2754)
Bumps [jinja2](https://github.com/pallets/jinja) from 3.1.4 to 3.1.6.
Release notes (sourced from jinja2's releases):
3.1.6
This is the Jinja 3.1.6 security release, which fixes security issues
but does not otherwise change behavior and should not result in breaking
changes compared to the latest feature release.
PyPI: https://pypi.org/project/Jinja2/3.1.6/
Changes: https://jinja.palletsprojects.com/en/stable/changes/#version-3-1-6
3.1.5
This is the Jinja 3.1.5 security fix release, which fixes security
issues and bugs but does not otherwise change behavior and should not
result in breaking changes compared to the latest feature release.
PyPI: https://pypi.org/project/Jinja2/3.1.5/
Changes: https://jinja.palletsprojects.com/changes/#version-3-1-5
Milestone: https://github.com/pallets/jinja/milestone/16?closed=1
- The sandboxed environment handles indirect calls to `str.format`, such as by passing a stored reference to a filter that calls its argument. GHSA-q2x7-8rv6-6q7h
- Escape template name before formatting it into error messages, to avoid issues with names that contain f-string syntax. #1792, GHSA-gmj6-6f8f-6699
- Sandbox does not allow `clear` and `pop` on known mutable sequence types. #2032
- Calling sync `render` for an async template uses `asyncio.run`. #1952
- Avoid unclosed `auto_aiter` warnings. #1960
- Return an `aclose`-able `AsyncGenerator` from `Template.generate_async`. #1960
- Avoid leaving `root_render_func()` unclosed in `Template.generate_async`. #1960
- Avoid leaving async generators unclosed in blocks, includes and extends. #1960
- The runtime uses the correct `concat` function for the current environment when calling block references. #1701
- Make `|unique` async-aware, allowing it to be used after another async-aware filter. #1781
- `|int` filter handles `OverflowError` from scientific notation. #1921
- Make compiling deterministic for tuple unpacking in a `{% set ... %}` call. #2021
- Fix dunder protocol (`copy`/`pickle`/etc) interaction with `Undefined` objects. #2025
- Fix `copy`/`pickle` support for the internal `missing` object. #2027
- `Environment.overlay(enable_async)` is applied correctly. #2061
- The error message from `FileSystemLoader` includes the paths that were searched. #1661
- `PackageLoader` shows a clearer error message when the package does not contain the templates directory. #1705
- Improve annotations for methods returning copies. #1880
- `urlize` does not add `mailto:` to values like `@a@b`. #1870
- Tests decorated with `@pass_context` can be used with the `|select` filter. #1624
- Using `set` for multiple assignment (`a, b = 1, 2`) does not fail when the target is a namespace attribute. #1413
- Using `set` in all branches of `{% if %}{% elif %}{% else %}` blocks does not cause the variable to be considered initially undefined. #1253
Changelog (sourced from jinja2's changelog):
Version 3.1.6, released 2025-03-05
- The `|attr` filter does not bypass the environment's attribute lookup, allowing the sandbox to apply its checks. GHSA-cpwx-vrp4-4pq7
Dependabot will resolve any conflicts with this PR as long as you don't
alter it yourself. You can also trigger a rebase manually by commenting
`@dependabot rebase`.
[//]: # (dependabot-automerge-start)
[//]: # (dependabot-automerge-end)
---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR:
- `@dependabot rebase` will rebase this PR
- `@dependabot recreate` will recreate this PR, overwriting any edits
that have been made to it
- `@dependabot merge` will merge this PR after your CI passes on it
- `@dependabot squash and merge` will squash and merge this PR after
your CI passes on it
- `@dependabot cancel merge` will cancel a previously requested merge
and block automerging
- `@dependabot reopen` will reopen this PR if it is closed
- `@dependabot close` will close this PR and stop Dependabot recreating
it. You can achieve the same result by closing it manually
- `@dependabot show ignore conditions` will show all
of the ignore conditions of the specified dependency
- `@dependabot ignore this major version` will close this PR and stop
Dependabot creating any more for this major version (unless you reopen
the PR or upgrade to it yourself)
- `@dependabot ignore this minor version` will close this PR and stop
Dependabot creating any more for this minor version (unless you reopen
the PR or upgrade to it yourself)
- `@dependabot ignore this dependency` will close this PR and stop
Dependabot creating any more for this dependency (unless you reopen the
PR or upgrade to it yourself)
You can disable automated security fix PRs for this repo from the
[Security Alerts
page](https://github.com/bazel-contrib/rules_python/network/alerts).
Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
examples/pip_parse/requirements_lock.txt | 6 +++---
examples/pip_parse/requirements_windows.txt | 6 +++---
2 files changed, 6 insertions(+), 6 deletions(-)
diff --git a/examples/pip_parse/requirements_lock.txt b/examples/pip_parse/requirements_lock.txt
index 5e7a198c38..aeac61eff9 100644
--- a/examples/pip_parse/requirements_lock.txt
+++ b/examples/pip_parse/requirements_lock.txt
@@ -36,9 +36,9 @@ importlib-metadata==6.8.0 \
--hash=sha256:3ebb78df84a805d7698245025b975d9d67053cd94c79245ba4b3eb694abe68bb \
--hash=sha256:dbace7892d8c0c4ac1ad096662232f831d4e64f4c4545bd53016a3e9d4654743
# via sphinx
-jinja2==3.1.4 \
- --hash=sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369 \
- --hash=sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d
+jinja2==3.1.6 \
+ --hash=sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d \
+ --hash=sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67
# via sphinx
markupsafe==2.1.3 \
--hash=sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e \
diff --git a/examples/pip_parse/requirements_windows.txt b/examples/pip_parse/requirements_windows.txt
index 4b1969255a..61a6682047 100644
--- a/examples/pip_parse/requirements_windows.txt
+++ b/examples/pip_parse/requirements_windows.txt
@@ -40,9 +40,9 @@ importlib-metadata==6.8.0 \
--hash=sha256:3ebb78df84a805d7698245025b975d9d67053cd94c79245ba4b3eb694abe68bb \
--hash=sha256:dbace7892d8c0c4ac1ad096662232f831d4e64f4c4545bd53016a3e9d4654743
# via sphinx
-jinja2==3.1.4 \
- --hash=sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369 \
- --hash=sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d
+jinja2==3.1.6 \
+ --hash=sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d \
+ --hash=sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67
# via sphinx
markupsafe==2.1.3 \
--hash=sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e \
From 6821709d7c79e9a1156287d06522de674e5c376d Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Tue, 8 Apr 2025 02:21:43 +0000
Subject: [PATCH 017/156] build(deps): bump cryptography from 43.0.3 to 44.0.1
in /tools/publish (#2756)
Bumps [cryptography](https://github.com/pyca/cryptography) from 43.0.3
to 44.0.1.
Changelog (sourced from cryptography's changelog):
44.0.1 - 2025-02-11
* Updated Windows, macOS, and Linux wheels to be compiled with OpenSSL 3.4.1.
* We now build ``armv7l`` ``manylinux`` wheels and publish them to PyPI.
* We now build ``manylinux_2_34`` wheels and publish them to PyPI.
.. _v44-0-0:
44.0.0 - 2024-11-27
- BACKWARDS INCOMPATIBLE: Dropped support for LibreSSL < 3.9.
- Deprecated Python 3.7 support. Python 3.7 is no longer supported by the Python core team. Support for Python 3.7 will be removed in a future cryptography release.
- Updated Windows, macOS, and Linux wheels to be compiled with OpenSSL 3.4.0.
- macOS wheels are now built against the macOS 10.13 SDK. Users on older versions of macOS should upgrade, or they will need to build cryptography themselves.
- Enforce the :rfc:`5280` requirement that extended key usage extensions must not be empty.
- Added support for timestamp extraction to the :class:`~cryptography.fernet.MultiFernet` class.
- Relax the Authority Key Identifier requirements on root CA certificates during X.509 verification to allow fields permitted by :rfc:`5280` but forbidden by the CA/Browser BRs.
- Added support for :class:`~cryptography.hazmat.primitives.kdf.argon2.Argon2id` when using OpenSSL 3.2.0+.
- Added support for the :class:`~cryptography.x509.Admissions` certificate extension.
- Added basic support for PKCS7 decryption (including S/MIME 3.2) via :func:`~cryptography.hazmat.primitives.serialization.pkcs7.pkcs7_decrypt_der`, :func:`~cryptography.hazmat.primitives.serialization.pkcs7.pkcs7_decrypt_pem`, and :func:`~cryptography.hazmat.primitives.serialization.pkcs7.pkcs7_decrypt_smime`.
.. _v43-0-3:
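For illustration, a minimal sketch of deriving a key with the Argon2id KDF mentioned above — assuming the cryptography 44.x API and an OpenSSL 3.2.0+ build; the salt and cost parameters here are placeholders, not recommendations:

```python
import os

from cryptography.hazmat.primitives.kdf.argon2 import Argon2id

# Derive a 32-byte key from a password.
salt = os.urandom(16)
kdf = Argon2id(
    salt=salt,
    length=32,              # size of the derived key, in bytes
    iterations=1,           # time cost
    lanes=4,                # degree of parallelism
    memory_cost=64 * 1024,  # memory cost, in KiB
)
key = kdf.derive(b"my great password")

# KDF instances are single-use; build a fresh one to verify the derived key.
Argon2id(
    salt=salt,
    length=32,
    iterations=1,
    lanes=4,
    memory_cost=64 * 1024,
).verify(b"my great password", key)
```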
Dependabot will resolve any conflicts with this PR as long as you don't
alter it yourself. You can also trigger a rebase manually by commenting
`@dependabot rebase`.
[//]: # (dependabot-automerge-start)
[//]: # (dependabot-automerge-end)
---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR:
- `@dependabot rebase` will rebase this PR
- `@dependabot recreate` will recreate this PR, overwriting any edits
that have been made to it
- `@dependabot merge` will merge this PR after your CI passes on it
- `@dependabot squash and merge` will squash and merge this PR after
your CI passes on it
- `@dependabot cancel merge` will cancel a previously requested merge
and block automerging
- `@dependabot reopen` will reopen this PR if it is closed
- `@dependabot close` will close this PR and stop Dependabot recreating
it. You can achieve the same result by closing it manually
- `@dependabot show ignore conditions` will show all
of the ignore conditions of the specified dependency
- `@dependabot ignore this major version` will close this PR and stop
Dependabot creating any more for this major version (unless you reopen
the PR or upgrade to it yourself)
- `@dependabot ignore this minor version` will close this PR and stop
Dependabot creating any more for this minor version (unless you reopen
the PR or upgrade to it yourself)
- `@dependabot ignore this dependency` will close this PR and stop
Dependabot creating any more for this dependency (unless you reopen the
PR or upgrade to it yourself)
You can disable automated security fix PRs for this repo from the
[Security Alerts
page](https://github.com/bazel-contrib/rules_python/network/alerts).
Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
tools/publish/requirements_linux.txt | 60 +++++++++++++-----------
tools/publish/requirements_universal.txt | 60 +++++++++++++-----------
2 files changed, 64 insertions(+), 56 deletions(-)
diff --git a/tools/publish/requirements_linux.txt b/tools/publish/requirements_linux.txt
index 90b07d4c97..40d987b16d 100644
--- a/tools/publish/requirements_linux.txt
+++ b/tools/publish/requirements_linux.txt
@@ -173,34 +173,38 @@ charset-normalizer==3.4.1 \
--hash=sha256:fd4ec41f914fa74ad1b8304bbc634b3de73d2a0889bd32076342a573e0779e00 \
--hash=sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616
# via requests
-cryptography==43.0.3 \
- --hash=sha256:0c580952eef9bf68c4747774cde7ec1d85a6e61de97281f2dba83c7d2c806362 \
- --hash=sha256:0f996e7268af62598f2fc1204afa98a3b5712313a55c4c9d434aef49cadc91d4 \
- --hash=sha256:1ec0bcf7e17c0c5669d881b1cd38c4972fade441b27bda1051665faaa89bdcaa \
- --hash=sha256:281c945d0e28c92ca5e5930664c1cefd85efe80e5c0d2bc58dd63383fda29f83 \
- --hash=sha256:2ce6fae5bdad59577b44e4dfed356944fbf1d925269114c28be377692643b4ff \
- --hash=sha256:315b9001266a492a6ff443b61238f956b214dbec9910a081ba5b6646a055a805 \
- --hash=sha256:443c4a81bb10daed9a8f334365fe52542771f25aedaf889fd323a853ce7377d6 \
- --hash=sha256:4a02ded6cd4f0a5562a8887df8b3bd14e822a90f97ac5e544c162899bc467664 \
- --hash=sha256:53a583b6637ab4c4e3591a15bc9db855b8d9dee9a669b550f311480acab6eb08 \
- --hash=sha256:63efa177ff54aec6e1c0aefaa1a241232dcd37413835a9b674b6e3f0ae2bfd3e \
- --hash=sha256:74f57f24754fe349223792466a709f8e0c093205ff0dca557af51072ff47ab18 \
- --hash=sha256:7e1ce50266f4f70bf41a2c6dc4358afadae90e2a1e5342d3c08883df1675374f \
- --hash=sha256:81ef806b1fef6b06dcebad789f988d3b37ccaee225695cf3e07648eee0fc6b73 \
- --hash=sha256:846da004a5804145a5f441b8530b4bf35afbf7da70f82409f151695b127213d5 \
- --hash=sha256:8ac43ae87929a5982f5948ceda07001ee5e83227fd69cf55b109144938d96984 \
- --hash=sha256:9762ea51a8fc2a88b70cf2995e5675b38d93bf36bd67d91721c309df184f49bd \
- --hash=sha256:a2a431ee15799d6db9fe80c82b055bae5a752bef645bba795e8e52687c69efe3 \
- --hash=sha256:bf7a1932ac4176486eab36a19ed4c0492da5d97123f1406cf15e41b05e787d2e \
- --hash=sha256:c2e6fc39c4ab499049df3bdf567f768a723a5e8464816e8f009f121a5a9f4405 \
- --hash=sha256:cbeb489927bd7af4aa98d4b261af9a5bc025bd87f0e3547e11584be9e9427be2 \
- --hash=sha256:d03b5621a135bffecad2c73e9f4deb1a0f977b9a8ffe6f8e002bf6c9d07b918c \
- --hash=sha256:d56e96520b1020449bbace2b78b603442e7e378a9b3bd68de65c782db1507995 \
- --hash=sha256:df6b6c6d742395dd77a23ea3728ab62f98379eff8fb61be2744d4679ab678f73 \
- --hash=sha256:e1be4655c7ef6e1bbe6b5d0403526601323420bcf414598955968c9ef3eb7d16 \
- --hash=sha256:f18c716be16bc1fea8e95def49edf46b82fccaa88587a45f8dc0ff6ab5d8e0a7 \
- --hash=sha256:f46304d6f0c6ab8e52770addfa2fc41e6629495548862279641972b6215451cd \
- --hash=sha256:f7b178f11ed3664fd0e995a47ed2b5ff0a12d893e41dd0494f406d1cf555cab7
+cryptography==44.0.1 \
+ --hash=sha256:00918d859aa4e57db8299607086f793fa7813ae2ff5a4637e318a25ef82730f7 \
+ --hash=sha256:1e8d181e90a777b63f3f0caa836844a1182f1f265687fac2115fcf245f5fbec3 \
+ --hash=sha256:1f9a92144fa0c877117e9748c74501bea842f93d21ee00b0cf922846d9d0b183 \
+ --hash=sha256:21377472ca4ada2906bc313168c9dc7b1d7ca417b63c1c3011d0c74b7de9ae69 \
+ --hash=sha256:24979e9f2040c953a94bf3c6782e67795a4c260734e5264dceea65c8f4bae64a \
+ --hash=sha256:2a46a89ad3e6176223b632056f321bc7de36b9f9b93b2cc1cccf935a3849dc62 \
+ --hash=sha256:322eb03ecc62784536bc173f1483e76747aafeb69c8728df48537eb431cd1911 \
+ --hash=sha256:436df4f203482f41aad60ed1813811ac4ab102765ecae7a2bbb1dbb66dcff5a7 \
+ --hash=sha256:4f422e8c6a28cf8b7f883eb790695d6d45b0c385a2583073f3cec434cc705e1a \
+ --hash=sha256:53f23339864b617a3dfc2b0ac8d5c432625c80014c25caac9082314e9de56f41 \
+ --hash=sha256:5fed5cd6102bb4eb843e3315d2bf25fede494509bddadb81e03a859c1bc17b83 \
+ --hash=sha256:610a83540765a8d8ce0f351ce42e26e53e1f774a6efb71eb1b41eb01d01c3d12 \
+ --hash=sha256:6c8acf6f3d1f47acb2248ec3ea261171a671f3d9428e34ad0357148d492c7864 \
+ --hash=sha256:6f76fdd6fd048576a04c5210d53aa04ca34d2ed63336d4abd306d0cbe298fddf \
+ --hash=sha256:72198e2b5925155497a5a3e8c216c7fb3e64c16ccee11f0e7da272fa93b35c4c \
+ --hash=sha256:887143b9ff6bad2b7570da75a7fe8bbf5f65276365ac259a5d2d5147a73775f2 \
+ --hash=sha256:888fcc3fce0c888785a4876ca55f9f43787f4c5c1cc1e2e0da71ad481ff82c5b \
+ --hash=sha256:8e6a85a93d0642bd774460a86513c5d9d80b5c002ca9693e63f6e540f1815ed0 \
+ --hash=sha256:94f99f2b943b354a5b6307d7e8d19f5c423a794462bde2bf310c770ba052b1c4 \
+ --hash=sha256:9b336599e2cb77b1008cb2ac264b290803ec5e8e89d618a5e978ff5eb6f715d9 \
+ --hash=sha256:a2d8a7045e1ab9b9f803f0d9531ead85f90c5f2859e653b61497228b18452008 \
+ --hash=sha256:b8272f257cf1cbd3f2e120f14c68bff2b6bdfcc157fafdee84a1b795efd72862 \
+ --hash=sha256:bf688f615c29bfe9dfc44312ca470989279f0e94bb9f631f85e3459af8efc009 \
+ --hash=sha256:d9c5b9f698a83c8bd71e0f4d3f9f839ef244798e5ffe96febfa9714717db7af7 \
+ --hash=sha256:dd7c7e2d71d908dc0f8d2027e1604102140d84b155e658c20e8ad1304317691f \
+ --hash=sha256:df978682c1504fc93b3209de21aeabf2375cb1571d4e61907b3e7a2540e83026 \
+ --hash=sha256:e403f7f766ded778ecdb790da786b418a9f2394f36e8cc8b796cc056ab05f44f \
+ --hash=sha256:eb3889330f2a4a148abead555399ec9a32b13b7c8ba969b72d8e500eb7ef84cd \
+ --hash=sha256:f4daefc971c2d1f82f03097dc6f216744a6cd2ac0f04c68fb935ea2ba2a0d420 \
+ --hash=sha256:f51f5705ab27898afda1aaa430f34ad90dc117421057782022edf0600bec5f14 \
+ --hash=sha256:fd0ee90072861e276b0ff08bd627abec29e32a53b2be44e41dbcdf87cbee2b00
# via secretstorage
docutils==0.21.2 \
--hash=sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f \
diff --git a/tools/publish/requirements_universal.txt b/tools/publish/requirements_universal.txt
index 9b145fce49..c8bc0bb258 100644
--- a/tools/publish/requirements_universal.txt
+++ b/tools/publish/requirements_universal.txt
@@ -173,34 +173,38 @@ charset-normalizer==3.4.1 \
--hash=sha256:fd4ec41f914fa74ad1b8304bbc634b3de73d2a0889bd32076342a573e0779e00 \
--hash=sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616
# via requests
-cryptography==43.0.3 ; sys_platform == 'linux' \
- --hash=sha256:0c580952eef9bf68c4747774cde7ec1d85a6e61de97281f2dba83c7d2c806362 \
- --hash=sha256:0f996e7268af62598f2fc1204afa98a3b5712313a55c4c9d434aef49cadc91d4 \
- --hash=sha256:1ec0bcf7e17c0c5669d881b1cd38c4972fade441b27bda1051665faaa89bdcaa \
- --hash=sha256:281c945d0e28c92ca5e5930664c1cefd85efe80e5c0d2bc58dd63383fda29f83 \
- --hash=sha256:2ce6fae5bdad59577b44e4dfed356944fbf1d925269114c28be377692643b4ff \
- --hash=sha256:315b9001266a492a6ff443b61238f956b214dbec9910a081ba5b6646a055a805 \
- --hash=sha256:443c4a81bb10daed9a8f334365fe52542771f25aedaf889fd323a853ce7377d6 \
- --hash=sha256:4a02ded6cd4f0a5562a8887df8b3bd14e822a90f97ac5e544c162899bc467664 \
- --hash=sha256:53a583b6637ab4c4e3591a15bc9db855b8d9dee9a669b550f311480acab6eb08 \
- --hash=sha256:63efa177ff54aec6e1c0aefaa1a241232dcd37413835a9b674b6e3f0ae2bfd3e \
- --hash=sha256:74f57f24754fe349223792466a709f8e0c093205ff0dca557af51072ff47ab18 \
- --hash=sha256:7e1ce50266f4f70bf41a2c6dc4358afadae90e2a1e5342d3c08883df1675374f \
- --hash=sha256:81ef806b1fef6b06dcebad789f988d3b37ccaee225695cf3e07648eee0fc6b73 \
- --hash=sha256:846da004a5804145a5f441b8530b4bf35afbf7da70f82409f151695b127213d5 \
- --hash=sha256:8ac43ae87929a5982f5948ceda07001ee5e83227fd69cf55b109144938d96984 \
- --hash=sha256:9762ea51a8fc2a88b70cf2995e5675b38d93bf36bd67d91721c309df184f49bd \
- --hash=sha256:a2a431ee15799d6db9fe80c82b055bae5a752bef645bba795e8e52687c69efe3 \
- --hash=sha256:bf7a1932ac4176486eab36a19ed4c0492da5d97123f1406cf15e41b05e787d2e \
- --hash=sha256:c2e6fc39c4ab499049df3bdf567f768a723a5e8464816e8f009f121a5a9f4405 \
- --hash=sha256:cbeb489927bd7af4aa98d4b261af9a5bc025bd87f0e3547e11584be9e9427be2 \
- --hash=sha256:d03b5621a135bffecad2c73e9f4deb1a0f977b9a8ffe6f8e002bf6c9d07b918c \
- --hash=sha256:d56e96520b1020449bbace2b78b603442e7e378a9b3bd68de65c782db1507995 \
- --hash=sha256:df6b6c6d742395dd77a23ea3728ab62f98379eff8fb61be2744d4679ab678f73 \
- --hash=sha256:e1be4655c7ef6e1bbe6b5d0403526601323420bcf414598955968c9ef3eb7d16 \
- --hash=sha256:f18c716be16bc1fea8e95def49edf46b82fccaa88587a45f8dc0ff6ab5d8e0a7 \
- --hash=sha256:f46304d6f0c6ab8e52770addfa2fc41e6629495548862279641972b6215451cd \
- --hash=sha256:f7b178f11ed3664fd0e995a47ed2b5ff0a12d893e41dd0494f406d1cf555cab7
+cryptography==44.0.1 ; sys_platform == 'linux' \
+ --hash=sha256:00918d859aa4e57db8299607086f793fa7813ae2ff5a4637e318a25ef82730f7 \
+ --hash=sha256:1e8d181e90a777b63f3f0caa836844a1182f1f265687fac2115fcf245f5fbec3 \
+ --hash=sha256:1f9a92144fa0c877117e9748c74501bea842f93d21ee00b0cf922846d9d0b183 \
+ --hash=sha256:21377472ca4ada2906bc313168c9dc7b1d7ca417b63c1c3011d0c74b7de9ae69 \
+ --hash=sha256:24979e9f2040c953a94bf3c6782e67795a4c260734e5264dceea65c8f4bae64a \
+ --hash=sha256:2a46a89ad3e6176223b632056f321bc7de36b9f9b93b2cc1cccf935a3849dc62 \
+ --hash=sha256:322eb03ecc62784536bc173f1483e76747aafeb69c8728df48537eb431cd1911 \
+ --hash=sha256:436df4f203482f41aad60ed1813811ac4ab102765ecae7a2bbb1dbb66dcff5a7 \
+ --hash=sha256:4f422e8c6a28cf8b7f883eb790695d6d45b0c385a2583073f3cec434cc705e1a \
+ --hash=sha256:53f23339864b617a3dfc2b0ac8d5c432625c80014c25caac9082314e9de56f41 \
+ --hash=sha256:5fed5cd6102bb4eb843e3315d2bf25fede494509bddadb81e03a859c1bc17b83 \
+ --hash=sha256:610a83540765a8d8ce0f351ce42e26e53e1f774a6efb71eb1b41eb01d01c3d12 \
+ --hash=sha256:6c8acf6f3d1f47acb2248ec3ea261171a671f3d9428e34ad0357148d492c7864 \
+ --hash=sha256:6f76fdd6fd048576a04c5210d53aa04ca34d2ed63336d4abd306d0cbe298fddf \
+ --hash=sha256:72198e2b5925155497a5a3e8c216c7fb3e64c16ccee11f0e7da272fa93b35c4c \
+ --hash=sha256:887143b9ff6bad2b7570da75a7fe8bbf5f65276365ac259a5d2d5147a73775f2 \
+ --hash=sha256:888fcc3fce0c888785a4876ca55f9f43787f4c5c1cc1e2e0da71ad481ff82c5b \
+ --hash=sha256:8e6a85a93d0642bd774460a86513c5d9d80b5c002ca9693e63f6e540f1815ed0 \
+ --hash=sha256:94f99f2b943b354a5b6307d7e8d19f5c423a794462bde2bf310c770ba052b1c4 \
+ --hash=sha256:9b336599e2cb77b1008cb2ac264b290803ec5e8e89d618a5e978ff5eb6f715d9 \
+ --hash=sha256:a2d8a7045e1ab9b9f803f0d9531ead85f90c5f2859e653b61497228b18452008 \
+ --hash=sha256:b8272f257cf1cbd3f2e120f14c68bff2b6bdfcc157fafdee84a1b795efd72862 \
+ --hash=sha256:bf688f615c29bfe9dfc44312ca470989279f0e94bb9f631f85e3459af8efc009 \
+ --hash=sha256:d9c5b9f698a83c8bd71e0f4d3f9f839ef244798e5ffe96febfa9714717db7af7 \
+ --hash=sha256:dd7c7e2d71d908dc0f8d2027e1604102140d84b155e658c20e8ad1304317691f \
+ --hash=sha256:df978682c1504fc93b3209de21aeabf2375cb1571d4e61907b3e7a2540e83026 \
+ --hash=sha256:e403f7f766ded778ecdb790da786b418a9f2394f36e8cc8b796cc056ab05f44f \
+ --hash=sha256:eb3889330f2a4a148abead555399ec9a32b13b7c8ba969b72d8e500eb7ef84cd \
+ --hash=sha256:f4daefc971c2d1f82f03097dc6f216744a6cd2ac0f04c68fb935ea2ba2a0d420 \
+ --hash=sha256:f51f5705ab27898afda1aaa430f34ad90dc117421057782022edf0600bec5f14 \
+ --hash=sha256:fd0ee90072861e276b0ff08bd627abec29e32a53b2be44e41dbcdf87cbee2b00
# via secretstorage
docutils==0.21.2 \
--hash=sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f \
From 34e433b75373aa9ad5645f370a0e0a4025e328da Mon Sep 17 00:00:00 2001
From: Richard Levasseur
Date: Tue, 8 Apr 2025 22:43:06 -0700
Subject: [PATCH 018/156] feat(toolchains): create toolchains from locally
installed python (#2742)
This adds docs and public APIs for using a locally installed python for
a toolchain.
Work towards https://github.com/bazel-contrib/rules_python/issues/2070
---------
Co-authored-by: Ignas Anikevicius <240938+aignas@users.noreply.github.com>
---
CHANGELOG.md | 4 +
docs/BUILD.bazel | 1 +
docs/toolchains.md | 97 ++++++++++++++++++-
python/BUILD.bazel | 1 +
python/local_toolchains/BUILD.bazel | 18 ++++
python/local_toolchains/repos.bzl | 18 ++++
python/private/BUILD.bazel | 18 ++++
.../integration/local_toolchains/MODULE.bazel | 4 +-
8 files changed, 155 insertions(+), 6 deletions(-)
create mode 100644 python/local_toolchains/BUILD.bazel
create mode 100644 python/local_toolchains/repos.bzl
diff --git a/CHANGELOG.md b/CHANGELOG.md
index abe718c389..7aeb135788 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -116,6 +116,10 @@ Unreleased changes template.
allow specifying links to create within the venv site packages (only
applicable with {obj}`--bootstrap_impl=script`)
([#2156](https://github.com/bazelbuild/rules_python/issues/2156)).
+* (toolchains) Local Python installs can be used to create a toolchain
+  equivalent to the standard toolchains. See the [Local toolchains] docs for how to
+ configure them.
+
{#v0-0-0-removed}
### Removed
diff --git a/docs/BUILD.bazel b/docs/BUILD.bazel
index 29eac6e714..25da682012 100644
--- a/docs/BUILD.bazel
+++ b/docs/BUILD.bazel
@@ -108,6 +108,7 @@ sphinx_stardocs(
"//python/cc:py_cc_toolchain_bzl",
"//python/cc:py_cc_toolchain_info_bzl",
"//python/entry_points:py_console_script_binary_bzl",
+ "//python/local_toolchains:repos_bzl",
"//python/private:attr_builders_bzl",
"//python/private:builders_util_bzl",
"//python/private:py_binary_rule_bzl",
diff --git a/docs/toolchains.md b/docs/toolchains.md
index 73a8a48121..5cd9eb268e 100644
--- a/docs/toolchains.md
+++ b/docs/toolchains.md
@@ -199,10 +199,10 @@ Remember to call `use_repo()` to make repos visible to your module:
:::{deprecated} 1.1.0
-The toolchain specific `py_binary` and `py_test` symbols are aliases to the regular rules.
+The toolchain specific `py_binary` and `py_test` symbols are aliases to the regular rules.
i.e. Deprecated `load("@python_versions//3.11:defs.bzl", "py_binary")` & `load("@python_versions//3.11:defs.bzl", "py_test")`
-Usages of them should be changed to load the regular rules directly;
+Usages of them should be changed to load the regular rules directly;
i.e. Use `load("@rules_python//python:py_binary.bzl", "py_binary")` & `load("@rules_python//python:py_test.bzl", "py_test")` and then specify the `python_version` when using the rules corresponding to the python version you defined in your toolchain. {ref}`Library modules with version constraints`
:::
@@ -327,7 +327,97 @@ After registration, your Python targets will use the toolchain's interpreter dur
is still used to 'bootstrap' Python targets (see https://github.com/bazel-contrib/rules_python/issues/691).
You may also find some quirks while using this toolchain. Please refer to [python-build-standalone documentation's _Quirks_ section](https://gregoryszorc.com/docs/python-build-standalone/main/quirks.html).
-## Autodetecting toolchain
+## Local toolchain
+
+It's possible to use a locally installed Python runtime instead of the regular
+prebuilt, remotely downloaded ones. A local toolchain contains the Python
+runtime metadata (Python version, headers, ABI flags, etc.) that the regular
+remotely downloaded runtimes contain, which makes it possible to build e.g. C
+extensions (unlike the autodetecting and runtime environment toolchains).
+
+For simple cases, some rules are provided that will introspect
+a Python installation and create an appropriate Bazel definition from
+it. To do this, three pieces need to be wired together:
+
+1. Specify a path or command to a Python interpreter (multiple can be defined).
+2. Create toolchains for the runtimes in (1).
+3. Register the toolchains created by (2).
+
+Below is an example that uses `python3` from `PATH` to find the
+interpreter and then introspects its installation to generate a full toolchain.
+
+```starlark
+# File: MODULE.bazel
+
+local_runtime_repo = use_repo_rule(
+ "@rules_python//python/local_toolchains:repos.bzl",
+ "local_runtime_repo",
+ dev_dependency = True,
+)
+
+local_runtime_toolchains_repo = use_repo_rule(
+    "@rules_python//python/local_toolchains:repos.bzl",
+    "local_runtime_toolchains_repo",
+ dev_dependency = True,
+)
+
+# Step 1: Define the Python runtime
+local_runtime_repo(
+ name = "local_python3",
+ interpreter_path = "python3",
+ on_failure = "fail",
+)
+
+# Step 2: Create toolchains for the runtimes
+local_runtime_toolchains_repo(
+ name = "local_toolchains",
+ runtimes = ["local_python3"],
+)
+
+# Step 3: Register the toolchains
+register_toolchains("@local_toolchains//:all", dev_dependency = True)
+```
+
+Note that `register_toolchains` will insert the local toolchain earlier in the
+toolchain ordering, so it will take precedence over other registered toolchains.
+
+:::{important}
+Be sure to set `dev_dependency = True`. Using a local toolchain only makes sense
+for the root module.
+
+If an intermediate module does it, then the `register_toolchains()` call will
+take precedence over the default rules_python toolchains and cause problems for
+downstream modules.
+:::
+
+Multiple runtimes and/or toolchains can be defined, which allows for multiple
+Python versions and/or platforms to be configured in a single `MODULE.bazel`.
+
+## Runtime environment toolchain
+
+The runtime environment toolchain is a minimal toolchain that doesn't provide
+information about Python at build time. In particular, this means it is not able
+to build C extensions -- doing so requires knowing, at build time, what Python
+headers to use.
+
+In effect, all it does is generate a small wrapper script that simply calls e.g.
+`/usr/bin/env python3` to run a program. This makes it easy to change what
+Python is used to run a program, but also makes it easy to use a Python version
+that isn't compatible with build-time assumptions.
+
+```
+register_toolchains("@rules_python//python/runtime_env_toolchains:all")
+```
+
+Note that this toolchain has no constraints, i.e. it will match any platform,
+Python version, etc.
+
+:::{seealso}
+[Local toolchain], which creates a more fully featured toolchain from a
+locally installed Python.
+:::
+
+### Autodetecting toolchain
The autodetecting toolchain is a deprecated toolchain that is built into Bazel.
Its name is a bit misleading: it doesn't autodetect anything. All it does is
@@ -345,7 +435,6 @@ To aid migration off the Bazel-builtin toolchain, rules_python provides
{bzl:obj}`@rules_python//python/runtime_env_toolchains:all`. This is an equivalent
toolchain, but is implemented using rules_python's objects.
-
## Custom toolchains
While rules_python provides toolchains by default, it is not required to use
diff --git a/python/BUILD.bazel b/python/BUILD.bazel
index a699c81cc4..3389a0dacc 100644
--- a/python/BUILD.bazel
+++ b/python/BUILD.bazel
@@ -41,6 +41,7 @@ filegroup(
"//python/constraints:distribution",
"//python/entry_points:distribution",
"//python/extensions:distribution",
+ "//python/local_toolchains:distribution",
"//python/pip_install:distribution",
"//python/private:distribution",
"//python/runfiles:distribution",
diff --git a/python/local_toolchains/BUILD.bazel b/python/local_toolchains/BUILD.bazel
new file mode 100644
index 0000000000..211f3e21a7
--- /dev/null
+++ b/python/local_toolchains/BUILD.bazel
@@ -0,0 +1,18 @@
+load("@bazel_skylib//:bzl_library.bzl", "bzl_library")
+
+package(default_visibility = ["//:__subpackages__"])
+
+bzl_library(
+ name = "repos_bzl",
+ srcs = ["repos.bzl"],
+ visibility = ["//visibility:public"],
+ deps = [
+ "//python/private:local_runtime_repo_bzl",
+ "//python/private:local_runtime_toolchains_repo_bzl",
+ ],
+)
+
+filegroup(
+ name = "distribution",
+ srcs = glob(["**"]),
+)
diff --git a/python/local_toolchains/repos.bzl b/python/local_toolchains/repos.bzl
new file mode 100644
index 0000000000..d1b45cfd7f
--- /dev/null
+++ b/python/local_toolchains/repos.bzl
@@ -0,0 +1,18 @@
+"""Rules/macros for repository phase for local toolchains.
+
+:::{versionadded} VERSION_NEXT_FEATURE
+:::
+"""
+
+load(
+ "@rules_python//python/private:local_runtime_repo.bzl",
+ _local_runtime_repo = "local_runtime_repo",
+)
+load(
+ "@rules_python//python/private:local_runtime_toolchains_repo.bzl",
+ _local_runtime_toolchains_repo = "local_runtime_toolchains_repo",
+)
+
+local_runtime_repo = _local_runtime_repo
+
+local_runtime_toolchains_repo = _local_runtime_toolchains_repo
diff --git a/python/private/BUILD.bazel b/python/private/BUILD.bazel
index ef4580e1ce..b63f446be3 100644
--- a/python/private/BUILD.bazel
+++ b/python/private/BUILD.bazel
@@ -205,6 +205,24 @@ bzl_library(
],
)
+bzl_library(
+ name = "local_runtime_repo_bzl",
+ srcs = ["local_runtime_repo.bzl"],
+ deps = [
+ ":enum_bzl",
+ ":repo_utils.bzl",
+ ],
+)
+
+bzl_library(
+ name = "local_runtime_toolchains_repo_bzl",
+ srcs = ["local_runtime_toolchains_repo.bzl"],
+ deps = [
+ ":repo_utils.bzl",
+ ":text_util_bzl",
+ ],
+)
+
bzl_library(
name = "normalize_name_bzl",
srcs = ["normalize_name.bzl"],
diff --git a/tests/integration/local_toolchains/MODULE.bazel b/tests/integration/local_toolchains/MODULE.bazel
index d4ef12e952..98f1ed9ac4 100644
--- a/tests/integration/local_toolchains/MODULE.bazel
+++ b/tests/integration/local_toolchains/MODULE.bazel
@@ -19,9 +19,9 @@ local_path_override(
path = "../../..",
)
-local_runtime_repo = use_repo_rule("@rules_python//python/private:local_runtime_repo.bzl", "local_runtime_repo")
+local_runtime_repo = use_repo_rule("@rules_python//python/local_toolchains:repos.bzl", "local_runtime_repo")
-local_runtime_toolchains_repo = use_repo_rule("@rules_python//python/private:local_runtime_toolchains_repo.bzl", "local_runtime_toolchains_repo")
+local_runtime_toolchains_repo = use_repo_rule("@rules_python//python/local_toolchains:repos.bzl", "local_runtime_toolchains_repo")
local_runtime_repo(
name = "local_python3",
From 9fb13ec1af33ecc9da8beb7dcea7bb25b4dbc241 Mon Sep 17 00:00:00 2001
From: Matt Mackay
Date: Wed, 9 Apr 2025 08:37:57 -0400
Subject: [PATCH 019/156] fix: run python version call in isolated mode (#2761)
Similar to https://github.com/bazel-contrib/rules_python/pull/2738, runs
the call to get the Python interpreter version in isolated mode via
`-I`, ensuring userland Python variables do not affect this call.
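For illustration, a minimal sketch of the kind of invocation this changes
(`rctx` and `python_interpreter` are placeholder names; the real call site is
in the diff below):

```starlark
# Hypothetical repository-rule snippet: query the interpreter version with -I.
result = rctx.execute([
    python_interpreter,
    # -I implies -E, -P and -s, so PYTHONPATH and similar userland
    # variables cannot change the reported version.
    "-I",
    "-c",
    "import sys; print(f'{sys.version_info[0]}.{sys.version_info[1]}', end='')",
])
python_version = result.stdout  # e.g. "3.11"
```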
---
CHANGELOG.md | 1 +
python/private/pypi/whl_library.bzl | 4 ++++
2 files changed, 5 insertions(+)
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 7aeb135788..f38732f7d8 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -91,6 +91,7 @@ Unreleased changes template.
Fixes [#2685](https://github.com/bazel-contrib/rules_python/issues/2685).
* (toolchains) Run the check on the Python interpreter in isolated mode, to ensure it's not affected by userland environment variables, such as `PYTHONPATH`.
* (toolchains) Ensure temporary `.pyc` and `.pyo` files are also excluded from the interpreters repository files.
+* (pypi) Run interpreter version call in isolated mode, to ensure it's not affected by userland environment variables, such as `PYTHONPATH`.
{#v0-0-0-added}
### Added
diff --git a/python/private/pypi/whl_library.bzl b/python/private/pypi/whl_library.bzl
index 2904f85f1b..493f11353e 100644
--- a/python/private/pypi/whl_library.bzl
+++ b/python/private/pypi/whl_library.bzl
@@ -109,6 +109,10 @@ def _get_toolchain_unix_cflags(rctx, python_interpreter, logger = None):
op = "GetPythonVersionForUnixCflags",
python = python_interpreter,
arguments = [
+            # Run the interpreter in isolated mode; this option implies -E, -P and -s.
+            # This ensures environment variables set in userspace, such as PYTHONPATH,
+            # are ignored and cannot interfere with this invocation.
+ "-I",
"-c",
"import sys; print(f'{sys.version_info[0]}.{sys.version_info[1]}', end='')",
],
From 55d68369e37da847ee8ac2be0358ef4969f1b194 Mon Sep 17 00:00:00 2001
From: Ignas Anikevicius <240938+aignas@users.noreply.github.com>
Date: Fri, 11 Apr 2025 03:43:17 +0900
Subject: [PATCH 020/156] fix(pypi): fixes to the marker evaluation and utils
(#2767)
These are just bugfixes to already merged code:
* Fix nested bracket parsing in PEP508 marker parser.
* Fix the sys_platform constants, which I noticed in #2629 and which were
also pointed out in #2766.
* Port some of the Python tests for requirement parsing and improve the
implementation. Those tests will be removed in #2629.
* Move the platform related code to a separate file.
* Rename `pep508_req.bzl` to `pep508_requirement.bzl` to follow the
convention.
All of the bug fixes have added tests.
Work towards #2423.
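As a rough illustration of the parser behaviour covered by the new tests (the
load path, input string, and expected fields are taken from the tests added
below):

```starlark
load("//python/private/pypi:pep508_requirement.bzl", "requirement")  # buildifier: disable=bzl-visibility

req = requirement("name [fred,bar] @ http://foo.com ; python_version=='2.7'")
# req.name   == "name"
# req.extras == ["fred", "bar"]
# req.marker == "python_version=='2.7'"
```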
---
python/private/pypi/BUILD.bazel | 15 ++++-
python/private/pypi/evaluate_markers.bzl | 9 +--
python/private/pypi/pep508_env.bzl | 63 ++++++++++---------
python/private/pypi/pep508_platform.bzl | 57 +++++++++++++++++
...{pep508_req.bzl => pep508_requirement.bzl} | 9 ++-
tests/pypi/pep508/BUILD.bazel | 5 ++
tests/pypi/pep508/requirement_tests.bzl | 47 ++++++++++++++
7 files changed, 165 insertions(+), 40 deletions(-)
create mode 100644 python/private/pypi/pep508_platform.bzl
rename python/private/pypi/{pep508_req.bzl => pep508_requirement.bzl} (82%)
create mode 100644 tests/pypi/pep508/requirement_tests.bzl
diff --git a/python/private/pypi/BUILD.bazel b/python/private/pypi/BUILD.bazel
index 21e05f2895..e0a2f20c14 100644
--- a/python/private/pypi/BUILD.bazel
+++ b/python/private/pypi/BUILD.bazel
@@ -77,7 +77,8 @@ bzl_library(
deps = [
":pep508_env_bzl",
":pep508_evaluate_bzl",
- ":pep508_req_bzl",
+ ":pep508_platform_bzl",
+ ":pep508_requirement_bzl",
],
)
@@ -223,6 +224,9 @@ bzl_library(
bzl_library(
name = "pep508_env_bzl",
srcs = ["pep508_env.bzl"],
+ deps = [
+ ":pep508_platform_bzl",
+ ],
)
bzl_library(
@@ -235,8 +239,13 @@ bzl_library(
)
bzl_library(
- name = "pep508_req_bzl",
- srcs = ["pep508_req.bzl"],
+ name = "pep508_platform_bzl",
+ srcs = ["pep508_platform.bzl"],
+)
+
+bzl_library(
+ name = "pep508_requirement_bzl",
+ srcs = ["pep508_requirement.bzl"],
deps = [
"//python/private:normalize_name_bzl",
],
diff --git a/python/private/pypi/evaluate_markers.bzl b/python/private/pypi/evaluate_markers.bzl
index 1d4c30753f..a0223abdc8 100644
--- a/python/private/pypi/evaluate_markers.bzl
+++ b/python/private/pypi/evaluate_markers.bzl
@@ -14,9 +14,10 @@
"""A simple function that evaluates markers using a python interpreter."""
-load(":pep508_env.bzl", "env", _platform_from_str = "platform_from_str")
+load(":pep508_env.bzl", "env")
load(":pep508_evaluate.bzl", "evaluate")
-load(":pep508_req.bzl", _req = "requirement")
+load(":pep508_platform.bzl", "platform_from_str")
+load(":pep508_requirement.bzl", "requirement")
def evaluate_markers(requirements):
"""Return the list of supported platforms per requirements line.
@@ -29,9 +30,9 @@ def evaluate_markers(requirements):
"""
ret = {}
for req_string, platforms in requirements.items():
- req = _req(req_string)
+ req = requirement(req_string)
for platform in platforms:
- if evaluate(req.marker, env = env(_platform_from_str(platform, None))):
+ if evaluate(req.marker, env = env(platform_from_str(platform, None))):
ret.setdefault(req_string, []).append(platform)
return ret
diff --git a/python/private/pypi/pep508_env.bzl b/python/private/pypi/pep508_env.bzl
index 17d41871d1..265a8e9b99 100644
--- a/python/private/pypi/pep508_env.bzl
+++ b/python/private/pypi/pep508_env.bzl
@@ -15,7 +15,9 @@
"""This module is for implementing PEP508 environment definition.
"""
-# See https://stackoverflow.com/questions/45125516/possible-values-for-uname-m
+load(":pep508_platform.bzl", "platform_from_str")
+
+# See https://stackoverflow.com/a/45125525
_platform_machine_aliases = {
# These pairs mean the same hardware, but different values may be used
# on different host platforms.
@@ -24,13 +26,41 @@ _platform_machine_aliases = {
"i386": "x86_32",
"i686": "x86_32",
}
+
+# Platform system returns results from the `uname` call.
_platform_system_values = {
"linux": "Linux",
"osx": "Darwin",
"windows": "Windows",
}
+
+# The copy of SO [answer](https://stackoverflow.com/a/13874620) containing
+# all of the platforms:
+# ┍━━━━━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━┑
+# │ System │ Value │
+# ┝━━━━━━━━━━━━━━━━━━━━━┿━━━━━━━━━━━━━━━━━━━━━┥
+# │ Linux │ linux or linux2 (*) │
+# │ Windows │ win32 │
+# │ Windows/Cygwin │ cygwin │
+# │ Windows/MSYS2 │ msys │
+# │ Mac OS X │ darwin │
+# │ OS/2 │ os2 │
+# │ OS/2 EMX │ os2emx │
+# │ RiscOS │ riscos │
+# │ AtheOS │ atheos │
+# │ FreeBSD 7 │ freebsd7 │
+# │ FreeBSD 8 │ freebsd8 │
+# │ FreeBSD N │ freebsdN │
+# │ OpenBSD 6 │ openbsd6 │
+# │ AIX │ aix (**) │
+# ┕━━━━━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━┙
+#
+# (*) Prior to Python 3.3, the value for any Linux version is always linux2; after, it is linux.
+# (**) Prior to Python 3.8, the value could also be aix5 or aix7; use sys.platform.startswith()
+#
+# We are using only the subset that we actually support.
_sys_platform_values = {
- "linux": "posix",
+ "linux": "linux",
"osx": "darwin",
"windows": "win32",
}
@@ -61,6 +91,7 @@ def env(target_platform, *, extra = None):
"platform_release": "",
"platform_version": "",
}
+
if type(target_platform) == type(""):
target_platform = platform_from_str(target_platform, python_version = "")
@@ -87,31 +118,3 @@ def env(target_platform, *, extra = None):
"platform_machine": _platform_machine_aliases,
},
}
-
-def _platform(*, abi = None, os = None, arch = None):
- return struct(
- abi = abi,
- os = os,
- arch = arch,
- )
-
-def platform_from_str(p, python_version):
- """Return a platform from a string.
-
- Args:
- p: {type}`str` the actual string.
- python_version: {type}`str` the python version to add to platform if needed.
-
- Returns:
- A struct that is returned by the `_platform` function.
- """
- if p.startswith("cp"):
- abi, _, p = p.partition("_")
- elif python_version:
- major, _, tail = python_version.partition(".")
- abi = "cp{}{}".format(major, tail)
- else:
- abi = None
-
- os, _, arch = p.partition("_")
- return _platform(abi = abi, os = os or None, arch = arch or None)
diff --git a/python/private/pypi/pep508_platform.bzl b/python/private/pypi/pep508_platform.bzl
new file mode 100644
index 0000000000..381a8d7a08
--- /dev/null
+++ b/python/private/pypi/pep508_platform.bzl
@@ -0,0 +1,57 @@
+# Copyright 2025 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""The platform abstraction
+"""
+
+def platform(*, abi = None, os = None, arch = None):
+ """platform returns a struct for the platform.
+
+ Args:
+ abi: {type}`str | None` the target ABI, e.g. `"cp39"`.
+ os: {type}`str | None` the target os, e.g. `"linux"`.
+ arch: {type}`str | None` the target CPU, e.g. `"aarch64"`.
+
+ Returns:
+ A struct.
+ """
+
+ # Note, this is used a lot as a key in dictionaries, so it cannot contain
+ # methods.
+ return struct(
+ abi = abi,
+ os = os,
+ arch = arch,
+ )
+
+def platform_from_str(p, python_version):
+ """Return a platform from a string.
+
+ Args:
+ p: {type}`str` the actual string.
+ python_version: {type}`str` the python version to add to platform if needed.
+
+ Returns:
+ A struct that is returned by the `_platform` function.
+ """
+ if p.startswith("cp"):
+ abi, _, p = p.partition("_")
+ elif python_version:
+ major, _, tail = python_version.partition(".")
+ abi = "cp{}{}".format(major, tail)
+ else:
+ abi = None
+
+ os, _, arch = p.partition("_")
+ return platform(abi = abi, os = os or None, arch = arch or None)
diff --git a/python/private/pypi/pep508_req.bzl b/python/private/pypi/pep508_requirement.bzl
similarity index 82%
rename from python/private/pypi/pep508_req.bzl
rename to python/private/pypi/pep508_requirement.bzl
index 618ffaf17a..11f2b3e8fa 100644
--- a/python/private/pypi/pep508_req.bzl
+++ b/python/private/pypi/pep508_requirement.bzl
@@ -17,7 +17,7 @@
load("//python/private:normalize_name.bzl", "normalize_name")
-_STRIP = ["(", " ", ">", "=", "<", "~", "!"]
+_STRIP = ["(", " ", ">", "=", "<", "~", "!", "@"]
def requirement(spec):
"""Parse a PEP508 requirement line
@@ -28,15 +28,18 @@ def requirement(spec):
Returns:
A struct with the information.
"""
+ spec = spec.strip()
requires, _, maybe_hashes = spec.partition(";")
marker, _, _ = maybe_hashes.partition("--hash")
requires, _, extras_unparsed = requires.partition("[")
+ extras_unparsed, _, _ = extras_unparsed.partition("]")
for char in _STRIP:
requires, _, _ = requires.partition(char)
- extras = extras_unparsed.strip("]").split(",")
+ extras = extras_unparsed.replace(" ", "").split(",")
+ name = requires.strip(" ")
return struct(
- name = normalize_name(requires.strip(" ")),
+ name = normalize_name(name).replace("_", "-"),
marker = marker.strip(" "),
extras = extras,
)
diff --git a/tests/pypi/pep508/BUILD.bazel b/tests/pypi/pep508/BUILD.bazel
index b795db0591..575f28ada6 100644
--- a/tests/pypi/pep508/BUILD.bazel
+++ b/tests/pypi/pep508/BUILD.bazel
@@ -1,5 +1,10 @@
load(":evaluate_tests.bzl", "evaluate_test_suite")
+load(":requirement_tests.bzl", "requirement_test_suite")
evaluate_test_suite(
name = "evaluate_tests",
)
+
+requirement_test_suite(
+ name = "requirement_tests",
+)
diff --git a/tests/pypi/pep508/requirement_tests.bzl b/tests/pypi/pep508/requirement_tests.bzl
new file mode 100644
index 0000000000..7c81ea50fc
--- /dev/null
+++ b/tests/pypi/pep508/requirement_tests.bzl
@@ -0,0 +1,47 @@
+# Copyright 2025 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Tests for parsing the requirement specifier."""
+
+load("@rules_testing//lib:test_suite.bzl", "test_suite")
+load("//python/private/pypi:pep508_requirement.bzl", "requirement") # buildifier: disable=bzl-visibility
+
+_tests = []
+
+def _test_requirement_line_parsing(env):
+ want = {
+ " name1[ foo ] ": ("name1", ["foo"]),
+ "Name[foo]": ("name", ["foo"]),
+ "name [fred,bar] @ http://foo.com ; python_version=='2.7'": ("name", ["fred", "bar"]),
+ "name; (os_name=='a' or os_name=='b') and os_name=='c'": ("name", [""]),
+ "name@http://foo.com": ("name", [""]),
+ "name[ Foo123 ]": ("name", ["Foo123"]),
+ "name[extra]@http://foo.com": ("name", ["extra"]),
+ "name[foo]": ("name", ["foo"]),
+ "name[quux, strange];python_version<'2.7' and platform_version=='2'": ("name", ["quux", "strange"]),
+ "name_foo[bar]": ("name-foo", ["bar"]),
+ }
+
+ got = {
+ i: (parsed.name, parsed.extras)
+ for i, parsed in {case: requirement(case) for case in want}.items()
+ }
+ env.expect.that_dict(got).contains_exactly(want)
+
+_tests.append(_test_requirement_line_parsing)
+
+def requirement_test_suite(name): # buildifier: disable=function-docstring
+ test_suite(
+ name = name,
+ basic_tests = _tests,
+ )
From 6e2d493f3e8e12c7cf208a4e9a398c5eabb65f24 Mon Sep 17 00:00:00 2001
From: asa <96153+asa@users.noreply.github.com>
Date: Thu, 10 Apr 2025 17:44:56 -0700
Subject: [PATCH 021/156] fix: Prevent absolute path creation in uv lock
template (#2769)
This change fixes a bug in the `lock` rule where, when the package is at
the root level, the path to `requirements.txt` is constructed
incorrectly with a leading double slash (`//requirements.txt`), causing
it to be interpreted as an absolute path.
This change detects if the package is empty before constructing the
output path.
Work towards #1975
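A condensed sketch of the corrected destination handling (attribute names
mirror the diff below; the `_dst_path` helper is hypothetical and only used
here for illustration):

```starlark
def _dst_path(ctx):
    pkg = ctx.label.package  # empty string for the repository root package

    # Formatting "{pkg}/{output}" with an empty package yields a leading slash,
    # which ends up rendered as //requirements.txt and treated as an absolute
    # path, so fall back to the bare output name for the root package.
    return "{}/{}".format(pkg, ctx.attr.output) if pkg else ctx.attr.output
```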
---------
Co-authored-by: Ignas Anikevicius <240938+aignas@users.noreply.github.com>
---
python/uv/private/lock.bzl | 7 ++++++-
1 file changed, 6 insertions(+), 1 deletion(-)
diff --git a/python/uv/private/lock.bzl b/python/uv/private/lock.bzl
index 45a3819ee6..2731d6b009 100644
--- a/python/uv/private/lock.bzl
+++ b/python/uv/private/lock.bzl
@@ -327,10 +327,15 @@ def _maybe_file(path):
def _expand_template_impl(ctx):
pkg = ctx.label.package
update_src = ctx.actions.declare_file(ctx.attr.update_target + ".py")
+
+ # Fix the path construction to avoid absolute paths
+ # If package is empty (root), don't add a leading slash
+ dst = "{}/{}".format(pkg, ctx.attr.output) if pkg else ctx.attr.output
+
ctx.actions.expand_template(
template = ctx.files._template[0],
substitutions = {
- "{{dst}}": "{}/{}".format(pkg, ctx.attr.output),
+ "{{dst}}": dst,
"{{src}}": "{}".format(ctx.files.src[0].short_path),
"{{update_target}}": "//{}:{}".format(pkg, ctx.attr.update_target),
},
From 84351d4ec14e474bc196c0b8cd70e04fcc9a25ca Mon Sep 17 00:00:00 2001
From: "Elvis M. Wianda" <7077790+ewianda@users.noreply.github.com>
Date: Fri, 11 Apr 2025 17:18:46 -0600
Subject: [PATCH 022/156] fix: Resolve incorrect platform specific dependency
(#2766)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
This change addresses a bug where `pip.parse` selects the wrong
requirement entry when multiple extras are listed with platform-specific
markers.
#### 🔍 Problem:
In a `requirements.txt` generated by tools like `uv` or `poetry`, it's
valid to have multiple entries for the same package, each with different
extras and `sys_platform` markers, for example:
```ini
optimum[onnxruntime]==1.17.1 ; sys_platform == 'darwin'
optimum[onnxruntime-gpu]==1.17.1 ; sys_platform == 'linux'
```
The current implementation in
[`parse_requirements.bzl`](https://github.com/bazel-contrib/rules_python/blob/032f6aa738a673b13b605dabf55465c6fc1a56eb/python/private/pypi/parse_requirements.bzl#L114-L126)
uses a sort-by-length heuristic to select the “best” requirement when
there are multiple entries with the same base name. This works well in
legacy `requirements.txt` files where:
```
my_dep
my_dep[foo]
my_dep[foo,bar]
```
...would indicate an intent to select the **most complete set of
extras** (i.e. the longest name).
However, this heuristic **breaks** in the presence of **platform
markers**, where extras are **not subsets**, but distinct variants. In
the example above, Bazel mistakenly selects `optimum[onnxruntime-gpu]`
on macOS because it's a longer match, even though it is guarded by a
Linux-only marker.
#### ✅ Fix:
This PR modifies the behavior to:
1. **Add the requirement marker** as part of the sorting key.
2. **Then apply the longest-match logic** to drop duplicate requirements
with different extras but the same markers.
This ensures that only applicable requirements are considered during
resolution, preserving correctness in multi-platform environments.
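A condensed sketch of the new deduplication (it mirrors the diff below;
`parse_result` and `requirement` come from the surrounding file, and the keys
in the comment reuse the example above):

```starlark
requirements_dict = {}
for entry in sorted(
    parse_result.requirements,
    # Longest match last, falling back to the original ordering, so the entry
    # with the most extras wins among same-marker duplicates.
    key = lambda x: (len(x[1].partition("==")[0]), x),
):
    req = requirement(entry[1])
    # Keying on the marker keeps platform-specific variants separate, e.g.:
    #   ("optimum", "1.17.1", "sys_platform == 'darwin'") -> optimum[onnxruntime]
    #   ("optimum", "1.17.1", "sys_platform == 'linux'")  -> optimum[onnxruntime-gpu]
    requirements_dict[(req.name, req.version, req.marker)] = entry
```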
#### 🧪 Before:
On macOS, the following entry is incorrectly selected:
```
optimum[onnxruntime-gpu]==1.17.1 ; sys_platform == 'linux'
```
#### ✅ After:
Correct entry is selected:
```
optimum[onnxruntime]==1.17.1 ; sys_platform == 'darwin'
```
close https://github.com/bazel-contrib/rules_python/issues/2690
---------
Co-authored-by: Ignas Anikevicius <240938+aignas@users.noreply.github.com>
---
CHANGELOG.md | 2 +
python/private/pypi/parse_requirements.bzl | 44 +++++-------
python/private/pypi/pep508_requirement.bzl | 11 +++
tests/pypi/extension/extension_tests.bzl | 78 ++++++++++++++++++++++
tests/pypi/pep508/requirement_tests.bzl | 23 ++++---
5 files changed, 119 insertions(+), 39 deletions(-)
diff --git a/CHANGELOG.md b/CHANGELOG.md
index f38732f7d8..7d9b648bea 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -81,6 +81,8 @@ Unreleased changes template.
{#v0-0-0-fixed}
### Fixed
+* (pypi) Platform specific extras are now correctly handled when using
+ universal lock files with environment markers. Fixes [#2690](https://github.com/bazel-contrib/rules_python/pull/2690).
* (runfiles) ({obj}`--bootstrap_impl=script`) Follow symlinks when searching for runfiles.
* (toolchains) Do not try to run `chmod` when downloading non-windows hermetic toolchain
repositories on Windows. Fixes
diff --git a/python/private/pypi/parse_requirements.bzl b/python/private/pypi/parse_requirements.bzl
index d2014a7eb9..1cbf094f5c 100644
--- a/python/private/pypi/parse_requirements.bzl
+++ b/python/private/pypi/parse_requirements.bzl
@@ -30,22 +30,9 @@ load("//python/private:normalize_name.bzl", "normalize_name")
load("//python/private:repo_utils.bzl", "repo_utils")
load(":index_sources.bzl", "index_sources")
load(":parse_requirements_txt.bzl", "parse_requirements_txt")
+load(":pep508_requirement.bzl", "requirement")
load(":whl_target_platforms.bzl", "select_whls")
-def _extract_version(entry):
- """Extract the version part from the requirement string.
-
-
- Args:
- entry: {type}`str` The requirement string.
- """
- version_start = entry.find("==")
- if version_start != -1:
- # Extract everything after '==' until the next space or end of the string
- version, _, _ = entry[version_start + 2:].partition(" ")
- return version
- return None
-
def parse_requirements(
ctx,
*,
@@ -111,19 +98,20 @@ def parse_requirements(
# The requirement lines might have duplicate names because lines for extras
# are returned as just the base package name. e.g., `foo[bar]` results
# in an entry like `("foo", "foo[bar] == 1.0 ...")`.
- requirements_dict = {
- (normalize_name(entry[0]), _extract_version(entry[1])): entry
- for entry in sorted(
- parse_result.requirements,
- # Get the longest match and fallback to original WORKSPACE sorting,
- # which should get us the entry with most extras.
- #
- # FIXME @aignas 2024-05-13: The correct behaviour might be to get an
- # entry with all aggregated extras, but it is unclear if we
- # should do this now.
- key = lambda x: (len(x[1].partition("==")[0]), x),
- )
- }.values()
+    # Lines with different markers are not considered duplicates.
+ requirements_dict = {}
+ for entry in sorted(
+ parse_result.requirements,
+ # Get the longest match and fallback to original WORKSPACE sorting,
+ # which should get us the entry with most extras.
+ #
+ # FIXME @aignas 2024-05-13: The correct behaviour might be to get an
+ # entry with all aggregated extras, but it is unclear if we
+ # should do this now.
+ key = lambda x: (len(x[1].partition("==")[0]), x),
+ ):
+ req = requirement(entry[1])
+ requirements_dict[(req.name, req.version, req.marker)] = entry
tokenized_options = []
for opt in parse_result.options:
@@ -132,7 +120,7 @@ def parse_requirements(
pip_args = tokenized_options + extra_pip_args
for plat in plats:
- requirements[plat] = requirements_dict
+ requirements[plat] = requirements_dict.values()
options[plat] = pip_args
requirements_by_platform = {}
diff --git a/python/private/pypi/pep508_requirement.bzl b/python/private/pypi/pep508_requirement.bzl
index 11f2b3e8fa..ee7b5dfc35 100644
--- a/python/private/pypi/pep508_requirement.bzl
+++ b/python/private/pypi/pep508_requirement.bzl
@@ -30,6 +30,16 @@ def requirement(spec):
"""
spec = spec.strip()
requires, _, maybe_hashes = spec.partition(";")
+
+ version_start = requires.find("==")
+ version = None
+ if version_start != -1:
+ # Extract everything after '==' until the next space or end of the string
+ version, _, _ = requires[version_start + 2:].partition(" ")
+
+ # Remove any trailing characters from the version string
+ version = version.strip(" ")
+
marker, _, _ = maybe_hashes.partition("--hash")
requires, _, extras_unparsed = requires.partition("[")
extras_unparsed, _, _ = extras_unparsed.partition("]")
@@ -42,4 +52,5 @@ def requirement(spec):
name = normalize_name(name).replace("_", "-"),
marker = marker.strip(" "),
extras = extras,
+ version = version,
)
diff --git a/tests/pypi/extension/extension_tests.bzl b/tests/pypi/extension/extension_tests.bzl
index 1652e76156..66c9e0549e 100644
--- a/tests/pypi/extension/extension_tests.bzl
+++ b/tests/pypi/extension/extension_tests.bzl
@@ -856,6 +856,84 @@ git_dep @ git+https://git.server/repo/project@deadbeefdeadbeef
_tests.append(_test_simple_get_index)
+def _test_optimum_sys_platform_extra(env):
+ pypi = _parse_modules(
+ env,
+ module_ctx = _mock_mctx(
+ _mod(
+ name = "rules_python",
+ parse = [
+ _parse(
+ hub_name = "pypi",
+ python_version = "3.15",
+ requirements_lock = "universal.txt",
+ ),
+ ],
+ ),
+ read = lambda x: {
+ "universal.txt": """\
+optimum[onnxruntime]==1.17.1 ; sys_platform == 'darwin'
+optimum[onnxruntime-gpu]==1.17.1 ; sys_platform == 'linux'
+""",
+ }[x],
+ ),
+ available_interpreters = {
+ "python_3_15_host": "unit_test_interpreter_target",
+ },
+ )
+
+ pypi.exposed_packages().contains_exactly({"pypi": []})
+ pypi.hub_group_map().contains_exactly({"pypi": {}})
+ pypi.hub_whl_map().contains_exactly({
+ "pypi": {
+ "optimum": {
+ "pypi_315_optimum_linux_aarch64_linux_arm_linux_ppc_linux_s390x_linux_x86_64": [
+ whl_config_setting(
+ version = "3.15",
+ target_platforms = [
+ "cp315_linux_aarch64",
+ "cp315_linux_arm",
+ "cp315_linux_ppc",
+ "cp315_linux_s390x",
+ "cp315_linux_x86_64",
+ ],
+ config_setting = None,
+ filename = None,
+ ),
+ ],
+ "pypi_315_optimum_osx_aarch64_osx_x86_64": [
+ whl_config_setting(
+ version = "3.15",
+ target_platforms = [
+ "cp315_osx_aarch64",
+ "cp315_osx_x86_64",
+ ],
+ config_setting = None,
+ filename = None,
+ ),
+ ],
+ },
+ },
+ })
+
+ pypi.whl_libraries().contains_exactly({
+ "pypi_315_optimum_linux_aarch64_linux_arm_linux_ppc_linux_s390x_linux_x86_64": {
+ "dep_template": "@pypi//{name}:{target}",
+ "python_interpreter_target": "unit_test_interpreter_target",
+ "repo": "pypi_315",
+ "requirement": "optimum[onnxruntime-gpu]==1.17.1",
+ },
+ "pypi_315_optimum_osx_aarch64_osx_x86_64": {
+ "dep_template": "@pypi//{name}:{target}",
+ "python_interpreter_target": "unit_test_interpreter_target",
+ "repo": "pypi_315",
+ "requirement": "optimum[onnxruntime]==1.17.1",
+ },
+ })
+ pypi.whl_mods().contains_exactly({})
+
+_tests.append(_test_optimum_sys_platform_extra)
+
def extension_test_suite(name):
"""Create the test suite.
diff --git a/tests/pypi/pep508/requirement_tests.bzl b/tests/pypi/pep508/requirement_tests.bzl
index 7c81ea50fc..9afb43a437 100644
--- a/tests/pypi/pep508/requirement_tests.bzl
+++ b/tests/pypi/pep508/requirement_tests.bzl
@@ -20,20 +20,21 @@ _tests = []
def _test_requirement_line_parsing(env):
want = {
- " name1[ foo ] ": ("name1", ["foo"]),
- "Name[foo]": ("name", ["foo"]),
- "name [fred,bar] @ http://foo.com ; python_version=='2.7'": ("name", ["fred", "bar"]),
- "name; (os_name=='a' or os_name=='b') and os_name=='c'": ("name", [""]),
- "name@http://foo.com": ("name", [""]),
- "name[ Foo123 ]": ("name", ["Foo123"]),
- "name[extra]@http://foo.com": ("name", ["extra"]),
- "name[foo]": ("name", ["foo"]),
- "name[quux, strange];python_version<'2.7' and platform_version=='2'": ("name", ["quux", "strange"]),
- "name_foo[bar]": ("name-foo", ["bar"]),
+ " name1[ foo ] ": ("name1", ["foo"], None, ""),
+ "Name[foo]": ("name", ["foo"], None, ""),
+ "name [fred,bar] @ http://foo.com ; python_version=='2.7'": ("name", ["fred", "bar"], None, "python_version=='2.7'"),
+ "name; (os_name=='a' or os_name=='b') and os_name=='c'": ("name", [""], None, "(os_name=='a' or os_name=='b') and os_name=='c'"),
+ "name@http://foo.com": ("name", [""], None, ""),
+ "name[ Foo123 ]": ("name", ["Foo123"], None, ""),
+ "name[extra]@http://foo.com": ("name", ["extra"], None, ""),
+ "name[foo]": ("name", ["foo"], None, ""),
+ "name[quux, strange];python_version<'2.7' and platform_version=='2'": ("name", ["quux", "strange"], None, "python_version<'2.7' and platform_version=='2'"),
+ "name_foo[bar]": ("name-foo", ["bar"], None, ""),
+ "name_foo[bar]==0.25": ("name-foo", ["bar"], "0.25", ""),
}
got = {
- i: (parsed.name, parsed.extras)
+ i: (parsed.name, parsed.extras, parsed.version, parsed.marker)
for i, parsed in {case: requirement(case) for case in want}.items()
}
env.expect.that_dict(got).contains_exactly(want)
From aa0d16c1463e4e26f6ed633ae83d9785a2ea9dfa Mon Sep 17 00:00:00 2001
From: Ignas Anikevicius <240938+aignas@users.noreply.github.com>
Date: Mon, 14 Apr 2025 07:10:51 +0900
Subject: [PATCH 023/156] fix(rules): make the srcs truly optional (#2768)
With this PR we mark the `srcs` attribute as optional, since `main_module`
can be used to run code straight from the deps.
This also removes a long-standing `TODO` note.
Fixes #2765
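For example, something along these lines becomes possible (the target, module,
and dependency names here are made up for illustration):

```starlark
load("@rules_python//python:py_binary.bzl", "py_binary")

# No srcs: with --bootstrap_impl=script the binary simply runs the given
# module, which is provided by one of its deps.
py_binary(
    name = "cli",
    main_module = "my_pkg.cli",  # hypothetical module living in a dep
    deps = ["@pypi//my_pkg"],    # hypothetical pip-provided dependency
)
```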
---------
Co-authored-by: Richard Levasseur
---
CHANGELOG.md | 2 +
python/private/py_executable.bzl | 3 +-
tests/base_rules/py_executable_base_tests.bzl | 72 ++++++++++++-------
tests/support/support.bzl | 1 +
4 files changed, 53 insertions(+), 25 deletions(-)
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 7d9b648bea..33d99dfaa1 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -76,6 +76,8 @@ Unreleased changes template.
* (pypi) The PyPI extension will no longer write the lock file entries as the
extension has been marked reproducible.
Fixes [#2434](https://github.com/bazel-contrib/rules_python/issues/2434).
+* (rules) {attr}`py_binary.srcs` and {attr}`py_test.srcs` are no longer mandatory when
+  `main_module` is specified (for `--bootstrap_impl=script`).
[20250317]: https://github.com/astral-sh/python-build-standalone/releases/tag/20250317
diff --git a/python/private/py_executable.bzl b/python/private/py_executable.bzl
index e6f4700b20..dd3ad869fa 100644
--- a/python/private/py_executable.bzl
+++ b/python/private/py_executable.bzl
@@ -786,6 +786,8 @@ def _create_stage1_bootstrap(
)
template = runtime.bootstrap_template
subs["%shebang%"] = runtime.stub_shebang
+ elif not ctx.files.srcs:
+ fail("mandatory 'srcs' files have not been provided")
else:
if (ctx.configuration.coverage_enabled and
runtime and
@@ -1888,7 +1890,6 @@ def create_executable_rule_builder(implementation, **kwargs):
),
**kwargs
)
- builder.attrs.get("srcs").set_mandatory(True)
return builder
def cc_configure_features(
diff --git a/tests/base_rules/py_executable_base_tests.bzl b/tests/base_rules/py_executable_base_tests.bzl
index 3cc6dfb702..37707831fc 100644
--- a/tests/base_rules/py_executable_base_tests.bzl
+++ b/tests/base_rules/py_executable_base_tests.bzl
@@ -24,7 +24,7 @@ load("//python/private:util.bzl", "IS_BAZEL_7_OR_HIGHER") # buildifier: disable
load("//tests/base_rules:base_tests.bzl", "create_base_tests")
load("//tests/base_rules:util.bzl", "WINDOWS_ATTR", pt_util = "util")
load("//tests/support:py_executable_info_subject.bzl", "PyExecutableInfoSubject")
-load("//tests/support:support.bzl", "CC_TOOLCHAIN", "CROSSTOOL_TOP", "LINUX_X86_64", "WINDOWS_X86_64")
+load("//tests/support:support.bzl", "BOOTSTRAP_IMPL", "CC_TOOLCHAIN", "CROSSTOOL_TOP", "LINUX_X86_64", "WINDOWS_X86_64")
_tests = []
@@ -342,6 +342,53 @@ def _test_name_cannot_end_in_py_impl(env, target):
matching.str_matches("name must not end in*.py"),
)
+def _test_main_module_bootstrap_system_python(name, config):
+ rt_util.helper_target(
+ config.rule,
+ name = name + "_subject",
+ main_module = "dummy",
+ )
+ analysis_test(
+ name = name,
+ impl = _test_main_module_bootstrap_system_python_impl,
+ target = name + "_subject",
+ config_settings = {
+ BOOTSTRAP_IMPL: "system_python",
+ "//command_line_option:platforms": [LINUX_X86_64],
+ },
+ expect_failure = True,
+ )
+
+def _test_main_module_bootstrap_system_python_impl(env, target):
+ env.expect.that_target(target).failures().contains_predicate(
+ matching.str_matches("mandatory*srcs"),
+ )
+
+_tests.append(_test_main_module_bootstrap_system_python)
+
+def _test_main_module_bootstrap_script(name, config):
+ rt_util.helper_target(
+ config.rule,
+ name = name + "_subject",
+ main_module = "dummy",
+ )
+ analysis_test(
+ name = name,
+ impl = _test_main_module_bootstrap_script_impl,
+ target = name + "_subject",
+ config_settings = {
+ BOOTSTRAP_IMPL: "script",
+ "//command_line_option:platforms": [LINUX_X86_64],
+ },
+ )
+
+def _test_main_module_bootstrap_script_impl(env, target):
+ env.expect.that_target(target).default_outputs().contains(
+ "{package}/{test_name}_subject",
+ )
+
+_tests.append(_test_main_module_bootstrap_script)
+
def _test_py_runtime_info_provided(name, config):
rt_util.helper_target(
config.rule,
@@ -365,29 +412,6 @@ def _test_py_runtime_info_provided_impl(env, target):
_tests.append(_test_py_runtime_info_provided)
-# Can't test this -- mandatory validation happens before analysis test
-# can intercept it
-# TODO(#1069): Once re-implemented in Starlark, modify rule logic to make this
-# testable.
-# def _test_srcs_is_mandatory(name, config):
-# rt_util.helper_target(
-# config.rule,
-# name = name + "_subject",
-# )
-# analysis_test(
-# name = name,
-# impl = _test_srcs_is_mandatory,
-# target = name + "_subject",
-# expect_failure = True,
-# )
-#
-# _tests.append(_test_srcs_is_mandatory)
-#
-# def _test_srcs_is_mandatory_impl(env, target):
-# env.expect.that_target(target).failures().contains_predicate(
-# matching.str_matches("mandatory*srcs"),
-# )
-
# =====
# You were gonna add a test at the end, weren't you?
# Nope. Please keep them sorted; put it in its alphabetical location.
diff --git a/tests/support/support.bzl b/tests/support/support.bzl
index 2b6703843b..6330155d8c 100644
--- a/tests/support/support.bzl
+++ b/tests/support/support.bzl
@@ -35,6 +35,7 @@ CROSSTOOL_TOP = Label("//tests/support/cc_toolchains:cc_toolchain_suite")
# str() around Label() is necessary because rules_testing's config_settings
# doesn't accept yet Label objects.
ADD_SRCS_TO_RUNFILES = str(Label("//python/config_settings:add_srcs_to_runfiles"))
+BOOTSTRAP_IMPL = str(Label("//python/config_settings:bootstrap_impl"))
EXEC_TOOLS_TOOLCHAIN = str(Label("//python/config_settings:exec_tools_toolchain"))
PRECOMPILE = str(Label("//python/config_settings:precompile"))
PRECOMPILE_SOURCE_RETENTION = str(Label("//python/config_settings:precompile_source_retention"))
From 2cb920c1e52a85239d6bcc38919fbf143b514dac Mon Sep 17 00:00:00 2001
From: Ignas Anikevicius <240938+aignas@users.noreply.github.com>
Date: Mon, 14 Apr 2025 08:32:10 +0900
Subject: [PATCH 024/156] refactor(pypi): translate wheel METADATA parsing to
starlark (#2629)
This PR starts using the newly introduced (#2692) PEP508-compliant
requirement marker parser in Starlark and moves the dependency
generation from Python (`whl_installer`) to Starlark
in the `whl_library` repository rule.
This PR is (almost) a pure refactor where no bugs are fixed, but this is
foundational work that also adds notes on how things will be moved
to macros (i.e. the analysis phase) so that we can fix a few long-standing
bugs and prepare for stabilizing the `experimental_index_url` (#260).
Refactor:
* I have migrated all of the unit tests from Python to Starlark for deps
generation from METADATA `Requires-Dist` fields.
* Read the `METADATA` file itself in Starlark.
Work towards #260, #2319, #2241
Fixes #2423
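As a rough sketch of the new Starlark entry point (the signature comes from
`pep508_deps.bzl` in this diff; the wheel name and requirement lines are made
up for illustration):

```starlark
load("//python/private/pypi:pep508_deps.bzl", "deps")  # buildifier: disable=bzl-visibility

res = deps(
    "mypkg",  # hypothetical wheel name
    requires_dist = [
        "requests",
        "tomli; python_version < '3.11'",
    ],
    platforms = ["cp311_linux_x86_64", "cp310_linux_x86_64"],
    host_python_version = "3.11.1",
)

# res.deps is the list of unconditional deps; res.deps_select maps platform
# (or python-version) condition strings to the extra deps needed there.
```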
---
python/private/pypi/BUILD.bazel | 19 +
python/private/pypi/pep508_deps.bzl | 351 ++++++++++++++++
python/private/pypi/pep508_evaluate.bzl | 13 +-
python/private/pypi/whl_installer/BUILD.bazel | 1 -
.../private/pypi/whl_installer/arguments.py | 8 -
python/private/pypi/whl_installer/platform.py | 304 --------------
python/private/pypi/whl_installer/wheel.py | 281 -------------
.../pypi/whl_installer/wheel_installer.py | 37 +-
python/private/pypi/whl_library.bzl | 57 ++-
python/private/pypi/whl_library_targets.bzl | 2 -
python/private/pypi/whl_metadata.bzl | 108 +++++
tests/pypi/pep508/BUILD.bazel | 5 +
tests/pypi/pep508/deps_tests.bzl | 385 ++++++++++++++++++
tests/pypi/pep508/evaluate_tests.bzl | 2 +
tests/pypi/whl_installer/BUILD.bazel | 24 --
tests/pypi/whl_installer/arguments_test.py | 14 +-
tests/pypi/whl_installer/platform_test.py | 154 -------
.../whl_installer/wheel_installer_test.py | 42 +-
tests/pypi/whl_installer/wheel_test.py | 371 -----------------
tests/pypi/whl_metadata/BUILD.bazel | 5 +
.../pypi/whl_metadata/whl_metadata_tests.bzl | 147 +++++++
21 files changed, 1099 insertions(+), 1231 deletions(-)
create mode 100644 python/private/pypi/pep508_deps.bzl
delete mode 100644 python/private/pypi/whl_installer/platform.py
create mode 100644 python/private/pypi/whl_metadata.bzl
create mode 100644 tests/pypi/pep508/deps_tests.bzl
delete mode 100644 tests/pypi/whl_installer/platform_test.py
delete mode 100644 tests/pypi/whl_installer/wheel_test.py
create mode 100644 tests/pypi/whl_metadata/BUILD.bazel
create mode 100644 tests/pypi/whl_metadata/whl_metadata_tests.bzl
diff --git a/python/private/pypi/BUILD.bazel b/python/private/pypi/BUILD.bazel
index e0a2f20c14..7297238cb4 100644
--- a/python/private/pypi/BUILD.bazel
+++ b/python/private/pypi/BUILD.bazel
@@ -221,6 +221,18 @@ bzl_library(
],
)
+bzl_library(
+ name = "pep508_deps_bzl",
+ srcs = ["pep508_deps.bzl"],
+ deps = [
+ ":pep508_env_bzl",
+ ":pep508_evaluate_bzl",
+ ":pep508_platform_bzl",
+ ":pep508_requirement_bzl",
+ "//python/private:normalize_name_bzl",
+ ],
+)
+
bzl_library(
name = "pep508_env_bzl",
srcs = ["pep508_env.bzl"],
@@ -368,7 +380,9 @@ bzl_library(
":generate_whl_library_build_bazel_bzl",
":parse_whl_name_bzl",
":patch_whl_bzl",
+ ":pep508_deps_bzl",
":pypi_repo_utils_bzl",
+ ":whl_metadata_bzl",
":whl_target_platforms_bzl",
"//python/private:auth_bzl",
"//python/private:envsubst_bzl",
@@ -377,6 +391,11 @@ bzl_library(
],
)
+bzl_library(
+ name = "whl_metadata_bzl",
+ srcs = ["whl_metadata.bzl"],
+)
+
bzl_library(
name = "whl_repo_name_bzl",
srcs = ["whl_repo_name.bzl"],
diff --git a/python/private/pypi/pep508_deps.bzl b/python/private/pypi/pep508_deps.bzl
new file mode 100644
index 0000000000..af0a75362b
--- /dev/null
+++ b/python/private/pypi/pep508_deps.bzl
@@ -0,0 +1,351 @@
+# Copyright 2025 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This module is for implementing PEP508 compliant METADATA deps parsing.
+"""
+
+load("//python/private:normalize_name.bzl", "normalize_name")
+load(":pep508_env.bzl", "env")
+load(":pep508_evaluate.bzl", "evaluate")
+load(":pep508_platform.bzl", "platform", "platform_from_str")
+load(":pep508_requirement.bzl", "requirement")
+
+_ALL_OS_VALUES = [
+ "windows",
+ "osx",
+ "linux",
+]
+_ALL_ARCH_VALUES = [
+ "aarch64",
+ "ppc64",
+ "ppc64le",
+ "s390x",
+ "x86_32",
+ "x86_64",
+]
+
+def deps(name, *, requires_dist, platforms = [], extras = [], host_python_version = None):
+ """Parse the RequiresDist from wheel METADATA
+
+ Args:
+ name: {type}`str` the name of the wheel.
+ requires_dist: {type}`list[str]` the list of RequiresDist lines from the
+ METADATA file.
+ extras: {type}`list[str]` the requested extras to generate targets for.
+ platforms: {type}`list[str]` the list of target platform strings.
+ host_python_version: {type}`str` the host python version.
+
+ Returns:
+ A struct with attributes:
+ * deps: {type}`list[str]` dependencies to include unconditionally.
+ * deps_select: {type}`dict[str, list[str]]` dependencies to include on particular
+ subset of target platforms.
+ """
+ reqs = sorted(
+ [requirement(r) for r in requires_dist],
+ key = lambda x: "{}:{}:".format(x.name, sorted(x.extras), x.marker),
+ )
+ deps = {}
+ deps_select = {}
+ name = normalize_name(name)
+ want_extras = _resolve_extras(name, reqs, extras)
+
+ # drop self edges
+ reqs = [r for r in reqs if r.name != name]
+
+ platforms = [
+ platform_from_str(p, python_version = host_python_version)
+ for p in platforms
+ ] or [
+ platform_from_str("", python_version = host_python_version),
+ ]
+
+ abis = sorted({p.abi: True for p in platforms if p.abi})
+ if host_python_version and len(abis) > 1:
+ _, _, minor_version = host_python_version.partition(".")
+ minor_version, _, _ = minor_version.partition(".")
+ default_abi = "cp3" + minor_version
+ elif len(abis) > 1:
+ fail(
+ "all python versions need to be specified explicitly, got: {}".format(platforms),
+ )
+ else:
+ default_abi = None
+
+ for req in reqs:
+ _add_req(
+ deps,
+ deps_select,
+ req,
+ extras = want_extras,
+ platforms = platforms,
+ default_abi = default_abi,
+ )
+
+ return struct(
+ deps = sorted(deps),
+ deps_select = {
+ _platform_str(p): sorted(deps)
+ for p, deps in deps_select.items()
+ },
+ )
+
+def _platform_str(self):
+ if self.abi == None:
+ if not self.os and not self.arch:
+ return "//conditions:default"
+ elif not self.arch:
+ return "@platforms//os:{}".format(self.os)
+ else:
+ return "{}_{}".format(self.os, self.arch)
+
+ minor_version = self.abi[3:]
+ if self.arch == None and self.os == None:
+ return str(Label("//python/config_settings:is_python_3.{}".format(minor_version)))
+
+ return "cp3{}_{}_{}".format(
+ minor_version,
+ self.os or "anyos",
+ self.arch or "anyarch",
+ )
+
+def _platform_specializations(self, cpu_values = _ALL_ARCH_VALUES, os_values = _ALL_OS_VALUES):
+ """Return the platform itself and all its unambiguous specializations.
+
+ For more info about specializations see
+ https://bazel.build/docs/configurable-attributes
+ """
+ specializations = []
+ specializations.append(self)
+ if self.arch == None:
+ specializations.extend([
+ platform(os = self.os, arch = arch, abi = self.abi)
+ for arch in cpu_values
+ ])
+ if self.os == None:
+ specializations.extend([
+ platform(os = os, arch = self.arch, abi = self.abi)
+ for os in os_values
+ ])
+ if self.os == None and self.arch == None:
+ specializations.extend([
+ platform(os = os, arch = arch, abi = self.abi)
+ for os in os_values
+ for arch in cpu_values
+ ])
+ return specializations
+
+def _add(deps, deps_select, dep, platform):
+ dep = normalize_name(dep)
+
+ if platform == None:
+ deps[dep] = True
+
+ # If the dep is in the platform-specific list, remove it from the select.
+ pop_keys = []
+ for p, _deps in deps_select.items():
+ if dep not in _deps:
+ continue
+
+ _deps.pop(dep)
+ if not _deps:
+ pop_keys.append(p)
+
+ for p in pop_keys:
+ deps_select.pop(p)
+ return
+
+ if dep in deps:
+ # If the dep is already in the main dependency list, no need to add it in the
+ # platform-specific dependency list.
+ return
+
+ # Add the platform-specific branch
+ deps_select.setdefault(platform, {})
+
+ # Add the dep to specializations of the given platform if they
+ # exist in the select statement.
+ for p in _platform_specializations(platform):
+ if p not in deps_select:
+ continue
+
+ deps_select[p][dep] = True
+
+ if len(deps_select[platform]) == 1:
+ # We are adding a new item to the select and we need to ensure that
+ # existing dependencies from less specialized platforms are propagated
+ # to the newly added dependency set.
+ for p, _deps in deps_select.items():
+ # Check if the existing platform overlaps with the given platform
+ if p == platform or platform not in _platform_specializations(p):
+ continue
+
+ deps_select[platform].update(_deps)
+
+def _maybe_add_common_dep(deps, deps_select, platforms, dep):
+ abis = sorted({p.abi: True for p in platforms if p.abi})
+ if len(abis) < 2:
+ return
+
+ platforms = [platform()] + [
+ platform(abi = abi)
+ for abi in abis
+ ]
+
+ # If the dep is targeting all target python versions, lets add it to
+ # the common dependency list to simplify the select statements.
+ for p in platforms:
+ if p not in deps_select:
+ return
+
+ if dep not in deps_select[p]:
+ return
+
+ # All of the python version-specific branches have the dep, so lets add
+ # it to the common deps.
+ deps[dep] = True
+ for p in platforms:
+ deps_select[p].pop(dep)
+ if not deps_select[p]:
+ deps_select.pop(p)
+
+def _resolve_extras(self_name, reqs, extras):
+ """Resolve extras which are due to depending on self[some_other_extra].
+
+ Some packages may have cyclic dependencies resulting from extras being used, one example is
+ `etils`, where we have one set of extras as aliases for other extras
+ and we have an extra called 'all' that includes all other extras.
+
+ Example: github.com/google/etils/blob/a0b71032095db14acf6b33516bca6d885fe09e35/pyproject.toml#L32.
+
+ When the `requirements.txt` is generated by `pip-tools`, then it is likely that
+ this step is not needed, but for other `requirements.txt` files this may be useful.
+
+ NOTE @aignas 2023-12-08: the extra resolution is not platform dependent,
+ but in order for it to become platform dependent we would have to have
+ separate targets for each extra in extras.
+ """
+
+    # Resolve any extra extras due to self-edges. An empty string means no
+    # extras. The empty string in the set is just a way to make the handling
+    # of no extras and a single extra easier: having a set of {"", "foo"}
+    # is equivalent to having {"foo"}.
+ extras = extras or [""]
+
+ self_reqs = []
+ for req in reqs:
+ if req.name != self_name:
+ continue
+
+ if req.marker == None:
+ # I am pretty sure we cannot reach this code as it does not
+ # make sense to specify packages in this way, but since it is
+                # easy to handle, let's do it.
+ #
+ # TODO @aignas 2023-12-08: add a test
+ extras = extras + req.extras
+ else:
+ # process these in a separate loop
+ self_reqs.append(req)
+
+ # A double loop is not strictly optimal, but always correct without recursion
+ for req in self_reqs:
+ if [True for extra in extras if evaluate(req.marker, env = {"extra": extra})]:
+ extras = extras + req.extras
+ else:
+ continue
+
+ # Iterate through all packages to ensure that we include all of the extras from previously
+ # visited packages.
+ for req_ in self_reqs:
+ if [True for extra in extras if evaluate(req.marker, env = {"extra": extra})]:
+ extras = extras + req_.extras
+
+ # Poor mans set
+ return sorted({x: None for x in extras})
+
+def _add_req(deps, deps_select, req, *, extras, platforms, default_abi = None):
+ if not req.marker:
+ _add(deps, deps_select, req.name, None)
+ return
+
+ # NOTE @aignas 2023-12-08: in order to have reasonable select statements
+ # we do have to have some parsing of the markers, so it begs the question
+ # if packaging should be reimplemented in Starlark to have the best solution
+ # for now we will implement it in Python and see what the best parsing result
+ # can be before making this decision.
+ match_os = len([
+ tag
+ for tag in [
+ "os_name",
+ "sys_platform",
+ "platform_system",
+ ]
+ if tag in req.marker
+ ]) > 0
+ match_arch = "platform_machine" in req.marker
+ match_version = "version" in req.marker
+
+ if not (match_os or match_arch or match_version):
+ if [
+ True
+ for extra in extras
+ for p in platforms
+ if evaluate(
+ req.marker,
+ env = env(
+ target_platform = p,
+ extra = extra,
+ ),
+ )
+ ]:
+ _add(deps, deps_select, req.name, None)
+ return
+
+ for plat in platforms:
+ if not [
+ True
+ for extra in extras
+ if evaluate(
+ req.marker,
+ env = env(
+ target_platform = plat,
+ extra = extra,
+ ),
+ )
+ ]:
+ continue
+
+ if match_arch and default_abi:
+ _add(deps, deps_select, req.name, plat)
+ if plat.abi == default_abi:
+ _add(deps, deps_select, req.name, platform(os = plat.os, arch = plat.arch))
+ elif match_arch:
+ _add(deps, deps_select, req.name, platform(os = plat.os, arch = plat.arch))
+ elif match_os and default_abi:
+ _add(deps, deps_select, req.name, platform(os = plat.os, abi = plat.abi))
+ if plat.abi == default_abi:
+ _add(deps, deps_select, req.name, platform(os = plat.os))
+ elif match_os:
+ _add(deps, deps_select, req.name, platform(os = plat.os))
+ elif match_version and default_abi:
+ _add(deps, deps_select, req.name, platform(abi = plat.abi))
+ if plat.abi == default_abi:
+ _add(deps, deps_select, req.name, platform())
+ elif match_version:
+ _add(deps, deps_select, req.name, None)
+ else:
+ fail("BUG: {} support is not implemented".format(req.marker))
+
+ _maybe_add_common_dep(deps, deps_select, platforms, req.name)
diff --git a/python/private/pypi/pep508_evaluate.bzl b/python/private/pypi/pep508_evaluate.bzl
index f45eb75cdb..f8ef553034 100644
--- a/python/private/pypi/pep508_evaluate.bzl
+++ b/python/private/pypi/pep508_evaluate.bzl
@@ -138,7 +138,7 @@ def evaluate(marker, *, env, strict = True, **kwargs):
"""
tokens = tokenize(marker)
- ast = _new_expr(**kwargs)
+ ast = _new_expr(marker = marker, **kwargs)
for _ in range(len(tokens) * 2):
if not tokens:
break
@@ -219,17 +219,20 @@ def _not_fn(x):
return not x
def _new_expr(
+ *,
+ marker,
and_fn = _and_fn,
or_fn = _or_fn,
not_fn = _not_fn):
# buildifier: disable=uninitialized
self = struct(
+ marker = marker,
tree = [],
parse = lambda **kwargs: _parse(self, **kwargs),
value = lambda: _value(self),
# This is a way for us to have a handle to the currently constructed
# expression tree branch.
- current = lambda: self._current[0] if self._current else None,
+ current = lambda: self._current[-1] if self._current else None,
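+        # _current is a stack of sub-expressions opened by parentheses; the
+        # innermost (last) entry is the branch currently being built.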
_current = [],
_and = and_fn,
_or = or_fn,
@@ -313,6 +316,7 @@ def marker_expr(left, op, right, *, env, strict = True):
#
# The following normalizes the values
left = env.get(_ENV_ALIASES, {}).get(var_name, {}).get(left, left)
+
else:
var_name = left
left = env[left]
@@ -392,12 +396,15 @@ def _append(self, value):
current.tree.append(value)
elif hasattr(current.tree[-1], "append"):
current.tree[-1].append(value)
- else:
+ elif hasattr(current.tree, "_append"):
current.tree._append(value)
+ else:
+ fail("Cannot evaluate '{}' in '{}', current: {}".format(value, self.marker, current))
def _open_parenthesis(self):
"""Add an extra node into the tree to perform evaluate inside parenthesis."""
self._current.append(_new_expr(
+ marker = self.marker,
and_fn = self._and,
or_fn = self._or,
not_fn = self._not,
diff --git a/python/private/pypi/whl_installer/BUILD.bazel b/python/private/pypi/whl_installer/BUILD.bazel
index 5fb617004d..49f1a119c1 100644
--- a/python/private/pypi/whl_installer/BUILD.bazel
+++ b/python/private/pypi/whl_installer/BUILD.bazel
@@ -6,7 +6,6 @@ py_library(
srcs = [
"arguments.py",
"namespace_pkgs.py",
- "platform.py",
"wheel.py",
"wheel_installer.py",
],
diff --git a/python/private/pypi/whl_installer/arguments.py b/python/private/pypi/whl_installer/arguments.py
index 29bea8026e..bb841ea9ab 100644
--- a/python/private/pypi/whl_installer/arguments.py
+++ b/python/private/pypi/whl_installer/arguments.py
@@ -17,8 +17,6 @@
import pathlib
from typing import Any, Dict, Set
-from python.private.pypi.whl_installer.platform import Platform
-
def parser(**kwargs: Any) -> argparse.ArgumentParser:
"""Create a parser for the wheel_installer tool."""
@@ -41,12 +39,6 @@ def parser(**kwargs: Any) -> argparse.ArgumentParser:
action="store",
help="Extra arguments to pass down to pip.",
)
- parser.add_argument(
- "--platform",
- action="extend",
- type=Platform.from_string,
- help="Platforms to target dependencies. Can be used multiple times.",
- )
parser.add_argument(
"--pip_data_exclude",
action="store",
diff --git a/python/private/pypi/whl_installer/platform.py b/python/private/pypi/whl_installer/platform.py
deleted file mode 100644
index 11dd6e37ab..0000000000
--- a/python/private/pypi/whl_installer/platform.py
+++ /dev/null
@@ -1,304 +0,0 @@
-# Copyright 2024 The Bazel Authors. All rights reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Utility class to inspect an extracted wheel directory"""
-
-import platform
-import sys
-from dataclasses import dataclass
-from enum import Enum
-from typing import Any, Dict, Iterator, List, Optional, Union
-
-
-class OS(Enum):
- linux = 1
- osx = 2
- windows = 3
- darwin = osx
- win32 = windows
-
- @classmethod
- def interpreter(cls) -> "OS":
- "Return the interpreter operating system."
- return cls[sys.platform.lower()]
-
- def __str__(self) -> str:
- return self.name.lower()
-
-
-class Arch(Enum):
- x86_64 = 1
- x86_32 = 2
- aarch64 = 3
- ppc = 4
- ppc64le = 5
- s390x = 6
- arm = 7
- amd64 = x86_64
- arm64 = aarch64
- i386 = x86_32
- i686 = x86_32
- x86 = x86_32
-
- @classmethod
- def interpreter(cls) -> "Arch":
- "Return the currently running interpreter architecture."
- # FIXME @aignas 2023-12-13: Hermetic toolchain on Windows 3.11.6
- # is returning an empty string here, so lets default to x86_64
- return cls[platform.machine().lower() or "x86_64"]
-
- def __str__(self) -> str:
- return self.name.lower()
-
-
-def _as_int(value: Optional[Union[OS, Arch]]) -> int:
- """Convert one of the enums above to an int for easier sorting algorithms.
-
- Args:
- value: The value of an enum or None.
-
- Returns:
- -1 if we get None, otherwise, the numeric value of the given enum.
- """
- if value is None:
- return -1
-
- return int(value.value)
-
-
-def host_interpreter_minor_version() -> int:
- return sys.version_info.minor
-
-
-@dataclass(frozen=True)
-class Platform:
- os: Optional[OS] = None
- arch: Optional[Arch] = None
- minor_version: Optional[int] = None
-
- @classmethod
- def all(
- cls,
- want_os: Optional[OS] = None,
- minor_version: Optional[int] = None,
- ) -> List["Platform"]:
- return sorted(
- [
- cls(os=os, arch=arch, minor_version=minor_version)
- for os in OS
- for arch in Arch
- if not want_os or want_os == os
- ]
- )
-
- @classmethod
- def host(cls) -> List["Platform"]:
- """Use the Python interpreter to detect the platform.
-
- We extract `os` from sys.platform and `arch` from platform.machine
-
- Returns:
- A list of parsed values which makes the signature the same as
- `Platform.all` and `Platform.from_string`.
- """
- return [
- Platform(
- os=OS.interpreter(),
- arch=Arch.interpreter(),
- minor_version=host_interpreter_minor_version(),
- )
- ]
-
- def all_specializations(self) -> Iterator["Platform"]:
- """Return the platform itself and all its unambiguous specializations.
-
- For more info about specializations see
- https://bazel.build/docs/configurable-attributes
- """
- yield self
- if self.arch is None:
- for arch in Arch:
- yield Platform(os=self.os, arch=arch, minor_version=self.minor_version)
- if self.os is None:
- for os in OS:
- yield Platform(os=os, arch=self.arch, minor_version=self.minor_version)
- if self.arch is None and self.os is None:
- for os in OS:
- for arch in Arch:
- yield Platform(os=os, arch=arch, minor_version=self.minor_version)
-
- def __lt__(self, other: Any) -> bool:
- """Add a comparison method, so that `sorted` returns the most specialized platforms first."""
- if not isinstance(other, Platform) or other is None:
- raise ValueError(f"cannot compare {other} with Platform")
-
- self_arch, self_os = _as_int(self.arch), _as_int(self.os)
- other_arch, other_os = _as_int(other.arch), _as_int(other.os)
-
- if self_os == other_os:
- return self_arch < other_arch
- else:
- return self_os < other_os
-
- def __str__(self) -> str:
- if self.minor_version is None:
- if self.os is None and self.arch is None:
- return "//conditions:default"
-
- if self.arch is None:
- return f"@platforms//os:{self.os}"
- else:
- return f"{self.os}_{self.arch}"
-
- if self.arch is None and self.os is None:
- return f"@//python/config_settings:is_python_3.{self.minor_version}"
-
- if self.arch is None:
- return f"cp3{self.minor_version}_{self.os}_anyarch"
-
- if self.os is None:
- return f"cp3{self.minor_version}_anyos_{self.arch}"
-
- return f"cp3{self.minor_version}_{self.os}_{self.arch}"
-
- @classmethod
- def from_string(cls, platform: Union[str, List[str]]) -> List["Platform"]:
- """Parse a string and return a list of platforms"""
- platform = [platform] if isinstance(platform, str) else list(platform)
- ret = set()
- for p in platform:
- if p == "host":
- ret.update(cls.host())
- continue
-
- abi, _, tail = p.partition("_")
- if not abi.startswith("cp"):
- # The first item is not an abi
- tail = p
- abi = ""
- os, _, arch = tail.partition("_")
- arch = arch or "*"
-
- minor_version = int(abi[len("cp3") :]) if abi else None
-
- if arch != "*":
- ret.add(
- cls(
- os=OS[os] if os != "*" else None,
- arch=Arch[arch],
- minor_version=minor_version,
- )
- )
-
- else:
- ret.update(
- cls.all(
- want_os=OS[os] if os != "*" else None,
- minor_version=minor_version,
- )
- )
-
- return sorted(ret)
-
- # NOTE @aignas 2023-12-05: below is the minimum number of accessors that are defined in
- # https://peps.python.org/pep-0496/ to make rules_python generate dependencies.
- #
- # WARNING: It may not work in cases where the python implementation is different between
- # different platforms.
-
- # derived from OS
- @property
- def os_name(self) -> str:
- if self.os == OS.linux or self.os == OS.osx:
- return "posix"
- elif self.os == OS.windows:
- return "nt"
- else:
- return ""
-
- @property
- def sys_platform(self) -> str:
- if self.os == OS.linux:
- return "linux"
- elif self.os == OS.osx:
- return "darwin"
- elif self.os == OS.windows:
- return "win32"
- else:
- return ""
-
- @property
- def platform_system(self) -> str:
- if self.os == OS.linux:
- return "Linux"
- elif self.os == OS.osx:
- return "Darwin"
- elif self.os == OS.windows:
- return "Windows"
- else:
- return ""
-
- # derived from OS and Arch
- @property
- def platform_machine(self) -> str:
- """Guess the target 'platform_machine' marker.
-
- NOTE @aignas 2023-12-05: this may not work on really new systems, like
- Windows if they define the platform markers in a different way.
- """
- if self.arch == Arch.x86_64:
- return "x86_64"
- elif self.arch == Arch.x86_32 and self.os != OS.osx:
- return "i386"
- elif self.arch == Arch.x86_32:
- return ""
- elif self.arch == Arch.aarch64 and self.os == OS.linux:
- return "aarch64"
- elif self.arch == Arch.aarch64:
- # Assuming that OSX and Windows use this one since the precedent is set here:
- # https://github.com/cgohlke/win_arm64-wheels
- return "arm64"
- elif self.os != OS.linux:
- return ""
- elif self.arch == Arch.ppc:
- return "ppc"
- elif self.arch == Arch.ppc64le:
- return "ppc64le"
- elif self.arch == Arch.s390x:
- return "s390x"
- else:
- return ""
-
- def env_markers(self, extra: str) -> Dict[str, str]:
- # If it is None, use the host version
- minor_version = self.minor_version or host_interpreter_minor_version()
-
- return {
- "extra": extra,
- "os_name": self.os_name,
- "sys_platform": self.sys_platform,
- "platform_machine": self.platform_machine,
- "platform_system": self.platform_system,
- "platform_release": "", # unset
- "platform_version": "", # unset
- "python_version": f"3.{minor_version}",
- # FIXME @aignas 2024-01-14: is putting zero last a good idea? Maybe we should
- # use `20` or something else to avoid having weird issues where the full version is used for
- # matching and the author decides to only support 3.y.5 upwards.
- "implementation_version": f"3.{minor_version}.0",
- "python_full_version": f"3.{minor_version}.0",
- # we assume that the following are the same as the interpreter used to setup the deps:
- # "implementation_name": "cpython"
- # "platform_python_implementation: "CPython",
- }
diff --git a/python/private/pypi/whl_installer/wheel.py b/python/private/pypi/whl_installer/wheel.py
index d95b33a194..da81b5ea9f 100644
--- a/python/private/pypi/whl_installer/wheel.py
+++ b/python/private/pypi/whl_installer/wheel.py
@@ -25,275 +25,6 @@
from packaging.requirements import Requirement
from pip._vendor.packaging.utils import canonicalize_name
-from python.private.pypi.whl_installer.platform import (
- Platform,
- host_interpreter_minor_version,
-)
-
-
-@dataclass(frozen=True)
-class FrozenDeps:
- deps: List[str]
- deps_select: Dict[str, List[str]]
-
-
-class Deps:
- """Deps is a dependency builder that has a build() method to return FrozenDeps."""
-
- def __init__(
- self,
- name: str,
- requires_dist: List[str],
- *,
- extras: Optional[Set[str]] = None,
- platforms: Optional[Set[Platform]] = None,
- ):
- """Create a new instance and parse the requires_dist
-
- Args:
- name (str): The name of the whl distribution
- requires_dist (list[Str]): The Requires-Dist from the METADATA of the whl
- distribution.
- extras (set[str], optional): The list of requested extras, defaults to None.
- platforms (set[Platform], optional): The list of target platforms, defaults to
- None. If the list of platforms has multiple `minor_version` values, it
- will change the code to generate the select statements using
- `@rules_python//python/config_settings:is_python_3.y` conditions.
- """
- self.name: str = Deps._normalize(name)
- self._platforms: Set[Platform] = platforms or set()
- self._target_versions = {p.minor_version for p in platforms or {}}
- self._default_minor_version = None
- if platforms and len(self._target_versions) > 2:
- # TODO @aignas 2024-06-23: enable this to be set via a CLI arg
- # for being more explicit.
- self._default_minor_version = host_interpreter_minor_version()
-
- if None in self._target_versions and len(self._target_versions) > 2:
- raise ValueError(
- f"all python versions need to be specified explicitly, got: {platforms}"
- )
-
- # Sort so that the dictionary order in the FrozenDeps is deterministic
- # without the final sort because Python retains insertion order. That way
- # the sorting by platform is limited within the Platform class itself and
- # the unit-tests for the Deps can be simpler.
- reqs = sorted(
- (Requirement(wheel_req) for wheel_req in requires_dist),
- key=lambda x: f"{x.name}:{sorted(x.extras)}",
- )
-
- want_extras = self._resolve_extras(reqs, extras)
-
- # Then add all of the requirements in order
- self._deps: Set[str] = set()
- self._select: Dict[Platform, Set[str]] = defaultdict(set)
- for req in reqs:
- self._add_req(req, want_extras)
-
- def _add(self, dep: str, platform: Optional[Platform]):
- dep = Deps._normalize(dep)
-
- # Self-edges are processed in _resolve_extras
- if dep == self.name:
- return
-
- if not platform:
- self._deps.add(dep)
-
- # If the dep is in the platform-specific list, remove it from the select.
- pop_keys = []
- for p, deps in self._select.items():
- if dep not in deps:
- continue
-
- deps.remove(dep)
- if not deps:
- pop_keys.append(p)
-
- for p in pop_keys:
- self._select.pop(p)
- return
-
- if dep in self._deps:
- # If the dep is already in the main dependency list, no need to add it in the
- # platform-specific dependency list.
- return
-
- # Add the platform-specific dep
- self._select[platform].add(dep)
-
- # Add the dep to specializations of the given platform if they
- # exist in the select statement.
- for p in platform.all_specializations():
- if p not in self._select:
- continue
-
- self._select[p].add(dep)
-
- if len(self._select[platform]) == 1:
- # We are adding a new item to the select and we need to ensure that
- # existing dependencies from less specialized platforms are propagated
- # to the newly added dependency set.
- for p, deps in self._select.items():
- # Check if the existing platform overlaps with the given platform
- if p == platform or platform not in p.all_specializations():
- continue
-
- self._select[platform].update(self._select[p])
-
- def _maybe_add_common_dep(self, dep):
- if len(self._target_versions) < 2:
- return
-
- platforms = [Platform()] + [
- Platform(minor_version=v) for v in self._target_versions
- ]
-
- # If the dep is targeting all target python versions, lets add it to
- # the common dependency list to simplify the select statements.
- for p in platforms:
- if p not in self._select:
- return
-
- if dep not in self._select[p]:
- return
-
- # All of the python version-specific branches have the dep, so lets add
- # it to the common deps.
- self._deps.add(dep)
- for p in platforms:
- self._select[p].remove(dep)
- if not self._select[p]:
- self._select.pop(p)
-
- @staticmethod
- def _normalize(name: str) -> str:
- return re.sub(r"[-_.]+", "_", name).lower()
-
- def _resolve_extras(
- self, reqs: List[Requirement], extras: Optional[Set[str]]
- ) -> Set[str]:
- """Resolve extras which are due to depending on self[some_other_extra].
-
- Some packages may have cyclic dependencies resulting from extras being used, one example is
- `etils`, where we have one set of extras as aliases for other extras
- and we have an extra called 'all' that includes all other extras.
-
- Example: github.com/google/etils/blob/a0b71032095db14acf6b33516bca6d885fe09e35/pyproject.toml#L32.
-
- When the `requirements.txt` is generated by `pip-tools`, then it is likely that
- this step is not needed, but for other `requirements.txt` files this may be useful.
-
- NOTE @aignas 2023-12-08: the extra resolution is not platform dependent,
- but in order for it to become platform dependent we would have to have
- separate targets for each extra in extras.
- """
-
- # Resolve any extra extras due to self-edges, empty string means no
- # extras The empty string in the set is just a way to make the handling
- # of no extras and a single extra easier and having a set of {"", "foo"}
- # is equivalent to having {"foo"}.
- extras = extras or {""}
-
- self_reqs = []
- for req in reqs:
- if Deps._normalize(req.name) != self.name:
- continue
-
- if req.marker is None:
- # I am pretty sure we cannot reach this code as it does not
- # make sense to specify packages in this way, but since it is
- # easy to handle, lets do it.
- #
- # TODO @aignas 2023-12-08: add a test
- extras = extras | req.extras
- else:
- # process these in a separate loop
- self_reqs.append(req)
-
- # A double loop is not strictly optimal, but always correct without recursion
- for req in self_reqs:
- if any(req.marker.evaluate({"extra": extra}) for extra in extras):
- extras = extras | req.extras
- else:
- continue
-
- # Iterate through all packages to ensure that we include all of the extras from previously
- # visited packages.
- for req_ in self_reqs:
- if any(req_.marker.evaluate({"extra": extra}) for extra in extras):
- extras = extras | req_.extras
-
- return extras
-
- def _add_req(self, req: Requirement, extras: Set[str]) -> None:
- if req.marker is None:
- self._add(req.name, None)
- return
-
- marker_str = str(req.marker)
-
- if not self._platforms:
- if any(req.marker.evaluate({"extra": extra}) for extra in extras):
- self._add(req.name, None)
- return
-
- # NOTE @aignas 2023-12-08: in order to have reasonable select statements
- # we do have to have some parsing of the markers, so it begs the question
- # if packaging should be reimplemented in Starlark to have the best solution
- # for now we will implement it in Python and see what the best parsing result
- # can be before making this decision.
- match_os = any(
- tag in marker_str
- for tag in [
- "os_name",
- "sys_platform",
- "platform_system",
- ]
- )
- match_arch = "platform_machine" in marker_str
- match_version = "version" in marker_str
-
- if not (match_os or match_arch or match_version):
- if any(req.marker.evaluate({"extra": extra}) for extra in extras):
- self._add(req.name, None)
- return
-
- for plat in self._platforms:
- if not any(
- req.marker.evaluate(plat.env_markers(extra)) for extra in extras
- ):
- continue
-
- if match_arch and self._default_minor_version:
- self._add(req.name, plat)
- if plat.minor_version == self._default_minor_version:
- self._add(req.name, Platform(plat.os, plat.arch))
- elif match_arch:
- self._add(req.name, Platform(plat.os, plat.arch))
- elif match_os and self._default_minor_version:
- self._add(req.name, Platform(plat.os, minor_version=plat.minor_version))
- if plat.minor_version == self._default_minor_version:
- self._add(req.name, Platform(plat.os))
- elif match_os:
- self._add(req.name, Platform(plat.os))
- elif match_version and self._default_minor_version:
- self._add(req.name, Platform(minor_version=plat.minor_version))
- if plat.minor_version == self._default_minor_version:
- self._add(req.name, Platform())
- elif match_version:
- self._add(req.name, None)
-
- # Merge to common if possible after processing all platforms
- self._maybe_add_common_dep(req.name)
-
- def build(self) -> FrozenDeps:
- return FrozenDeps(
- deps=sorted(self._deps),
- deps_select={str(p): sorted(deps) for p, deps in self._select.items()},
- )
-
class Wheel:
"""Representation of the compressed .whl file"""
@@ -344,18 +75,6 @@ def entry_points(self) -> Dict[str, Tuple[str, str]]:
return entry_points_mapping
- def dependencies(
- self,
- extras_requested: Set[str] = None,
- platforms: Optional[Set[Platform]] = None,
- ) -> FrozenDeps:
- return Deps(
- self.name,
- extras=extras_requested,
- platforms=platforms,
- requires_dist=self.metadata.get_all("Requires-Dist", []),
- ).build()
-
def unzip(self, directory: str) -> None:
installation_schemes = {
"purelib": "/site-packages",
diff --git a/python/private/pypi/whl_installer/wheel_installer.py b/python/private/pypi/whl_installer/wheel_installer.py
index ef8181c30d..c7695d92e8 100644
--- a/python/private/pypi/whl_installer/wheel_installer.py
+++ b/python/private/pypi/whl_installer/wheel_installer.py
@@ -23,7 +23,7 @@
import sys
from pathlib import Path
from tempfile import NamedTemporaryFile
-from typing import Dict, List, Optional, Set, Tuple
+from typing import Dict, Optional, Set, Tuple
from pip._vendor.packaging.utils import canonicalize_name
@@ -103,9 +103,7 @@ def _setup_namespace_pkg_compatibility(wheel_dir: str) -> None:
def _extract_wheel(
wheel_file: str,
- extras: Dict[str, Set[str]],
enable_implicit_namespace_pkgs: bool,
- platforms: List[wheel.Platform],
installation_dir: Path = Path("."),
) -> None:
"""Extracts wheel into given directory and creates py_library and filegroup targets.
@@ -113,7 +111,6 @@ def _extract_wheel(
Args:
wheel_file: the filepath of the .whl
installation_dir: the destination directory for installation of the wheel.
- extras: a list of extras to add as dependencies for the installed wheel
enable_implicit_namespace_pkgs: if true, disables conversion of implicit namespace packages and will unzip as-is
"""
@@ -123,25 +120,19 @@ def _extract_wheel(
if not enable_implicit_namespace_pkgs:
_setup_namespace_pkg_compatibility(installation_dir)
- extras_requested = extras[whl.name] if whl.name in extras else set()
-
- dependencies = whl.dependencies(extras_requested, platforms)
+ metadata = {
+ "python_version": sys.version.partition(" ")[0],
+ "entry_points": [
+ {
+ "name": name,
+ "module": module,
+ "attribute": attribute,
+ }
+ for name, (module, attribute) in sorted(whl.entry_points().items())
+ ],
+ }
with open(os.path.join(installation_dir, "metadata.json"), "w") as f:
- metadata = {
- "name": whl.name,
- "version": whl.version,
- "deps": dependencies.deps,
- "deps_by_platform": dependencies.deps_select,
- "entry_points": [
- {
- "name": name,
- "module": module,
- "attribute": attribute,
- }
- for name, (module, attribute) in sorted(whl.entry_points().items())
- ],
- }
json.dump(metadata, f)
@@ -155,13 +146,9 @@ def main() -> None:
if args.whl_file:
whl = Path(args.whl_file)
- name, extras_for_pkg = _parse_requirement_for_extra(args.requirement)
- extras = {name: extras_for_pkg} if extras_for_pkg and name else dict()
_extract_wheel(
wheel_file=whl,
- extras=extras,
enable_implicit_namespace_pkgs=args.enable_implicit_namespace_pkgs,
- platforms=arguments.get_platforms(args),
)
return
diff --git a/python/private/pypi/whl_library.bzl b/python/private/pypi/whl_library.bzl
index 493f11353e..54f9ff3909 100644
--- a/python/private/pypi/whl_library.bzl
+++ b/python/private/pypi/whl_library.bzl
@@ -21,9 +21,13 @@ load("//python/private:repo_utils.bzl", "REPO_DEBUG_ENV_VAR", "repo_utils")
load(":attrs.bzl", "ATTRS", "use_isolated")
load(":deps.bzl", "all_repo_names", "record_files")
load(":generate_whl_library_build_bazel.bzl", "generate_whl_library_build_bazel")
+load(":parse_requirements.bzl", "host_platform")
load(":parse_whl_name.bzl", "parse_whl_name")
load(":patch_whl.bzl", "patch_whl")
+load(":pep508_deps.bzl", "deps")
+load(":pep508_requirement.bzl", "requirement")
load(":pypi_repo_utils.bzl", "pypi_repo_utils")
+load(":whl_metadata.bzl", "whl_metadata")
load(":whl_target_platforms.bzl", "whl_target_platforms")
_CPPFLAGS = "CPPFLAGS"
@@ -361,7 +365,7 @@ def _whl_library_impl(rctx):
arguments = args + [
"--whl-file",
whl_path,
- ] + ["--platform={}".format(p) for p in target_platforms],
+ ],
srcs = rctx.attr._python_srcs,
environment = environment,
quiet = rctx.attr.quiet,
@@ -396,17 +400,60 @@ def _whl_library_impl(rctx):
)
entry_points[entry_point_without_py] = entry_point_script_name
+ # TODO @aignas 2025-04-04: move this to whl_library_targets.bzl to have
+ # this in the analysis phase.
+ #
+ # This means that whl_library_targets will have to accept the following args:
+ # * name - the name of the package in the METADATA.
+ # * requires_dist - the list of METADATA Requires-Dist.
+ # * platforms - the list of target platforms. The target_platforms
+ # should come from the hub repo via a 'load' statement so that they don't
+ # need to be passed as an argument to `whl_library`.
+ # * extras - the list of required extras. This comes from the
+ # `rctx.attr.requirement` for now. In the future the required extras could
+ # stay in the hub repo, where we calculate the extra aliases that we need
+ # to create automatically and this way expose the targets for the specific
+ # extras. The first step will be to generate a target per extra for the
+ # `py_library` and `filegroup`. Maybe we need to have a special provider
+ # or an output group so that we can return the `whl` file from the
+ # `py_library` target? filegroup can use output groups to expose files.
+    # * host_python_version/versions - the list of python versions to support
+ # should come from the hub, similar to how the target platforms are specified.
+ #
+ # Extra things that we should move at the same time:
+    # * group_name, group_deps - this info can stay in the hub repository so that
+    #   it is piped in at analysis time and changing the requirement groups does
+    #   not cause the deps to be re-fetched.
+ python_version = metadata["python_version"]
+ metadata = whl_metadata(
+ install_dir = rctx.path("site-packages"),
+ read_fn = rctx.read,
+ logger = logger,
+ )
+
+ # TODO @aignas 2025-04-09: this will later be removed when loaded through the hub
+ major_minor, _, _ = python_version.rpartition(".")
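+    # Evaluate the Requires-Dist markers from the METADATA against the target
+    # platforms to compute the common deps and the per-platform select() deps.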
+ package_deps = deps(
+ name = metadata.name,
+ requires_dist = metadata.requires_dist,
+ platforms = target_platforms or [
+ "cp{}_{}".format(major_minor.replace(".", ""), host_platform(rctx)),
+ ],
+ extras = requirement(rctx.attr.requirement).extras,
+ host_python_version = python_version,
+ )
+
build_file_contents = generate_whl_library_build_bazel(
name = whl_path.basename,
dep_template = rctx.attr.dep_template or "@{}{{name}}//:{{target}}".format(rctx.attr.repo_prefix),
- dependencies = metadata["deps"],
- dependencies_by_platform = metadata["deps_by_platform"],
+ dependencies = package_deps.deps,
+ dependencies_by_platform = package_deps.deps_select,
group_name = rctx.attr.group_name,
group_deps = rctx.attr.group_deps,
data_exclude = rctx.attr.pip_data_exclude,
tags = [
- "pypi_name=" + metadata["name"],
- "pypi_version=" + metadata["version"],
+ "pypi_name=" + metadata.name,
+ "pypi_version=" + metadata.version,
],
entry_points = entry_points,
annotation = None if not rctx.attr.annotation else struct(**json.decode(rctx.read(rctx.attr.annotation))),
diff --git a/python/private/pypi/whl_library_targets.bzl b/python/private/pypi/whl_library_targets.bzl
index 95031e6181..d32746b604 100644
--- a/python/private/pypi/whl_library_targets.bzl
+++ b/python/private/pypi/whl_library_targets.bzl
@@ -90,8 +90,6 @@ def whl_library_targets(
native: {type}`native` The native struct for overriding in tests.
rules: {type}`struct` A struct with references to rules for creating targets.
"""
- _ = name # buildifier: @unused
-
dependencies = sorted([normalize_name(d) for d in dependencies])
dependencies_by_platform = {
platform: sorted([normalize_name(d) for d in deps])
diff --git a/python/private/pypi/whl_metadata.bzl b/python/private/pypi/whl_metadata.bzl
new file mode 100644
index 0000000000..8a86ffbff1
--- /dev/null
+++ b/python/private/pypi/whl_metadata.bzl
@@ -0,0 +1,108 @@
+"""A simple function to find the METADATA file and parse it"""
+
+_NAME = "Name: "
+_PROVIDES_EXTRA = "Provides-Extra: "
+_REQUIRES_DIST = "Requires-Dist: "
+_VERSION = "Version: "
+
+def whl_metadata(*, install_dir, read_fn, logger):
+ """Find and parse the METADATA file in the extracted whl contents dir.
+
+ Args:
+ install_dir: {type}`path` location where the wheel has been extracted.
+ read_fn: the function used to read files.
+ logger: the function used to log failures.
+
+ Returns:
+ A struct with parsed values:
+ * `name`: {type}`str` the name of the wheel.
+ * `version`: {type}`str` the version of the wheel.
+ * `requires_dist`: {type}`list[str]` the list of requirements.
+ * `provides_extra`: {type}`list[str]` the list of extras that this package
+ provides.
+ """
+ metadata_file = find_whl_metadata(install_dir = install_dir, logger = logger)
+ contents = read_fn(metadata_file)
+ result = parse_whl_metadata(contents)
+
+ if not (result.name and result.version):
+        logger.fail("Failed to parse the wheel METADATA file:\n{}".format(contents))
+ return None
+
+ return result
+
+def parse_whl_metadata(contents):
+ """Parse .whl METADATA file
+
+ Args:
+ contents: {type}`str` the contents of the file.
+
+ Returns:
+ A struct with parsed values:
+ * `name`: {type}`str` the name of the wheel.
+ * `version`: {type}`str` the version of the wheel.
+ * `requires_dist`: {type}`list[str]` the list of requirements.
+ * `provides_extra`: {type}`list[str]` the list of extras that this package
+ provides.
+ """
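+    # For example, a METADATA header such as:
+    #
+    #   Name: foo
+    #   Version: 1.0.0
+    #   Provides-Extra: ssl
+    #   Requires-Dist: bar ; extra == "ssl"
+    #
+    # parses into struct(name = "foo", version = "1.0.0",
+    # provides_extra = ["ssl"], requires_dist = ['bar ; extra == "ssl"']).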
+ parsed = {
+ "name": "",
+ "provides_extra": [],
+ "requires_dist": [],
+ "version": "",
+ }
+ for line in contents.strip().split("\n"):
+ if not line.strip():
+            # Stop parsing at the first empty line, which marks the end of the
+            # headers containing the metadata.
+ break
+
+ if line.startswith(_NAME):
+ _, _, value = line.partition(_NAME)
+ parsed["name"] = value.strip()
+ elif line.startswith(_VERSION):
+ _, _, value = line.partition(_VERSION)
+ parsed["version"] = value.strip()
+ elif line.startswith(_REQUIRES_DIST):
+ _, _, value = line.partition(_REQUIRES_DIST)
+ parsed["requires_dist"].append(value.strip(" "))
+ elif line.startswith(_PROVIDES_EXTRA):
+ _, _, value = line.partition(_PROVIDES_EXTRA)
+ parsed["provides_extra"].append(value.strip(" "))
+
+ return struct(
+ name = parsed["name"],
+ provides_extra = parsed["provides_extra"],
+ requires_dist = parsed["requires_dist"],
+ version = parsed["version"],
+ )
+
+def find_whl_metadata(*, install_dir, logger):
+ """Find the whl METADATA file in the install_dir.
+
+ Args:
+ install_dir: {type}`path` location where the wheel has been extracted.
+ logger: the function used to log failures.
+
+ Returns:
+ {type}`path` The path to the METADATA file.
+ """
+ dist_info = None
+ for maybe_dist_info in install_dir.readdir():
+ # first find the ".dist-info" folder
+ if not (maybe_dist_info.is_dir and maybe_dist_info.basename.endswith(".dist-info")):
+ continue
+
+ dist_info = maybe_dist_info
+ metadata_file = dist_info.get_child("METADATA")
+
+ if metadata_file.exists:
+ return metadata_file
+
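+        # Only the first *.dist-info directory is inspected; a well-formed
+        # wheel contains exactly one.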
+ break
+
+ if dist_info:
+ logger.fail("The METADATA file for the wheel could not be found in '{}/{}'".format(install_dir.basename, dist_info.basename))
+ else:
+ logger.fail("The '*.dist-info' directory could not be found in '{}'".format(install_dir.basename))
+ return None
diff --git a/tests/pypi/pep508/BUILD.bazel b/tests/pypi/pep508/BUILD.bazel
index 575f28ada6..7eab2e096a 100644
--- a/tests/pypi/pep508/BUILD.bazel
+++ b/tests/pypi/pep508/BUILD.bazel
@@ -1,6 +1,11 @@
+load(":deps_tests.bzl", "deps_test_suite")
load(":evaluate_tests.bzl", "evaluate_test_suite")
load(":requirement_tests.bzl", "requirement_test_suite")
+deps_test_suite(
+ name = "deps_tests",
+)
+
evaluate_test_suite(
name = "evaluate_tests",
)
diff --git a/tests/pypi/pep508/deps_tests.bzl b/tests/pypi/pep508/deps_tests.bzl
new file mode 100644
index 0000000000..44031ab6a5
--- /dev/null
+++ b/tests/pypi/pep508/deps_tests.bzl
@@ -0,0 +1,385 @@
+# Copyright 2025 The Bazel Authors. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Tests for construction of Python version matching config settings."""
+
+load("@rules_testing//lib:test_suite.bzl", "test_suite")
+load("//python/private/pypi:pep508_deps.bzl", "deps") # buildifier: disable=bzl-visibility
+
+_tests = []
+
+def test_simple_deps(env):
+ got = deps(
+ "foo",
+ requires_dist = ["bar-Bar"],
+ )
+ env.expect.that_collection(got.deps).contains_exactly(["bar_bar"])
+ env.expect.that_dict(got.deps_select).contains_exactly({})
+
+_tests.append(test_simple_deps)
+
+def test_can_add_os_specific_deps(env):
+ got = deps(
+ "foo",
+ requires_dist = [
+ "bar",
+ "an_osx_dep; sys_platform=='darwin'",
+ "posix_dep; os_name=='posix'",
+ "win_dep; os_name=='nt'",
+ ],
+ platforms = [
+ "linux_x86_64",
+ "osx_x86_64",
+ "osx_aarch64",
+ "windows_x86_64",
+ ],
+ host_python_version = "3.3.1",
+ )
+
+ env.expect.that_collection(got.deps).contains_exactly(["bar"])
+ env.expect.that_dict(got.deps_select).contains_exactly({
+ "@platforms//os:linux": ["posix_dep"],
+ "@platforms//os:osx": ["an_osx_dep", "posix_dep"],
+ "@platforms//os:windows": ["win_dep"],
+ })
+
+_tests.append(test_can_add_os_specific_deps)
+
+def test_can_add_os_specific_deps_with_python_version(env):
+ got = deps(
+ "foo",
+ requires_dist = [
+ "bar",
+ "an_osx_dep; sys_platform=='darwin'",
+ "posix_dep; os_name=='posix'",
+ "win_dep; os_name=='nt'",
+ ],
+ platforms = [
+ "cp33_linux_x86_64",
+ "cp33_osx_x86_64",
+ "cp33_osx_aarch64",
+ "cp33_windows_x86_64",
+ ],
+ )
+
+ env.expect.that_collection(got.deps).contains_exactly(["bar"])
+ env.expect.that_dict(got.deps_select).contains_exactly({
+ "@platforms//os:linux": ["posix_dep"],
+ "@platforms//os:osx": ["an_osx_dep", "posix_dep"],
+ "@platforms//os:windows": ["win_dep"],
+ })
+
+_tests.append(test_can_add_os_specific_deps_with_python_version)
+
+def test_deps_are_added_to_more_specialized_platforms(env):
+ got = deps(
+ "foo",
+ requires_dist = [
+ "m1_dep; sys_platform=='darwin' and platform_machine=='arm64'",
+ "mac_dep; sys_platform=='darwin'",
+ ],
+ platforms = [
+ "osx_x86_64",
+ "osx_aarch64",
+ ],
+ host_python_version = "3.8.4",
+ )
+
+ env.expect.that_collection(got.deps).contains_exactly([])
+ env.expect.that_dict(got.deps_select).contains_exactly({
+ "@platforms//os:osx": ["mac_dep"],
+ "osx_aarch64": ["m1_dep", "mac_dep"],
+ })
+
+_tests.append(test_deps_are_added_to_more_specialized_platforms)
+
+def test_deps_from_more_specialized_platforms_are_propagated(env):
+ got = deps(
+ "foo",
+ requires_dist = [
+ "a_mac_dep; sys_platform=='darwin'",
+ "m1_dep; sys_platform=='darwin' and platform_machine=='arm64'",
+ ],
+ platforms = [
+ "osx_x86_64",
+ "osx_aarch64",
+ ],
+ host_python_version = "3.8.4",
+ )
+
+ env.expect.that_collection(got.deps).contains_exactly([])
+ env.expect.that_dict(got.deps_select).contains_exactly(
+ {
+ "@platforms//os:osx": ["a_mac_dep"],
+ "osx_aarch64": ["a_mac_dep", "m1_dep"],
+ },
+ )
+
+_tests.append(test_deps_from_more_specialized_platforms_are_propagated)
+
+def test_non_platform_markers_are_added_to_common_deps(env):
+ got = deps(
+ "foo",
+ requires_dist = [
+ "bar",
+ "baz; implementation_name=='cpython'",
+ "m1_dep; sys_platform=='darwin' and platform_machine=='arm64'",
+ ],
+ platforms = [
+ "linux_x86_64",
+ "osx_x86_64",
+ "osx_aarch64",
+ "windows_x86_64",
+ ],
+ host_python_version = "3.8.4",
+ )
+
+ env.expect.that_collection(got.deps).contains_exactly(["bar", "baz"])
+ env.expect.that_dict(got.deps_select).contains_exactly({
+ "osx_aarch64": ["m1_dep"],
+ })
+
+_tests.append(test_non_platform_markers_are_added_to_common_deps)
+
+def test_self_is_ignored(env):
+ got = deps(
+ "foo",
+ requires_dist = [
+ "bar",
+ "req_dep; extra == 'requests'",
+ "foo[requests]; extra == 'ssl'",
+ "ssl_lib; extra == 'ssl'",
+ ],
+ extras = ["ssl"],
+ )
+
+ env.expect.that_collection(got.deps).contains_exactly(["bar", "req_dep", "ssl_lib"])
+ env.expect.that_dict(got.deps_select).contains_exactly({})
+
+_tests.append(test_self_is_ignored)
+
+def test_self_dependencies_can_come_in_any_order(env):
+ got = deps(
+ "foo",
+ requires_dist = [
+ "bar",
+ "baz; extra == 'feat'",
+ "foo[feat2]; extra == 'all'",
+ "foo[feat]; extra == 'feat2'",
+ "zdep; extra == 'all'",
+ ],
+ extras = ["all"],
+ )
+
+ env.expect.that_collection(got.deps).contains_exactly(["bar", "baz", "zdep"])
+ env.expect.that_dict(got.deps_select).contains_exactly({})
+
+_tests.append(test_self_dependencies_can_come_in_any_order)
+
+def _test_can_get_deps_based_on_specific_python_version(env):
+ requires_dist = [
+ "bar",
+ "baz; python_version < '3.8'",
+ "posix_dep; os_name=='posix' and python_version >= '3.8'",
+ ]
+
+ py38 = deps(
+ "foo",
+ requires_dist = requires_dist,
+ platforms = ["cp38_linux_x86_64"],
+ )
+ py37 = deps(
+ "foo",
+ requires_dist = requires_dist,
+ platforms = ["cp37_linux_x86_64"],
+ )
+
+ env.expect.that_collection(py37.deps).contains_exactly(["bar", "baz"])
+ env.expect.that_dict(py37.deps_select).contains_exactly({})
+ env.expect.that_collection(py38.deps).contains_exactly(["bar"])
+ env.expect.that_dict(py38.deps_select).contains_exactly({"@platforms//os:linux": ["posix_dep"]})
+
+_tests.append(_test_can_get_deps_based_on_specific_python_version)
+
+def _test_no_version_select_when_single_version(env):
+ requires_dist = [
+ "bar",
+ "baz; python_version >= '3.8'",
+ "posix_dep; os_name=='posix'",
+ "posix_dep_with_version; os_name=='posix' and python_version >= '3.8'",
+ "arch_dep; platform_machine=='x86_64' and python_version >= '3.8'",
+ ]
+ host_python_version = "3.7.5"
+
+ got = deps(
+ "foo",
+ requires_dist = requires_dist,
+ platforms = [
+ "cp38_linux_x86_64",
+ "cp38_windows_x86_64",
+ ],
+ host_python_version = host_python_version,
+ )
+
+ env.expect.that_collection(got.deps).contains_exactly(["bar", "baz"])
+ env.expect.that_dict(got.deps_select).contains_exactly({
+ "@platforms//os:linux": ["posix_dep", "posix_dep_with_version"],
+ "linux_x86_64": ["arch_dep", "posix_dep", "posix_dep_with_version"],
+ "windows_x86_64": ["arch_dep"],
+ })
+
+_tests.append(_test_no_version_select_when_single_version)
+
+def _test_can_get_version_select(env):
+ requires_dist = [
+ "bar",
+ "baz; python_version < '3.8'",
+ "baz_new; python_version >= '3.8'",
+ "posix_dep; os_name=='posix'",
+ "posix_dep_with_version; os_name=='posix' and python_version >= '3.8'",
+ "arch_dep; platform_machine=='x86_64' and python_version < '3.8'",
+ ]
+ host_python_version = "3.7.4"
+
+ got = deps(
+ "foo",
+ requires_dist = requires_dist,
+ platforms = [
+ "cp3{}_{}_x86_64".format(minor, os)
+ for minor in [7, 8, 9]
+ for os in ["linux", "windows"]
+ ],
+ host_python_version = host_python_version,
+ )
+
+ env.expect.that_collection(got.deps).contains_exactly(["bar"])
+ env.expect.that_dict(got.deps_select).contains_exactly({
+ str(Label("//python/config_settings:is_python_3.7")): ["baz"],
+ str(Label("//python/config_settings:is_python_3.8")): ["baz_new"],
+ str(Label("//python/config_settings:is_python_3.9")): ["baz_new"],
+ "@platforms//os:linux": ["baz", "posix_dep"],
+ "cp37_linux_anyarch": ["baz", "posix_dep"],
+ "cp37_linux_x86_64": ["arch_dep", "baz", "posix_dep"],
+ "cp37_windows_x86_64": ["arch_dep", "baz"],
+ "cp38_linux_anyarch": [
+ "baz_new",
+ "posix_dep",
+ "posix_dep_with_version",
+ ],
+ "cp39_linux_anyarch": [
+ "baz_new",
+ "posix_dep",
+ "posix_dep_with_version",
+ ],
+ "linux_x86_64": ["arch_dep", "baz", "posix_dep"],
+ "windows_x86_64": ["arch_dep", "baz"],
+ "//conditions:default": ["baz"],
+ })
+
+_tests.append(_test_can_get_version_select)
+
+def _test_deps_spanning_all_target_py_versions_are_added_to_common(env):
+ requires_dist = [
+ "bar",
+ "baz (<2,>=1.11) ; python_version < '3.8'",
+ "baz (<2,>=1.14) ; python_version >= '3.8'",
+ ]
+ host_python_version = "3.8.4"
+
+ got = deps(
+ "foo",
+ requires_dist = requires_dist,
+ platforms = [
+ "cp3{}_linux_x86_64".format(minor)
+ for minor in [7, 8, 9]
+ ],
+ host_python_version = host_python_version,
+ )
+
+ env.expect.that_collection(got.deps).contains_exactly(["bar", "baz"])
+ env.expect.that_dict(got.deps_select).contains_exactly({})
+
+_tests.append(_test_deps_spanning_all_target_py_versions_are_added_to_common)
+
+def _test_deps_are_not_duplicated(env):
+ host_python_version = "3.7.4"
+
+ # See an example in
+ # https://files.pythonhosted.org/packages/76/9e/db1c2d56c04b97981c06663384f45f28950a73d9acf840c4006d60d0a1ff/opencv_python-4.9.0.80-cp37-abi3-win32.whl.metadata
+ requires_dist = [
+ "bar >=0.1.0 ; python_version < '3.7'",
+ "bar >=0.2.0 ; python_version >= '3.7'",
+ "bar >=0.4.0 ; python_version >= '3.6' and platform_system == 'Linux' and platform_machine == 'aarch64'",
+ "bar >=0.4.0 ; python_version >= '3.9'",
+ "bar >=0.5.0 ; python_version <= '3.9' and platform_system == 'Darwin' and platform_machine == 'arm64'",
+ "bar >=0.5.0 ; python_version >= '3.10' and platform_system == 'Darwin'",
+ "bar >=0.5.0 ; python_version >= '3.10'",
+ "bar >=0.6.0 ; python_version >= '3.11'",
+ ]
+
+ got = deps(
+ "foo",
+ requires_dist = requires_dist,
+ platforms = [
+ "cp3{}_{}_{}".format(minor, os, arch)
+ for minor in [7, 10]
+ for os in ["linux", "osx", "windows"]
+ for arch in ["x86_64", "aarch64"]
+ ],
+ host_python_version = host_python_version,
+ )
+
+ env.expect.that_collection(got.deps).contains_exactly(["bar"])
+ env.expect.that_dict(got.deps_select).contains_exactly({})
+
+_tests.append(_test_deps_are_not_duplicated)
+
+def _test_deps_are_not_duplicated_when_encountering_platform_dep_first(env):
+ host_python_version = "3.7.1"
+
+    # Note that we are sorting the incoming `requires_dist` and we need to ensure
+    # that we do not get any issues even if the platform-specific line comes first.
+ requires_dist = [
+ "bar >=0.4.0 ; python_version >= '3.6' and platform_system == 'Linux' and platform_machine == 'aarch64'",
+ "bar >=0.5.0 ; python_version >= '3.9'",
+ ]
+
+ got = deps(
+ "foo",
+ requires_dist = requires_dist,
+ platforms = [
+ "cp37_linux_aarch64",
+ "cp37_linux_x86_64",
+ "cp310_linux_aarch64",
+ "cp310_linux_x86_64",
+ ],
+ host_python_version = host_python_version,
+ )
+
+    # TODO @aignas 2025-02-24: this test case passes in the Python version but
+    # I am not sure why. The Starlark version's behaviour looks more correct.
+ env.expect.that_collection(got.deps).contains_exactly([])
+ env.expect.that_dict(got.deps_select).contains_exactly({
+ str(Label("//python/config_settings:is_python_3.10")): ["bar"],
+ "cp310_linux_aarch64": ["bar"],
+ "cp37_linux_aarch64": ["bar"],
+ "linux_aarch64": ["bar"],
+ })
+
+_tests.append(_test_deps_are_not_duplicated_when_encountering_platform_dep_first)
+
+def deps_test_suite(name): # buildifier: disable=function-docstring
+ test_suite(
+ name = name,
+ basic_tests = _tests,
+ )
diff --git a/tests/pypi/pep508/evaluate_tests.bzl b/tests/pypi/pep508/evaluate_tests.bzl
index 80b70f4dad..14e5e40b43 100644
--- a/tests/pypi/pep508/evaluate_tests.bzl
+++ b/tests/pypi/pep508/evaluate_tests.bzl
@@ -148,6 +148,8 @@ def _logical_expression_tests(env):
# expr
"os_name == 'fo'": False,
"(os_name == 'fo')": False,
+ "((os_name == 'fo'))": False,
+ "((os_name == 'foo'))": True,
"not (os_name == 'fo')": True,
# and
diff --git a/tests/pypi/whl_installer/BUILD.bazel b/tests/pypi/whl_installer/BUILD.bazel
index 040e4d765f..fea6a46d01 100644
--- a/tests/pypi/whl_installer/BUILD.bazel
+++ b/tests/pypi/whl_installer/BUILD.bazel
@@ -27,18 +27,6 @@ py_test(
],
)
-py_test(
- name = "platform_test",
- size = "small",
- srcs = [
- "platform_test.py",
- ],
- data = ["//examples/wheel:minimal_with_py_package"],
- deps = [
- ":lib",
- ],
-)
-
py_test(
name = "wheel_installer_test",
size = "small",
@@ -50,15 +38,3 @@ py_test(
":lib",
],
)
-
-py_test(
- name = "wheel_test",
- size = "small",
- srcs = [
- "wheel_test.py",
- ],
- data = ["//examples/wheel:minimal_with_py_package"],
- deps = [
- ":lib",
- ],
-)
diff --git a/tests/pypi/whl_installer/arguments_test.py b/tests/pypi/whl_installer/arguments_test.py
index 5538054a59..9f73ae96a9 100644
--- a/tests/pypi/whl_installer/arguments_test.py
+++ b/tests/pypi/whl_installer/arguments_test.py
@@ -15,7 +15,7 @@
import json
import unittest
-from python.private.pypi.whl_installer import arguments, wheel
+from python.private.pypi.whl_installer import arguments
class ArgumentsTestCase(unittest.TestCase):
@@ -49,18 +49,6 @@ def test_deserialize_structured_args(self) -> None:
self.assertEqual(args["environment"], {"PIP_DO_SOMETHING": "True"})
self.assertEqual(args["extra_pip_args"], [])
- def test_platform_aggregation(self) -> None:
- parser = arguments.parser()
- args = parser.parse_args(
- args=[
- "--platform=linux_*",
- "--platform=osx_*",
- "--platform=windows_*",
- "--requirement=foo",
- ]
- )
- self.assertEqual(set(wheel.Platform.all()), arguments.get_platforms(args))
-
if __name__ == "__main__":
unittest.main()
diff --git a/tests/pypi/whl_installer/platform_test.py b/tests/pypi/whl_installer/platform_test.py
deleted file mode 100644
index 2aeb4caa69..0000000000
--- a/tests/pypi/whl_installer/platform_test.py
+++ /dev/null
@@ -1,154 +0,0 @@
-import unittest
-from random import shuffle
-
-from python.private.pypi.whl_installer.platform import (
- OS,
- Arch,
- Platform,
- host_interpreter_minor_version,
-)
-
-
-class MinorVersionTest(unittest.TestCase):
- def test_host(self):
- host = host_interpreter_minor_version()
- self.assertIsNotNone(host)
-
-
-class PlatformTest(unittest.TestCase):
- def test_can_get_host(self):
- host = Platform.host()
- self.assertIsNotNone(host)
- self.assertEqual(1, len(Platform.from_string("host")))
- self.assertEqual(host, Platform.from_string("host"))
-
- def test_can_get_linux_x86_64_without_py_version(self):
- got = Platform.from_string("linux_x86_64")
- want = Platform(os=OS.linux, arch=Arch.x86_64)
- self.assertEqual(want, got[0])
-
- def test_can_get_specific_from_string(self):
- got = Platform.from_string("cp33_linux_x86_64")
- want = Platform(os=OS.linux, arch=Arch.x86_64, minor_version=3)
- self.assertEqual(want, got[0])
-
- def test_can_get_all_for_py_version(self):
- cp39 = Platform.all(minor_version=9)
- self.assertEqual(21, len(cp39), f"Got {cp39}")
- self.assertEqual(cp39, Platform.from_string("cp39_*"))
-
- def test_can_get_all_for_os(self):
- linuxes = Platform.all(OS.linux, minor_version=9)
- self.assertEqual(7, len(linuxes))
- self.assertEqual(linuxes, Platform.from_string("cp39_linux_*"))
-
- def test_can_get_all_for_os_for_host_python(self):
- linuxes = Platform.all(OS.linux)
- self.assertEqual(7, len(linuxes))
- self.assertEqual(linuxes, Platform.from_string("linux_*"))
-
- def test_specific_version_specializations(self):
- any_py33 = Platform(minor_version=3)
-
- # When
- all_specializations = list(any_py33.all_specializations())
-
- want = (
- [any_py33]
- + [
- Platform(arch=arch, minor_version=any_py33.minor_version)
- for arch in Arch
- ]
- + [Platform(os=os, minor_version=any_py33.minor_version) for os in OS]
- + Platform.all(minor_version=any_py33.minor_version)
- )
- self.assertEqual(want, all_specializations)
-
- def test_aarch64_specializations(self):
- any_aarch64 = Platform(arch=Arch.aarch64)
- all_specializations = list(any_aarch64.all_specializations())
- want = [
- Platform(os=None, arch=Arch.aarch64),
- Platform(os=OS.linux, arch=Arch.aarch64),
- Platform(os=OS.osx, arch=Arch.aarch64),
- Platform(os=OS.windows, arch=Arch.aarch64),
- ]
- self.assertEqual(want, all_specializations)
-
- def test_linux_specializations(self):
- any_linux = Platform(os=OS.linux)
- all_specializations = list(any_linux.all_specializations())
- want = [
- Platform(os=OS.linux, arch=None),
- Platform(os=OS.linux, arch=Arch.x86_64),
- Platform(os=OS.linux, arch=Arch.x86_32),
- Platform(os=OS.linux, arch=Arch.aarch64),
- Platform(os=OS.linux, arch=Arch.ppc),
- Platform(os=OS.linux, arch=Arch.ppc64le),
- Platform(os=OS.linux, arch=Arch.s390x),
- Platform(os=OS.linux, arch=Arch.arm),
- ]
- self.assertEqual(want, all_specializations)
-
- def test_osx_specializations(self):
- any_osx = Platform(os=OS.osx)
- all_specializations = list(any_osx.all_specializations())
- # NOTE @aignas 2024-01-14: even though in practice we would only have
- # Python on osx aarch64 and osx x86_64, we return all arch posibilities
- # to make the code simpler.
- want = [
- Platform(os=OS.osx, arch=None),
- Platform(os=OS.osx, arch=Arch.x86_64),
- Platform(os=OS.osx, arch=Arch.x86_32),
- Platform(os=OS.osx, arch=Arch.aarch64),
- Platform(os=OS.osx, arch=Arch.ppc),
- Platform(os=OS.osx, arch=Arch.ppc64le),
- Platform(os=OS.osx, arch=Arch.s390x),
- Platform(os=OS.osx, arch=Arch.arm),
- ]
- self.assertEqual(want, all_specializations)
-
- def test_platform_sort(self):
- platforms = [
- Platform(os=OS.linux, arch=None),
- Platform(os=OS.linux, arch=Arch.x86_64),
- Platform(os=OS.osx, arch=None),
- Platform(os=OS.osx, arch=Arch.x86_64),
- Platform(os=OS.osx, arch=Arch.aarch64),
- ]
- shuffle(platforms)
- platforms.sort()
- want = [
- Platform(os=OS.linux, arch=None),
- Platform(os=OS.linux, arch=Arch.x86_64),
- Platform(os=OS.osx, arch=None),
- Platform(os=OS.osx, arch=Arch.x86_64),
- Platform(os=OS.osx, arch=Arch.aarch64),
- ]
-
- self.assertEqual(want, platforms)
-
- def test_wheel_os_alias(self):
- self.assertEqual("osx", str(OS.osx))
- self.assertEqual(str(OS.darwin), str(OS.osx))
-
- def test_wheel_arch_alias(self):
- self.assertEqual("x86_64", str(Arch.x86_64))
- self.assertEqual(str(Arch.amd64), str(Arch.x86_64))
-
- def test_wheel_platform_alias(self):
- give = Platform(
- os=OS.darwin,
- arch=Arch.amd64,
- )
- alias = Platform(
- os=OS.osx,
- arch=Arch.x86_64,
- )
-
- self.assertEqual("osx_x86_64", str(give))
- self.assertEqual(str(alias), str(give))
-
-
-if __name__ == "__main__":
- unittest.main()
diff --git a/tests/pypi/whl_installer/wheel_installer_test.py b/tests/pypi/whl_installer/wheel_installer_test.py
index 7139779c3e..3c118af3c4 100644
--- a/tests/pypi/whl_installer/wheel_installer_test.py
+++ b/tests/pypi/whl_installer/wheel_installer_test.py
@@ -22,39 +22,6 @@
from python.private.pypi.whl_installer import wheel_installer
-class TestRequirementExtrasParsing(unittest.TestCase):
- def test_parses_requirement_for_extra(self) -> None:
- cases = [
- ("name[foo]", ("name", frozenset(["foo"]))),
- ("name[ Foo123 ]", ("name", frozenset(["Foo123"]))),
- (" name1[ foo ] ", ("name1", frozenset(["foo"]))),
- ("Name[foo]", ("name", frozenset(["foo"]))),
- ("name_foo[bar]", ("name-foo", frozenset(["bar"]))),
- (
- "name [fred,bar] @ http://foo.com ; python_version=='2.7'",
- ("name", frozenset(["fred", "bar"])),
- ),
- (
- "name[quux, strange];python_version<'2.7' and platform_version=='2'",
- ("name", frozenset(["quux", "strange"])),
- ),
- (
- "name; (os_name=='a' or os_name=='b') and os_name=='c'",
- (None, None),
- ),
- (
- "name@http://foo.com",
- (None, None),
- ),
- ]
-
- for case, expected in cases:
- with self.subTest():
- self.assertTupleEqual(
- wheel_installer._parse_requirement_for_extra(case), expected
- )
-
-
class TestWhlFilegroup(unittest.TestCase):
def setUp(self) -> None:
self.wheel_name = "example_minimal_package-0.0.1-py3-none-any.whl"
@@ -68,10 +35,8 @@ def tearDown(self):
def test_wheel_exists(self) -> None:
wheel_installer._extract_wheel(
Path(self.wheel_path),
- installation_dir=Path(self.wheel_dir),
- extras={},
enable_implicit_namespace_pkgs=False,
- platforms=[],
+ installation_dir=Path(self.wheel_dir),
)
want_files = [
@@ -92,11 +57,8 @@ def test_wheel_exists(self) -> None:
metadata_file_content = json.load(metadata_file)
want = dict(
- version="0.0.1",
- name="example-minimal-package",
- deps=[],
- deps_by_platform={},
entry_points=[],
+ python_version="3.11.11",
)
self.assertEqual(want, metadata_file_content)
diff --git a/tests/pypi/whl_installer/wheel_test.py b/tests/pypi/whl_installer/wheel_test.py
deleted file mode 100644
index 404218e12b..0000000000
--- a/tests/pypi/whl_installer/wheel_test.py
+++ /dev/null
@@ -1,371 +0,0 @@
-import unittest
-from unittest import mock
-
-from python.private.pypi.whl_installer import wheel
-from python.private.pypi.whl_installer.platform import OS, Arch, Platform
-
-_HOST_INTERPRETER_FN = (
- "python.private.pypi.whl_installer.wheel.host_interpreter_minor_version"
-)
-
-
-class DepsTest(unittest.TestCase):
- def test_simple(self):
- deps = wheel.Deps("foo", requires_dist=["bar"])
-
- got = deps.build()
-
- self.assertIsInstance(got, wheel.FrozenDeps)
- self.assertEqual(["bar"], got.deps)
- self.assertEqual({}, got.deps_select)
-
- def test_can_add_os_specific_deps(self):
- deps = wheel.Deps(
- "foo",
- requires_dist=[
- "bar",
- "an_osx_dep; sys_platform=='darwin'",
- "posix_dep; os_name=='posix'",
- "win_dep; os_name=='nt'",
- ],
- platforms={
- Platform(os=OS.linux, arch=Arch.x86_64),
- Platform(os=OS.osx, arch=Arch.x86_64),
- Platform(os=OS.osx, arch=Arch.aarch64),
- Platform(os=OS.windows, arch=Arch.x86_64),
- },
- )
-
- got = deps.build()
-
- self.assertEqual(["bar"], got.deps)
- self.assertEqual(
- {
- "@platforms//os:linux": ["posix_dep"],
- "@platforms//os:osx": ["an_osx_dep", "posix_dep"],
- "@platforms//os:windows": ["win_dep"],
- },
- got.deps_select,
- )
-
- def test_can_add_os_specific_deps_with_specific_python_version(self):
- deps = wheel.Deps(
- "foo",
- requires_dist=[
- "bar",
- "an_osx_dep; sys_platform=='darwin'",
- "posix_dep; os_name=='posix'",
- "win_dep; os_name=='nt'",
- ],
- platforms={
- Platform(os=OS.linux, arch=Arch.x86_64, minor_version=8),
- Platform(os=OS.osx, arch=Arch.x86_64, minor_version=8),
- Platform(os=OS.osx, arch=Arch.aarch64, minor_version=8),
- Platform(os=OS.windows, arch=Arch.x86_64, minor_version=8),
- },
- )
-
- got = deps.build()
-
- self.assertEqual(["bar"], got.deps)
- self.assertEqual(
- {
- "@platforms//os:linux": ["posix_dep"],
- "@platforms//os:osx": ["an_osx_dep", "posix_dep"],
- "@platforms//os:windows": ["win_dep"],
- },
- got.deps_select,
- )
-
- def test_deps_are_added_to_more_specialized_platforms(self):
- got = wheel.Deps(
- "foo",
- requires_dist=[
- "m1_dep; sys_platform=='darwin' and platform_machine=='arm64'",
- "mac_dep; sys_platform=='darwin'",
- ],
- platforms={
- Platform(os=OS.osx, arch=Arch.x86_64),
- Platform(os=OS.osx, arch=Arch.aarch64),
- },
- ).build()
-
- self.assertEqual(
- wheel.FrozenDeps(
- deps=[],
- deps_select={
- "osx_aarch64": ["m1_dep", "mac_dep"],
- "@platforms//os:osx": ["mac_dep"],
- },
- ),
- got,
- )
-
- def test_deps_from_more_specialized_platforms_are_propagated(self):
- got = wheel.Deps(
- "foo",
- requires_dist=[
- "a_mac_dep; sys_platform=='darwin'",
- "m1_dep; sys_platform=='darwin' and platform_machine=='arm64'",
- ],
- platforms={
- Platform(os=OS.osx, arch=Arch.x86_64),
- Platform(os=OS.osx, arch=Arch.aarch64),
- },
- ).build()
-
- self.assertEqual([], got.deps)
- self.assertEqual(
- {
- "osx_aarch64": ["a_mac_dep", "m1_dep"],
- "@platforms//os:osx": ["a_mac_dep"],
- },
- got.deps_select,
- )
-
- def test_non_platform_markers_are_added_to_common_deps(self):
- got = wheel.Deps(
- "foo",
- requires_dist=[
- "bar",
- "baz; implementation_name=='cpython'",
- "m1_dep; sys_platform=='darwin' and platform_machine=='arm64'",
- ],
- platforms={
- Platform(os=OS.linux, arch=Arch.x86_64),
- Platform(os=OS.osx, arch=Arch.x86_64),
- Platform(os=OS.osx, arch=Arch.aarch64),
- Platform(os=OS.windows, arch=Arch.x86_64),
- },
- ).build()
-
- self.assertEqual(["bar", "baz"], got.deps)
- self.assertEqual(
- {
- "osx_aarch64": ["m1_dep"],
- },
- got.deps_select,
- )
-
- def test_self_is_ignored(self):
- deps = wheel.Deps(
- "foo",
- requires_dist=[
- "bar",
- "req_dep; extra == 'requests'",
- "foo[requests]; extra == 'ssl'",
- "ssl_lib; extra == 'ssl'",
- ],
- extras={"ssl"},
- )
-
- got = deps.build()
-
- self.assertEqual(["bar", "req_dep", "ssl_lib"], got.deps)
- self.assertEqual({}, got.deps_select)
-
- def test_self_dependencies_can_come_in_any_order(self):
- deps = wheel.Deps(
- "foo",
- requires_dist=[
- "bar",
- "baz; extra == 'feat'",
- "foo[feat2]; extra == 'all'",
- "foo[feat]; extra == 'feat2'",
- "zdep; extra == 'all'",
- ],
- extras={"all"},
- )
-
- got = deps.build()
-
- self.assertEqual(["bar", "baz", "zdep"], got.deps)
- self.assertEqual({}, got.deps_select)
-
- def test_can_get_deps_based_on_specific_python_version(self):
- requires_dist = [
- "bar",
- "baz; python_version < '3.8'",
- "posix_dep; os_name=='posix' and python_version >= '3.8'",
- ]
-
- py38_deps = wheel.Deps(
- "foo",
- requires_dist=requires_dist,
- platforms=[
- Platform(os=OS.linux, arch=Arch.x86_64, minor_version=8),
- ],
- ).build()
- py37_deps = wheel.Deps(
- "foo",
- requires_dist=requires_dist,
- platforms=[
- Platform(os=OS.linux, arch=Arch.x86_64, minor_version=7),
- ],
- ).build()
-
- self.assertEqual(["bar", "baz"], py37_deps.deps)
- self.assertEqual({}, py37_deps.deps_select)
- self.assertEqual(["bar"], py38_deps.deps)
- self.assertEqual({"@platforms//os:linux": ["posix_dep"]}, py38_deps.deps_select)
-
- @mock.patch(_HOST_INTERPRETER_FN)
- def test_no_version_select_when_single_version(self, mock_host_interpreter_version):
- requires_dist = [
- "bar",
- "baz; python_version >= '3.8'",
- "posix_dep; os_name=='posix'",
- "posix_dep_with_version; os_name=='posix' and python_version >= '3.8'",
- "arch_dep; platform_machine=='x86_64' and python_version >= '3.8'",
- ]
- mock_host_interpreter_version.return_value = 7
-
- self.maxDiff = None
-
- deps = wheel.Deps(
- "foo",
- requires_dist=requires_dist,
- platforms=[
- Platform(os=os, arch=Arch.x86_64, minor_version=minor)
- for minor in [8]
- for os in [OS.linux, OS.windows]
- ],
- )
- got = deps.build()
-
- self.assertEqual(["bar", "baz"], got.deps)
- self.assertEqual(
- {
- "@platforms//os:linux": ["posix_dep", "posix_dep_with_version"],
- "linux_x86_64": ["arch_dep", "posix_dep", "posix_dep_with_version"],
- "windows_x86_64": ["arch_dep"],
- },
- got.deps_select,
- )
-
- @mock.patch(_HOST_INTERPRETER_FN)
- def test_can_get_version_select(self, mock_host_interpreter_version):
- requires_dist = [
- "bar",
- "baz; python_version < '3.8'",
- "baz_new; python_version >= '3.8'",
- "posix_dep; os_name=='posix'",
- "posix_dep_with_version; os_name=='posix' and python_version >= '3.8'",
- "arch_dep; platform_machine=='x86_64' and python_version < '3.8'",
- ]
- mock_host_interpreter_version.return_value = 7
-
- self.maxDiff = None
-
- deps = wheel.Deps(
- "foo",
- requires_dist=requires_dist,
- platforms=[
- Platform(os=os, arch=Arch.x86_64, minor_version=minor)
- for minor in [7, 8, 9]
- for os in [OS.linux, OS.windows]
- ],
- )
- got = deps.build()
-
- self.assertEqual(["bar"], got.deps)
- self.assertEqual(
- {
- "//conditions:default": ["baz"],
- "@//python/config_settings:is_python_3.7": ["baz"],
- "@//python/config_settings:is_python_3.8": ["baz_new"],
- "@//python/config_settings:is_python_3.9": ["baz_new"],
- "@platforms//os:linux": ["baz", "posix_dep"],
- "cp37_linux_x86_64": ["arch_dep", "baz", "posix_dep"],
- "cp37_windows_x86_64": ["arch_dep", "baz"],
- "cp37_linux_anyarch": ["baz", "posix_dep"],
- "cp38_linux_anyarch": [
- "baz_new",
- "posix_dep",
- "posix_dep_with_version",
- ],
- "cp39_linux_anyarch": [
- "baz_new",
- "posix_dep",
- "posix_dep_with_version",
- ],
- "linux_x86_64": ["arch_dep", "baz", "posix_dep"],
- "windows_x86_64": ["arch_dep", "baz"],
- },
- got.deps_select,
- )
-
- @mock.patch(_HOST_INTERPRETER_FN)
- def test_deps_spanning_all_target_py_versions_are_added_to_common(
- self, mock_host_version
- ):
- requires_dist = [
- "bar",
- "baz (<2,>=1.11) ; python_version < '3.8'",
- "baz (<2,>=1.14) ; python_version >= '3.8'",
- ]
- mock_host_version.return_value = 8
-
- deps = wheel.Deps(
- "foo",
- requires_dist=requires_dist,
- platforms=Platform.from_string(["cp37_*", "cp38_*", "cp39_*"]),
- )
- got = deps.build()
-
- self.assertEqual(["bar", "baz"], got.deps)
- self.assertEqual({}, got.deps_select)
-
- @mock.patch(_HOST_INTERPRETER_FN)
- def test_deps_are_not_duplicated(self, mock_host_version):
- mock_host_version.return_value = 7
-
- # See an example in
- # https://files.pythonhosted.org/packages/76/9e/db1c2d56c04b97981c06663384f45f28950a73d9acf840c4006d60d0a1ff/opencv_python-4.9.0.80-cp37-abi3-win32.whl.metadata
- requires_dist = [
- "bar >=0.1.0 ; python_version < '3.7'",
- "bar >=0.2.0 ; python_version >= '3.7'",
- "bar >=0.4.0 ; python_version >= '3.6' and platform_system == 'Linux' and platform_machine == 'aarch64'",
- "bar >=0.4.0 ; python_version >= '3.9'",
- "bar >=0.5.0 ; python_version <= '3.9' and platform_system == 'Darwin' and platform_machine == 'arm64'",
- "bar >=0.5.0 ; python_version >= '3.10' and platform_system == 'Darwin'",
- "bar >=0.5.0 ; python_version >= '3.10'",
- "bar >=0.6.0 ; python_version >= '3.11'",
- ]
-
- deps = wheel.Deps(
- "foo",
- requires_dist=requires_dist,
- platforms=Platform.from_string(["cp37_*", "cp310_*"]),
- )
- got = deps.build()
-
- self.assertEqual(["bar"], got.deps)
- self.assertEqual({}, got.deps_select)
-
- @mock.patch(_HOST_INTERPRETER_FN)
- def test_deps_are_not_duplicated_when_encountering_platform_dep_first(
- self, mock_host_version
- ):
- mock_host_version.return_value = 7
-
- # Note, that we are sorting the incoming `requires_dist` and we need to ensure that we are not getting any
- # issues even if the platform-specific line comes first.
- requires_dist = [
- "bar >=0.4.0 ; python_version >= '3.6' and platform_system == 'Linux' and platform_machine == 'aarch64'",
- "bar >=0.5.0 ; python_version >= '3.9'",
- ]
-
- deps = wheel.Deps(
- "foo",
- requires_dist=requires_dist,
- platforms=Platform.from_string(["cp37_*", "cp310_*"]),
- )
- got = deps.build()
-
- self.assertEqual(["bar"], got.deps)
- self.assertEqual({}, got.deps_select)
-
-
-if __name__ == "__main__":
- unittest.main()
diff --git a/tests/pypi/whl_metadata/BUILD.bazel b/tests/pypi/whl_metadata/BUILD.bazel
new file mode 100644
index 0000000000..3f1d665dd2
--- /dev/null
+++ b/tests/pypi/whl_metadata/BUILD.bazel
@@ -0,0 +1,5 @@
+load(":whl_metadata_tests.bzl", "whl_metadata_test_suite")
+
+whl_metadata_test_suite(
+ name = "whl_metadata_tests",
+)
diff --git a/tests/pypi/whl_metadata/whl_metadata_tests.bzl b/tests/pypi/whl_metadata/whl_metadata_tests.bzl
new file mode 100644
index 0000000000..4acbc9213d
--- /dev/null
+++ b/tests/pypi/whl_metadata/whl_metadata_tests.bzl
@@ -0,0 +1,147 @@
+""
+
+load("@rules_testing//lib:test_suite.bzl", "test_suite")
+load("@rules_testing//lib:truth.bzl", "subjects")
+load(
+ "//python/private/pypi:whl_metadata.bzl",
+ "find_whl_metadata",
+ "parse_whl_metadata",
+) # buildifier: disable=bzl-visibility
+
+_tests = []
+
+def _test_empty(env):
+ fake_path = struct(
+ basename = "site-packages",
+ readdir = lambda watch = None: [],
+ )
+ fail_messages = []
+ find_whl_metadata(install_dir = fake_path, logger = struct(
+ fail = fail_messages.append,
+ ))
+ env.expect.that_collection(fail_messages).contains_exactly([
+ "The '*.dist-info' directory could not be found in 'site-packages'",
+ ])
+
+_tests.append(_test_empty)
+
+def _test_contains_dist_info_but_no_metadata(env):
+ fake_path = struct(
+ basename = "site-packages",
+ readdir = lambda watch = None: [
+ struct(
+ basename = "something.dist-info",
+ is_dir = True,
+ get_child = lambda basename: struct(
+ basename = basename,
+ exists = False,
+ ),
+ ),
+ ],
+ )
+ fail_messages = []
+ find_whl_metadata(install_dir = fake_path, logger = struct(
+ fail = fail_messages.append,
+ ))
+ env.expect.that_collection(fail_messages).contains_exactly([
+ "The METADATA file for the wheel could not be found in 'site-packages/something.dist-info'",
+ ])
+
+_tests.append(_test_contains_dist_info_but_no_metadata)
+
+def _test_contains_metadata(env):
+ fake_path = struct(
+ basename = "site-packages",
+ readdir = lambda watch = None: [
+ struct(
+ basename = "something.dist-info",
+ is_dir = True,
+ get_child = lambda basename: struct(
+ basename = basename,
+ exists = True,
+ ),
+ ),
+ ],
+ )
+ fail_messages = []
+ got = find_whl_metadata(install_dir = fake_path, logger = struct(
+ fail = fail_messages.append,
+ ))
+ env.expect.that_collection(fail_messages).contains_exactly([])
+ env.expect.that_str(got.basename).equals("METADATA")
+
+_tests.append(_test_contains_metadata)
+
+def _parse_whl_metadata(env, **kwargs):
+ result = parse_whl_metadata(**kwargs)
+
+ return env.expect.that_struct(
+ struct(
+ name = result.name,
+ version = result.version,
+ requires_dist = result.requires_dist,
+ provides_extra = result.provides_extra,
+ ),
+ attrs = dict(
+ name = subjects.str,
+ version = subjects.str,
+ requires_dist = subjects.collection,
+ provides_extra = subjects.collection,
+ ),
+ )
+
+def _test_parse_metadata_invalid(env):
+ got = _parse_whl_metadata(
+ env,
+ contents = "",
+ )
+ got.name().equals("")
+ got.version().equals("")
+ got.requires_dist().contains_exactly([])
+ got.provides_extra().contains_exactly([])
+
+_tests.append(_test_parse_metadata_invalid)
+
+def _test_parse_metadata_basic(env):
+ got = _parse_whl_metadata(
+ env,
+ contents = """\
+Name: foo
+Version: 0.0.1
+""",
+ )
+ got.name().equals("foo")
+ got.version().equals("0.0.1")
+ got.requires_dist().contains_exactly([])
+ got.provides_extra().contains_exactly([])
+
+_tests.append(_test_parse_metadata_basic)
+
+def _test_parse_metadata_all(env):
+ got = _parse_whl_metadata(
+ env,
+ contents = """\
+Name: foo
+Version: 0.0.1
+Requires-Dist: bar; extra == "all"
+Provides-Extra: all
+
+Requires-Dist: this will be ignored
+""",
+ )
+ got.name().equals("foo")
+ got.version().equals("0.0.1")
+ got.requires_dist().contains_exactly([
+ "bar; extra == \"all\"",
+ ])
+ got.provides_extra().contains_exactly([
+ "all",
+ ])
+
+_tests.append(_test_parse_metadata_all)
+
+def whl_metadata_test_suite(name): # buildifier: disable=function-docstring
+ test_suite(
+ name = name,
+ basic_tests = _tests,
+ )
From 79abef898ece1a6ae2af8cb855418ac342dd27d8 Mon Sep 17 00:00:00 2001
From: Ivo List
Date: Tue, 15 Apr 2025 04:21:33 +0200
Subject: [PATCH 025/156] fix: replace string with modern providers in tests
(#2773)
Strings used to refer to legacy struct providers, which have since been
deprecated and removed from Bazel. Replacing them with modern providers
makes it possible to simplify and remove the legacy handling from Blaze.
The change is a no-op.
More information: https://github.com/bazelbuild/bazel/issues/25836
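For illustration, a minimal sketch of the pattern the updated tests exercise (the `MyInfo` and attribute names below are made up, not part of the change):
```python
# Sketch only: a provider symbol created with provider() ...
MyInfo = provider(doc = "example provider", fields = [])

# ... is what the `providers` list of an attribute definition now expects,
# instead of a legacy string name such as "provider".
my_label_attr = attr.label(providers = [MyInfo])
```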
---
tests/builders/attr_builders_tests.bzl | 17 +++++++++--------
1 file changed, 9 insertions(+), 8 deletions(-)
diff --git a/tests/builders/attr_builders_tests.bzl b/tests/builders/attr_builders_tests.bzl
index 58557cd633..e92ba2ae0a 100644
--- a/tests/builders/attr_builders_tests.bzl
+++ b/tests/builders/attr_builders_tests.bzl
@@ -28,6 +28,7 @@ def _expect_cfg_defaults(expect, cfg):
expect.where(expr = "cfg.which_cfg").that_str(cfg.which_cfg()).equals("target")
_some_aspect = aspect(implementation = lambda target, ctx: None)
+_SomeInfo = provider("MyInfo", fields = [])
_tests = []
@@ -186,7 +187,7 @@ def _test_label(name):
subject.set_executable(True)
subject.add_allow_files(".txt")
subject.cfg.set_target()
- subject.providers().append("provider")
+ subject.providers().append(_SomeInfo)
subject.aspects().append(_some_aspect)
subject.cfg.outputs().append(Label("//some:output"))
subject.cfg.inputs().append(Label("//some:input"))
@@ -199,7 +200,7 @@ def _test_label(name):
expect.that_bool(subject.executable()).equals(True)
expect.that_collection(subject.allow_files()).contains_exactly([".txt"])
expect.that_bool(subject.allow_single_file()).equals(None)
- expect.that_collection(subject.providers()).contains_exactly(["provider"])
+ expect.that_collection(subject.providers()).contains_exactly([_SomeInfo])
expect.that_collection(subject.aspects()).contains_exactly([_some_aspect])
expect.that_collection(subject.cfg.outputs()).contains_exactly([Label("//some:output")])
expect.that_collection(subject.cfg.inputs()).contains_exactly([Label("//some:input")])
@@ -229,7 +230,7 @@ def _test_label_keyed_string_dict(name):
subject.set_mandatory(True)
subject.set_allow_files(True)
subject.cfg.set_target()
- subject.providers().append("provider")
+ subject.providers().append(_SomeInfo)
subject.aspects().append(_some_aspect)
subject.cfg.outputs().append("//some:output")
subject.cfg.inputs().append("//some:input")
@@ -240,7 +241,7 @@ def _test_label_keyed_string_dict(name):
expect.that_str(subject.doc()).equals("doc")
expect.that_bool(subject.mandatory()).equals(True)
expect.that_bool(subject.allow_files()).equals(True)
- expect.that_collection(subject.providers()).contains_exactly(["provider"])
+ expect.that_collection(subject.providers()).contains_exactly([_SomeInfo])
expect.that_collection(subject.aspects()).contains_exactly([_some_aspect])
expect.that_collection(subject.cfg.outputs()).contains_exactly(["//some:output"])
expect.that_collection(subject.cfg.inputs()).contains_exactly(["//some:input"])
@@ -274,14 +275,14 @@ def _test_label_list(name):
subject.set_doc("doc")
subject.set_mandatory(True)
subject.set_allow_files([".txt"])
- subject.providers().append("provider")
+ subject.providers().append(_SomeInfo)
subject.aspects().append(_some_aspect)
expect.that_collection(subject.default()).contains_exactly(["//some:label"])
expect.that_str(subject.doc()).equals("doc")
expect.that_bool(subject.mandatory()).equals(True)
expect.that_collection(subject.allow_files()).contains_exactly([".txt"])
- expect.that_collection(subject.providers()).contains_exactly(["provider"])
+ expect.that_collection(subject.providers()).contains_exactly([_SomeInfo])
expect.that_collection(subject.aspects()).contains_exactly([_some_aspect])
_expect_builds(expect, subject, "attr.label_list")
@@ -395,14 +396,14 @@ def _test_string_keyed_label_dict(name):
subject.set_doc("doc")
subject.set_mandatory(True)
subject.set_allow_files([".txt"])
- subject.providers().append("provider")
+ subject.providers().append(_SomeInfo)
subject.aspects().append(_some_aspect)
expect.that_dict(subject.default()).contains_exactly({"key": "//some:label"})
expect.that_str(subject.doc()).equals("doc")
expect.that_bool(subject.mandatory()).equals(True)
expect.that_collection(subject.allow_files()).contains_exactly([".txt"])
- expect.that_collection(subject.providers()).contains_exactly(["provider"])
+ expect.that_collection(subject.providers()).contains_exactly([_SomeInfo])
expect.that_collection(subject.aspects()).contains_exactly([_some_aspect])
_expect_builds(expect, subject, "attr.string_keyed_label_dict")
From a0400e9a832d554de032fe44d8b8375ceaa32db8 Mon Sep 17 00:00:00 2001
From: Frank Portman
Date: Tue, 15 Apr 2025 04:37:01 -0400
Subject: [PATCH 026/156] feat(toolchain): Add new make vars for Python
interpreter path compliant with `--no_legacy_external_runfiles` (#2772)
Using these new make vars in a `py_binary` or `py_test` lets the target correctly
locate the interpreter when `--nolegacy_external_runfiles` is set.
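A minimal sketch of how the new variables might be consumed (the target names, env var, and load labels are illustrative assumptions; a `current_py_toolchain` target needs to be declared so its make variables are visible to the consuming rule):
```python
load("@rules_python//python:current_py_toolchain.bzl", "current_py_toolchain")
load("@rules_python//python:py_test.bzl", "py_test")

# Expose the resolved Python toolchain so its make variables can be expanded.
current_py_toolchain(
    name = "current_py_toolchain",
)

py_test(
    name = "interpreter_path_test",
    srcs = ["interpreter_path_test.py"],
    # $(PYTHON3_ROOTPATH) expands to the interpreter's runfiles location,
    # which remains valid under --nolegacy_external_runfiles.
    env = {"PYTHON_INTERPRETER": "$(PYTHON3_ROOTPATH)"},
    toolchains = [":current_py_toolchain"],
)
```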
Fixes #2728
---
CHANGELOG.md | 2 ++
docs/toolchains.md | 6 +++++-
python/current_py_toolchain.bzl | 7 +++++++
3 files changed, 14 insertions(+), 1 deletion(-)
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 33d99dfaa1..6f86851bdf 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -124,6 +124,8 @@ Unreleased changes template.
* (toolchains) Local Python installs can be used to create a toolchain
equivalent to the standard toolchains. See [Local toolchains] docs for how to
configure them.
+* (toolchains) Expose `$(PYTHON2_ROOTPATH)` and `$(PYTHON3_ROOTPATH)`, which are the
+ runfiles-location equivalents of `$(PYTHON2)` and `$(PYTHON3)` respectively.
{#v0-0-0-removed}
diff --git a/docs/toolchains.md b/docs/toolchains.md
index 5cd9eb268e..320e16335b 100644
--- a/docs/toolchains.md
+++ b/docs/toolchains.md
@@ -215,7 +215,11 @@ attribute. You can obtain the path to the Python interpreter using the
`$(PYTHON2)` and `$(PYTHON3)` ["Make"
Variables](https://bazel.build/reference/be/make-variables). See the
{gh-path}`test_current_py_toolchain ` target
-for an example.
+for an example. We also make available `$(PYTHON2_ROOTPATH)` and `$(PYTHON3_ROOTPATH)`,
+which are the Make Variable equivalents of `$(PYTHON2)` and `$(PYTHON3)` but expand to
+runfiles locations. These are helpful if you need to set env vars on binary/test rules
+while using [`--nolegacy_external_runfiles`](https://bazel.build/reference/command-line-reference#flag--legacy_external_runfiles).
+The original make variables still work in exec contexts such as genrules.
### Overriding toolchain defaults and adding more versions
diff --git a/python/current_py_toolchain.bzl b/python/current_py_toolchain.bzl
index f3ff2ace07..f5c5638a88 100644
--- a/python/current_py_toolchain.bzl
+++ b/python/current_py_toolchain.bzl
@@ -27,11 +27,13 @@ def _current_py_toolchain_impl(ctx):
direct.append(toolchain.py3_runtime.interpreter)
transitive.append(toolchain.py3_runtime.files)
vars["PYTHON3"] = toolchain.py3_runtime.interpreter.path
+ vars["PYTHON3_ROOTPATH"] = toolchain.py3_runtime.interpreter.short_path
if toolchain.py2_runtime and toolchain.py2_runtime.interpreter:
direct.append(toolchain.py2_runtime.interpreter)
transitive.append(toolchain.py2_runtime.files)
vars["PYTHON2"] = toolchain.py2_runtime.interpreter.path
+ vars["PYTHON2_ROOTPATH"] = toolchain.py2_runtime.interpreter.short_path
files = depset(direct, transitive = transitive)
return [
@@ -49,6 +51,11 @@ current_py_toolchain = rule(
other rules, such as genrule. It allows exposing a python toolchain after toolchain resolution has
happened, to a rule which expects a concrete implementation of a toolchain, rather than a
toolchain_type which could be resolved to that toolchain.
+
+ :::{versionchanged} VERSION_NEXT_FEATURE
+ From now on, we also expose `$(PYTHON2_ROOTPATH)` and `$(PYTHON3_ROOTPATH)`, which are the
+ runfiles-location equivalents of `$(PYTHON2)` and `$(PYTHON3)` respectively.
+ :::
""",
implementation = _current_py_toolchain_impl,
attrs = {
From ccf3141bbe85f1bd7396febe08ff367101826205 Mon Sep 17 00:00:00 2001
From: Frank Portman
Date: Tue, 15 Apr 2025 04:38:54 -0400
Subject: [PATCH 027/156] fix(packaging): Format `METADATA` correctly if given
empty `requires_file` (#2771)
An empty `requires_file` used to be okay, but at some point this regressed
to leaving an empty line in the `METADATA` file (due to the
`metadata.replace(...)` call), rendering the wheel uninstallable.
This PR initially attempted to solve that by introducing a new list that
processed `METADATA` lines are appended to, rather than relying on repeated
string replacement. But the repeated string replace actually did more than
simply process one line at a time, so I reverted to a single substitution
at the end.
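A self-contained sketch of the substitution behaviour described above, using a hypothetical placeholder line and metadata text (only the standard library `re` module is involved):
```python
import re

# Hypothetical METADATA template: the placeholder marks where Requires-Dist
# entries derived from requires_file would be spliced in.
meta_line = "Requires-Dist: @REQUIRES@"
metadata = "Name: empty_requires_files\nVersion: 0.0.1\n" + meta_line + "\n"

reqs = []  # an empty requires_file produces no requirement lines

if reqs:
    metadata = metadata.replace(meta_line, "\n".join(reqs))
else:
    # Remove the placeholder line entirely, including its trailing newline,
    # so METADATA is not left with a stray blank line.
    metadata = re.sub(re.escape(meta_line) + r"(?:\r?\n)?", "", metadata, count=1)

assert metadata == "Name: empty_requires_files\nVersion: 0.0.1\n"
```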
---
CHANGELOG.md | 1 +
examples/wheel/BUILD.bazel | 16 ++++++++++++++++
examples/wheel/wheel_test.py | 24 +++++++++++++++++++++++-
python/packaging.bzl | 5 +++++
tools/wheelmaker.py | 7 ++++++-
5 files changed, 51 insertions(+), 2 deletions(-)
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 6f86851bdf..e7f9fe30e2 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -96,6 +96,7 @@ Unreleased changes template.
* (toolchains) Run the check on the Python interpreter in isolated mode, to ensure it's not affected by userland environment variables, such as `PYTHONPATH`.
* (toolchains) Ensure temporary `.pyc` and `.pyo` files are also excluded from the interpreters repository files.
* (pypi) Run interpreter version call in isolated mode, to ensure it's not affected by userland environment variables, such as `PYTHONPATH`.
+* (packaging) An empty `requires_file` is treated as if it were omitted, resulting in a valid `METADATA` file.
{#v0-0-0-added}
### Added
diff --git a/examples/wheel/BUILD.bazel b/examples/wheel/BUILD.bazel
index d9ba800125..b434e67405 100644
--- a/examples/wheel/BUILD.bazel
+++ b/examples/wheel/BUILD.bazel
@@ -294,6 +294,12 @@ starlark # Example comment
""".splitlines(),
)
+write_file(
+ name = "empty_requires_file",
+ out = "empty_requires.txt",
+ content = [""],
+)
+
write_file(
name = "extra_requires_file",
out = "extra_requires.txt",
@@ -324,6 +330,15 @@ py_wheel(
deps = [":example_pkg"],
)
+py_wheel(
+ name = "empty_requires_files",
+ distribution = "empty_requires_files",
+ python_tag = "py3",
+ requires_file = ":empty_requires.txt",
+ version = "0.0.1",
+ deps = [":example_pkg"],
+)
+
# Package just a specific py_libraries, without their dependencies
py_wheel(
name = "minimal_data_files",
@@ -367,6 +382,7 @@ py_test(
":custom_package_root_multi_prefix",
":custom_package_root_multi_prefix_reverse_order",
":customized",
+ ":empty_requires_files",
":extra_requires",
":filename_escaping",
":minimal_data_files",
diff --git a/examples/wheel/wheel_test.py b/examples/wheel/wheel_test.py
index a3d6034930..9ec150301d 100644
--- a/examples/wheel/wheel_test.py
+++ b/examples/wheel/wheel_test.py
@@ -483,7 +483,6 @@ def test_requires_file_and_extra_requires_files(self):
if line.startswith(b"Requires-Dist:"):
requires.append(line.decode("utf-8").strip())
- print(requires)
self.assertEqual(
[
"Requires-Dist: tomli>=2.0.0",
@@ -495,6 +494,29 @@ def test_requires_file_and_extra_requires_files(self):
requires,
)
+ def test_empty_requires_file(self):
+ filename = self._get_path("empty_requires_files-0.0.1-py3-none-any.whl")
+
+ with zipfile.ZipFile(filename) as zf:
+ self.assertAllEntriesHasReproducibleMetadata(zf)
+ metadata_file = None
+ for f in zf.namelist():
+ if os.path.basename(f) == "METADATA":
+ metadata_file = f
+ self.assertIsNotNone(metadata_file)
+
+ metadata = zf.read(metadata_file).decode("utf-8")
+ metadata_lines = metadata.splitlines()
+
+ requires = []
+ for i, line in enumerate(metadata_lines):
+ if line.startswith("Name:"):
+ self.assertTrue(metadata_lines[i + 1].startswith("Version:"))
+ if line.startswith("Requires-Dist:"):
+ requires.append(line.strip())
+
+ self.assertEqual([], requires)
+
def test_minimal_data_files(self):
filename = self._get_path("minimal_data_files-0.0.1-py3-none-any.whl")
diff --git a/python/packaging.bzl b/python/packaging.bzl
index 629af2d6a4..b190635cfe 100644
--- a/python/packaging.bzl
+++ b/python/packaging.bzl
@@ -101,6 +101,11 @@ def py_wheel(
Currently only pure-python wheels are supported.
+ :::{versionchanged} VERSION_NEXT_FEATURE
+ From now on, an empty `requires_file` is treated as if it were omitted, resulting in a valid
+ `METADATA` file.
+ :::
+
Examples:
```python
diff --git a/tools/wheelmaker.py b/tools/wheelmaker.py
index 23b18eca5f..908b3fe956 100644
--- a/tools/wheelmaker.py
+++ b/tools/wheelmaker.py
@@ -599,7 +599,12 @@ def get_new_requirement_line(reqs_text, extra):
reqs.append(get_new_requirement_line(reqs_text, extra))
- metadata = metadata.replace(meta_line, "\n".join(reqs))
+ if reqs:
+ metadata = metadata.replace(meta_line, "\n".join(reqs))
+ # File is empty
+ # So replace the meta_line entirely, including removing newline chars
+ else:
+ metadata = re.sub(re.escape(meta_line) + r"(?:\r?\n)?", "", metadata, count=1)
maker.add_metadata(
metadata=metadata,
From ff1388356b0d47b6249dc606ae4ba521df54a06f Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Tue, 15 Apr 2025 17:40:02 +0900
Subject: [PATCH 028/156] build(deps): bump typing-extensions from 4.12.2 to
4.13.2 in /docs (#2776)
Bumps [typing-extensions](https://github.com/python/typing_extensions)
from 4.12.2 to 4.13.2.
Release notes
Sourced from typing-extensions's releases.

4.13.2
- Fix `TypeError` when taking the union of `typing_extensions.TypeAliasType` and a
  `typing.TypeAliasType` on Python 3.12 and 3.13. Patch by Joren Hammudoglu.
- Backport from CPython PR #132160 to avoid having user arguments shadowed in generated
  `__new__` by `@typing_extensions.deprecated`. Patch by Victorien Plot.

4.13.1
This is a bugfix release fixing two edge cases that appear on old bugfix releases of CPython.
Bugfixes:
- Fix regression in 4.13.0 on Python 3.10.2 causing a `TypeError` when using `Concatenate`.
  Patch by Daraan.
- Fix `TypeError` when using `evaluate_forward_ref` on Python 3.10.1-2 and 3.9.8-10.
  Patch by Daraan.

4.13.0
New features:
- Add `typing_extensions.TypeForm` from PEP 747. Patch by Jelle Zijlstra.
- Add `typing_extensions.get_annotations`, a backport of `inspect.get_annotations` that adds
  features specified by PEP 649. Patches by Jelle Zijlstra and Alex Waygood.
- Backport `evaluate_forward_ref` from CPython PR #119891 to evaluate `ForwardRef`s.
  Patch by Daraan, backporting a CPython PR by Jelle Zijlstra.
Bugfixes and changed features:
- Update PEP 728 implementation to a newer version of the PEP. Patch by Jelle Zijlstra.
- Copy the coroutine status of functions and methods wrapped with
  `@typing_extensions.deprecated`. Patch by Sebastian Rittau.
- Fix bug where `TypeAliasType` instances could be subscripted even where they were not
  generic. Patch by Daraan.
- Fix bug where a subscripted `TypeAliasType` instance did not have all attributes of the
  original `TypeAliasType` instance on older Python versions. Patch by Daraan and Alex Waygood.
- Fix bug where subscripted `TypeAliasType` instances (and some other subscripted objects)
  had wrong parameters if they were directly subscripted with an `Unpack` object.
  Patch by Daraan.
- Backport to Python 3.10 the ability to substitute `...` in generic `Callable` aliases that
  have a `Concatenate` special form as their argument. Patch by Daraan.
- Extended the `Concatenate` backport for Python 3.8-3.10 to now accept `Ellipsis` as an
  argument. Patch by Daraan.
- Fix backport of `get_type_hints` to reflect Python 3.11+ behavior which does not add
... (truncated)

Changelog
Sourced from typing-extensions's changelog.

Release 4.13.2 (April 10, 2025)
- Fix `TypeError` when taking the union of `typing_extensions.TypeAliasType` and a
  `typing.TypeAliasType` on Python 3.12 and 3.13. Patch by Joren Hammudoglu.
- Backport from CPython PR #132160 to avoid having user arguments shadowed in generated
  `__new__` by `@typing_extensions.deprecated`. Patch by Victorien Plot.

Release 4.13.1 (April 3, 2025)
Bugfixes:
- Fix regression in 4.13.0 on Python 3.10.2 causing a `TypeError` when using `Concatenate`.
  Patch by Daraan.
- Fix `TypeError` when using `evaluate_forward_ref` on Python 3.10.1-2 and 3.9.8-10.
  Patch by Daraan.

Release 4.13.0 (March 25, 2025)
No user-facing changes since 4.13.0rc1.

Release 4.13.0rc1 (March 18, 2025)
New features:
- Add `typing_extensions.TypeForm` from PEP 747. Patch by Jelle Zijlstra.
- Add `typing_extensions.get_annotations`, a backport of `inspect.get_annotations` that adds
  features specified by PEP 649. Patches by Jelle Zijlstra and Alex Waygood.
- Backport `evaluate_forward_ref` from CPython PR #119891 to evaluate `ForwardRef`s.
  Patch by Daraan, backporting a CPython PR by Jelle Zijlstra.
Bugfixes and changed features:
- Update PEP 728 implementation to a newer version of the PEP. Patch by Jelle Zijlstra.
- Copy the coroutine status of functions and methods wrapped with
  `@typing_extensions.deprecated`. Patch by Sebastian Rittau.
- Fix bug where `TypeAliasType` instances could be subscripted even where they were not
  generic. Patch by Daraan.
- Fix bug where a subscripted `TypeAliasType` instance did not have all attributes of the
  original `TypeAliasType` instance on older Python versions. Patch by Daraan and Alex Waygood.
- Fix bug where subscripted `TypeAliasType` instances (and some other subscripted objects)
  had wrong parameters if they were directly subscripted with an `Unpack` object.
  Patch by Daraan.
- Backport to Python 3.10 the ability to substitute `...` in generic `Callable`
... (truncated)

Commits
- 4525e9d Prepare release 4.13.2 (#583)
- 88a0c20 Do not shadow user arguments in generated `__new__` by `@deprecated` (#581)
- 281d7b0 Add 3rd party tests for litestar (#578)
- 8092c39 fix `TypeAliasType` union with `typing.TypeAliasType` (#575)
- 45a8847 Prepare release 4.13.1 (#573)
- f264e58 Move CI to "ubuntu-latest" (round 2) (#570)
- 5ce0e69 Fix `TypeError` with `evaluate_forward_ref` on some 3.10 and 3.9 versions (#558)
- 304f5cb Add SQLAlchemy to third-party daily tests (#561)
- ebe2b94 Fix duplicated keywords for `typing._ConcatenateGenericAlias` in 3.10.2 (#557)
- 9f93d6f Add intersphinx links for 3.13 typing features (#550)
- Additional commits viewable in compare view
[Dependabot compatibility score](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores)
Dependabot will resolve any conflicts with this PR as long as you don't
alter it yourself. You can also trigger a rebase manually by commenting
`@dependabot rebase`.
[//]: # (dependabot-automerge-start)
[//]: # (dependabot-automerge-end)
---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR:
- `@dependabot rebase` will rebase this PR
- `@dependabot recreate` will recreate this PR, overwriting any edits
that have been made to it
- `@dependabot merge` will merge this PR after your CI passes on it
- `@dependabot squash and merge` will squash and merge this PR after
your CI passes on it
- `@dependabot cancel merge` will cancel a previously requested merge
and block automerging
- `@dependabot reopen` will reopen this PR if it is closed
- `@dependabot close` will close this PR and stop Dependabot recreating
it. You can achieve the same result by closing it manually
- `@dependabot show ignore conditions` will show all
of the ignore conditions of the specified dependency
- `@dependabot ignore this major version` will close this PR and stop
Dependabot creating any more for this major version (unless you reopen
the PR or upgrade to it yourself)
- `@dependabot ignore this minor version` will close this PR and stop
Dependabot creating any more for this minor version (unless you reopen
the PR or upgrade to it yourself)
- `@dependabot ignore this dependency` will close this PR and stop
Dependabot creating any more for this dependency (unless you reopen the
PR or upgrade to it yourself)
Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
docs/requirements.txt | 6 +++---
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/docs/requirements.txt b/docs/requirements.txt
index 8d1cbabffc..e2fb59565a 100644
--- a/docs/requirements.txt
+++ b/docs/requirements.txt
@@ -351,9 +351,9 @@ sphinxcontrib-serializinghtml==2.0.0 \
--hash=sha256:6e2cb0eef194e10c27ec0023bfeb25badbbb5868244cf5bc5bdc04e4464bf331 \
--hash=sha256:e9d912827f872c029017a53f0ef2180b327c3f7fd23c87229f7a8e8b70031d4d
# via sphinx
-typing-extensions==4.12.2 \
- --hash=sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d \
- --hash=sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8
+typing-extensions==4.13.2 \
+ --hash=sha256:a439e7c04b49fec3e5d3e2beaa21755cadbbdc391694e28ccdd36ca4a1408f8c \
+ --hash=sha256:e6c81219bd689f51865d9e372991c540bda33a0379d5573cddb9a3a23f7caaef
# via
# rules-python-docs (docs/pyproject.toml)
# sphinx-autodoc2
From 2cf7ba4bb76f630ff7f2c83cab0b5294db65107b Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Tue, 15 Apr 2025 17:40:24 +0900
Subject: [PATCH 029/156] build(deps): bump urllib3 from 2.3.0 to 2.4.0 in
/tools/publish (#2775)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Bumps [urllib3](https://github.com/urllib3/urllib3) from 2.3.0 to 2.4.0.
Release notes
Sourced from urllib3's releases.

2.4.0
🚀 urllib3 is fundraising for HTTP/2 support
urllib3 is raising ~$40,000 USD to release HTTP/2 support and ensure long-term sustainable
maintenance of the project after a sharp decline in financial support. If your company or
organization uses Python and would benefit from HTTP/2 support in Requests, pip, cloud SDKs,
and thousands of other projects please consider contributing financially to ensure HTTP/2
support is developed sustainably and maintained for the long-haul.
Thank you for your support.

Features
- Applied PEP 639 by specifying the license fields in pyproject.toml. (#3522)
- Updated exceptions to save and restore more properties during the pickle/serialization
  process. (#3567)
- Added `verify_flags` option to `create_urllib3_context` with a default of
  `VERIFY_X509_PARTIAL_CHAIN` and `VERIFY_X509_STRICT` for Python 3.13+. (#3571)

Bugfixes
- Fixed a bug with partial reads of streaming data in Emscripten. (#3555)

Misc
- Switched to uv for installing development dependecies. (#3550)
- Removed the `multiple.intoto.jsonl` asset from GitHub releases. Attestation of release
  files since v2.3.0 can be found on PyPI. (#3566)

Changelog
Sourced from urllib3's changelog.

2.4.0 (2025-04-10)

Features
- Applied PEP 639 by specifying the license fields in pyproject.toml.
  ([#3522](https://github.com/urllib3/urllib3/issues/3522))
- Updated exceptions to save and restore more properties during the pickle/serialization
  process. ([#3567](https://github.com/urllib3/urllib3/issues/3567))
- Added `verify_flags` option to `create_urllib3_context` with a default of
  `VERIFY_X509_PARTIAL_CHAIN` and `VERIFY_X509_STRICT` for Python 3.13+.
  ([#3571](https://github.com/urllib3/urllib3/issues/3571))

Bugfixes
- Fixed a bug with partial reads of streaming data in Emscripten.
  ([#3555](https://github.com/urllib3/urllib3/issues/3555))

Misc
- Switched to uv for installing development dependecies.
  ([#3550](https://github.com/urllib3/urllib3/issues/3550))
- Removed the `multiple.intoto.jsonl` asset from GitHub releases. Attestation of release
  files since v2.3.0 can be found on PyPI.
  ([#3566](https://github.com/urllib3/urllib3/issues/3566))
Commits
[Dependabot compatibility score](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores)
Dependabot will resolve any conflicts with this PR as long as you don't
alter it yourself. You can also trigger a rebase manually by commenting
`@dependabot rebase`.
[//]: # (dependabot-automerge-start)
[//]: # (dependabot-automerge-end)
---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR:
- `@dependabot rebase` will rebase this PR
- `@dependabot recreate` will recreate this PR, overwriting any edits
that have been made to it
- `@dependabot merge` will merge this PR after your CI passes on it
- `@dependabot squash and merge` will squash and merge this PR after
your CI passes on it
- `@dependabot cancel merge` will cancel a previously requested merge
and block automerging
- `@dependabot reopen` will reopen this PR if it is closed
- `@dependabot close` will close this PR and stop Dependabot recreating
it. You can achieve the same result by closing it manually
- `@dependabot show ignore conditions` will show all
of the ignore conditions of the specified dependency
- `@dependabot ignore this major version` will close this PR and stop
Dependabot creating any more for this major version (unless you reopen
the PR or upgrade to it yourself)
- `@dependabot ignore this minor version` will close this PR and stop
Dependabot creating any more for this minor version (unless you reopen
the PR or upgrade to it yourself)
- `@dependabot ignore this dependency` will close this PR and stop
Dependabot creating any more for this dependency (unless you reopen the
PR or upgrade to it yourself)
Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
tools/publish/requirements_darwin.txt | 6 +++---
tools/publish/requirements_linux.txt | 6 +++---
tools/publish/requirements_universal.txt | 6 +++---
tools/publish/requirements_windows.txt | 6 +++---
4 files changed, 12 insertions(+), 12 deletions(-)
diff --git a/tools/publish/requirements_darwin.txt b/tools/publish/requirements_darwin.txt
index 5f8a33c3f5..eaec72c01c 100644
--- a/tools/publish/requirements_darwin.txt
+++ b/tools/publish/requirements_darwin.txt
@@ -202,9 +202,9 @@ twine==5.1.1 \
--hash=sha256:215dbe7b4b94c2c50a7315c0275d2258399280fbb7d04182c7e55e24b5f93997 \
--hash=sha256:9aa0825139c02b3434d913545c7b847a21c835e11597f5255842d457da2322db
# via -r tools/publish/requirements.in
-urllib3==2.3.0 \
- --hash=sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df \
- --hash=sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d
+urllib3==2.4.0 \
+ --hash=sha256:414bc6535b787febd7567804cc015fee39daab8ad86268f1310a9250697de466 \
+ --hash=sha256:4e16665048960a0900c702d4a66415956a584919c03361cac9f1df5c5dd7e813
# via
# requests
# twine
diff --git a/tools/publish/requirements_linux.txt b/tools/publish/requirements_linux.txt
index 40d987b16d..5fdc742a88 100644
--- a/tools/publish/requirements_linux.txt
+++ b/tools/publish/requirements_linux.txt
@@ -318,9 +318,9 @@ twine==5.1.1 \
--hash=sha256:215dbe7b4b94c2c50a7315c0275d2258399280fbb7d04182c7e55e24b5f93997 \
--hash=sha256:9aa0825139c02b3434d913545c7b847a21c835e11597f5255842d457da2322db
# via -r tools/publish/requirements.in
-urllib3==2.3.0 \
- --hash=sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df \
- --hash=sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d
+urllib3==2.4.0 \
+ --hash=sha256:414bc6535b787febd7567804cc015fee39daab8ad86268f1310a9250697de466 \
+ --hash=sha256:4e16665048960a0900c702d4a66415956a584919c03361cac9f1df5c5dd7e813
# via
# requests
# twine
diff --git a/tools/publish/requirements_universal.txt b/tools/publish/requirements_universal.txt
index c8bc0bb258..97cbef0221 100644
--- a/tools/publish/requirements_universal.txt
+++ b/tools/publish/requirements_universal.txt
@@ -322,9 +322,9 @@ twine==5.1.1 \
--hash=sha256:215dbe7b4b94c2c50a7315c0275d2258399280fbb7d04182c7e55e24b5f93997 \
--hash=sha256:9aa0825139c02b3434d913545c7b847a21c835e11597f5255842d457da2322db
# via -r tools/publish/requirements.in
-urllib3==2.3.0 \
- --hash=sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df \
- --hash=sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d
+urllib3==2.4.0 \
+ --hash=sha256:414bc6535b787febd7567804cc015fee39daab8ad86268f1310a9250697de466 \
+ --hash=sha256:4e16665048960a0900c702d4a66415956a584919c03361cac9f1df5c5dd7e813
# via
# requests
# twine
diff --git a/tools/publish/requirements_windows.txt b/tools/publish/requirements_windows.txt
index 1980812d15..458414009e 100644
--- a/tools/publish/requirements_windows.txt
+++ b/tools/publish/requirements_windows.txt
@@ -206,9 +206,9 @@ twine==5.1.1 \
--hash=sha256:215dbe7b4b94c2c50a7315c0275d2258399280fbb7d04182c7e55e24b5f93997 \
--hash=sha256:9aa0825139c02b3434d913545c7b847a21c835e11597f5255842d457da2322db
# via -r tools/publish/requirements.in
-urllib3==2.3.0 \
- --hash=sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df \
- --hash=sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d
+urllib3==2.4.0 \
+ --hash=sha256:414bc6535b787febd7567804cc015fee39daab8ad86268f1310a9250697de466 \
+ --hash=sha256:4e16665048960a0900c702d4a66415956a584919c03361cac9f1df5c5dd7e813
# via
# requests
# twine
From 101962aecbe048525248361d7a8e6341655fa30f Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Tue, 15 Apr 2025 17:40:45 +0900
Subject: [PATCH 030/156] build(deps): bump urllib3 from 2.3.0 to 2.4.0 in
/docs (#2774)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Bumps [urllib3](https://github.com/urllib3/urllib3) from 2.3.0 to 2.4.0.
Release notes
Sourced from urllib3's releases.

2.4.0
🚀 urllib3 is fundraising for HTTP/2 support
urllib3 is raising ~$40,000 USD to release HTTP/2 support and ensure long-term sustainable
maintenance of the project after a sharp decline in financial support. If your company or
organization uses Python and would benefit from HTTP/2 support in Requests, pip, cloud SDKs,
and thousands of other projects please consider contributing financially to ensure HTTP/2
support is developed sustainably and maintained for the long-haul.
Thank you for your support.

Features
- Applied PEP 639 by specifying the license fields in pyproject.toml. (#3522)
- Updated exceptions to save and restore more properties during the pickle/serialization
  process. (#3567)
- Added `verify_flags` option to `create_urllib3_context` with a default of
  `VERIFY_X509_PARTIAL_CHAIN` and `VERIFY_X509_STRICT` for Python 3.13+. (#3571)

Bugfixes
- Fixed a bug with partial reads of streaming data in Emscripten. (#3555)

Misc
- Switched to uv for installing development dependecies. (#3550)
- Removed the `multiple.intoto.jsonl` asset from GitHub releases. Attestation of release
  files since v2.3.0 can be found on PyPI. (#3566)

Changelog
Sourced from urllib3's changelog.

2.4.0 (2025-04-10)

Features
- Applied PEP 639 by specifying the license fields in pyproject.toml.
  ([#3522](https://github.com/urllib3/urllib3/issues/3522))
- Updated exceptions to save and restore more properties during the pickle/serialization
  process. ([#3567](https://github.com/urllib3/urllib3/issues/3567))
- Added `verify_flags` option to `create_urllib3_context` with a default of
  `VERIFY_X509_PARTIAL_CHAIN` and `VERIFY_X509_STRICT` for Python 3.13+.
  ([#3571](https://github.com/urllib3/urllib3/issues/3571))

Bugfixes
- Fixed a bug with partial reads of streaming data in Emscripten.
  ([#3555](https://github.com/urllib3/urllib3/issues/3555))

Misc
- Switched to uv for installing development dependecies.
  ([#3550](https://github.com/urllib3/urllib3/issues/3550))
- Removed the `multiple.intoto.jsonl` asset from GitHub releases. Attestation of release
  files since v2.3.0 can be found on PyPI.
  ([#3566](https://github.com/urllib3/urllib3/issues/3566))
Commits
[Dependabot compatibility score](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores)
Dependabot will resolve any conflicts with this PR as long as you don't
alter it yourself. You can also trigger a rebase manually by commenting
`@dependabot rebase`.
[//]: # (dependabot-automerge-start)
[//]: # (dependabot-automerge-end)
---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR:
- `@dependabot rebase` will rebase this PR
- `@dependabot recreate` will recreate this PR, overwriting any edits
that have been made to it
- `@dependabot merge` will merge this PR after your CI passes on it
- `@dependabot squash and merge` will squash and merge this PR after
your CI passes on it
- `@dependabot cancel merge` will cancel a previously requested merge
and block automerging
- `@dependabot reopen` will reopen this PR if it is closed
- `@dependabot close` will close this PR and stop Dependabot recreating
it. You can achieve the same result by closing it manually
- `@dependabot show ignore conditions` will show all
of the ignore conditions of the specified dependency
- `@dependabot ignore this major version` will close this PR and stop
Dependabot creating any more for this major version (unless you reopen
the PR or upgrade to it yourself)
- `@dependabot ignore this minor version` will close this PR and stop
Dependabot creating any more for this minor version (unless you reopen
the PR or upgrade to it yourself)
- `@dependabot ignore this dependency` will close this PR and stop
Dependabot creating any more for this dependency (unless you reopen the
PR or upgrade to it yourself)
Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
docs/requirements.txt | 6 +++---
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/docs/requirements.txt b/docs/requirements.txt
index e2fb59565a..5e308b00f4 100644
--- a/docs/requirements.txt
+++ b/docs/requirements.txt
@@ -357,7 +357,7 @@ typing-extensions==4.13.2 \
# via
# rules-python-docs (docs/pyproject.toml)
# sphinx-autodoc2
-urllib3==2.3.0 \
- --hash=sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df \
- --hash=sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d
+urllib3==2.4.0 \
+ --hash=sha256:414bc6535b787febd7567804cc015fee39daab8ad86268f1310a9250697de466 \
+ --hash=sha256:4e16665048960a0900c702d4a66415956a584919c03361cac9f1df5c5dd7e813
# via requests
From 8fc25de7dcec1d1106edd8e076c9fcb58497b40b Mon Sep 17 00:00:00 2001
From: Ignas Anikevicius <240938+aignas@users.noreply.github.com>
Date: Wed, 16 Apr 2025 14:45:34 +0900
Subject: [PATCH 031/156] refactor(bzlmod): stop using 'repo' attr in
whl_library (#2779)
A simple non-functional cleanup that just removes legacy code paths from
bzlmod PyPI integration.
---
python/private/pypi/extension.bzl | 1 -
python/private/pypi/whl_library.bzl | 6 ++++--
tests/pypi/extension/extension_tests.bzl | 22 ----------------------
3 files changed, 4 insertions(+), 25 deletions(-)
diff --git a/python/private/pypi/extension.bzl b/python/private/pypi/extension.bzl
index 8fce47656b..d2ae132741 100644
--- a/python/private/pypi/extension.bzl
+++ b/python/private/pypi/extension.bzl
@@ -181,7 +181,6 @@ def _create_whl_repos(
# Construct args separately so that the lock file can be smaller and does not include unused
# attrs.
whl_library_args = dict(
- repo = pip_name,
dep_template = "@{}//{{name}}:{{target}}".format(hub_name),
)
maybe_args = dict(
diff --git a/python/private/pypi/whl_library.bzl b/python/private/pypi/whl_library.bzl
index 54f9ff3909..0a580011ab 100644
--- a/python/private/pypi/whl_library.bzl
+++ b/python/private/pypi/whl_library.bzl
@@ -517,8 +517,10 @@ and the target that we need respectively.
doc = "Name of the group, if any.",
),
"repo": attr.string(
- mandatory = True,
- doc = "Pointer to parent repo name. Used to make these rules rerun if the parent repo changes.",
+ doc = """\
+Pointer to parent repo name. Used to make these rules rerun if the parent repo changes.
+Only used in WORKSPACE when the {attr}`dep_template` is not set.
+""",
),
"repo_prefix": attr.string(
doc = """
diff --git a/tests/pypi/extension/extension_tests.bzl b/tests/pypi/extension/extension_tests.bzl
index 66c9e0549e..4d86d6a6e0 100644
--- a/tests/pypi/extension/extension_tests.bzl
+++ b/tests/pypi/extension/extension_tests.bzl
@@ -174,7 +174,6 @@ def _test_simple(env):
"pypi_315_simple": {
"dep_template": "@pypi//{name}:{target}",
"python_interpreter_target": "unit_test_interpreter_target",
- "repo": "pypi_315",
"requirement": "simple==0.0.1 --hash=sha256:deadbeef --hash=sha256:deadbaaf",
},
})
@@ -234,13 +233,11 @@ def _test_simple_multiple_requirements(env):
"pypi_315_simple_osx_aarch64_osx_x86_64": {
"dep_template": "@pypi//{name}:{target}",
"python_interpreter_target": "unit_test_interpreter_target",
- "repo": "pypi_315",
"requirement": "simple==0.0.2 --hash=sha256:deadb00f",
},
"pypi_315_simple_windows_x86_64": {
"dep_template": "@pypi//{name}:{target}",
"python_interpreter_target": "unit_test_interpreter_target",
- "repo": "pypi_315",
"requirement": "simple==0.0.1 --hash=sha256:deadbeef",
},
})
@@ -307,13 +304,11 @@ torch==2.4.1 ; platform_machine != 'x86_64' \
"pypi_315_torch_linux_aarch64_linux_arm_linux_ppc_linux_s390x_osx_aarch64": {
"dep_template": "@pypi//{name}:{target}",
"python_interpreter_target": "unit_test_interpreter_target",
- "repo": "pypi_315",
"requirement": "torch==2.4.1 --hash=sha256:deadbeef",
},
"pypi_315_torch_linux_x86_64_osx_x86_64_windows_x86_64": {
"dep_template": "@pypi//{name}:{target}",
"python_interpreter_target": "unit_test_interpreter_target",
- "repo": "pypi_315",
"requirement": "torch==2.4.1+cpu",
},
})
@@ -444,7 +439,6 @@ torch==2.4.1+cpu ; platform_machine == 'x86_64' \
"experimental_target_platforms": ["cp312_linux_x86_64"],
"filename": "torch-2.4.1+cpu-cp312-cp312-linux_x86_64.whl",
"python_interpreter_target": "unit_test_interpreter_target",
- "repo": "pypi_312",
"requirement": "torch==2.4.1+cpu",
"sha256": "8800deef0026011d502c0c256cc4b67d002347f63c3a38cd8e45f1f445c61364",
"urls": ["https://torch.index/whl/cpu/torch-2.4.1%2Bcpu-cp312-cp312-linux_x86_64.whl"],
@@ -454,7 +448,6 @@ torch==2.4.1+cpu ; platform_machine == 'x86_64' \
"experimental_target_platforms": ["cp312_linux_aarch64"],
"filename": "torch-2.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",
"python_interpreter_target": "unit_test_interpreter_target",
- "repo": "pypi_312",
"requirement": "torch==2.4.1",
"sha256": "36109432b10bd7163c9b30ce896f3c2cca1b86b9765f956a1594f0ff43091e2a",
"urls": ["https://torch.index/whl/cpu/torch-2.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl"],
@@ -464,7 +457,6 @@ torch==2.4.1+cpu ; platform_machine == 'x86_64' \
"experimental_target_platforms": ["cp312_windows_x86_64"],
"filename": "torch-2.4.1+cpu-cp312-cp312-win_amd64.whl",
"python_interpreter_target": "unit_test_interpreter_target",
- "repo": "pypi_312",
"requirement": "torch==2.4.1+cpu",
"sha256": "3a570e5c553415cdbddfe679207327b3a3806b21c6adea14fba77684d1619e97",
"urls": ["https://torch.index/whl/cpu/torch-2.4.1%2Bcpu-cp312-cp312-win_amd64.whl"],
@@ -474,7 +466,6 @@ torch==2.4.1+cpu ; platform_machine == 'x86_64' \
"experimental_target_platforms": ["cp312_osx_aarch64"],
"filename": "torch-2.4.1-cp312-none-macosx_11_0_arm64.whl",
"python_interpreter_target": "unit_test_interpreter_target",
- "repo": "pypi_312",
"requirement": "torch==2.4.1",
"sha256": "72b484d5b6cec1a735bf3fa5a1c4883d01748698c5e9cfdbeb4ffab7c7987e0d",
"urls": ["https://torch.index/whl/cpu/torch-2.4.1-cp312-none-macosx_11_0_arm64.whl"],
@@ -560,7 +551,6 @@ simple==0.0.3 \
"experimental_target_platforms": ["cp315_linux_x86_64"],
"extra_pip_args": ["--platform=manylinux_2_17_x86_64", "--python-version=315", "--implementation=cp", "--abi=cp315"],
"python_interpreter_target": "unit_test_interpreter_target",
- "repo": "pypi_315",
"requirement": "extra==0.0.1 --hash=sha256:deadb00f",
},
"pypi_315_simple_linux_x86_64": {
@@ -569,7 +559,6 @@ simple==0.0.3 \
"experimental_target_platforms": ["cp315_linux_x86_64"],
"extra_pip_args": ["--platform=manylinux_2_17_x86_64", "--python-version=315", "--implementation=cp", "--abi=cp315"],
"python_interpreter_target": "unit_test_interpreter_target",
- "repo": "pypi_315",
"requirement": "simple==0.0.1 --hash=sha256:deadbeef",
},
"pypi_315_simple_osx_aarch64": {
@@ -578,7 +567,6 @@ simple==0.0.3 \
"experimental_target_platforms": ["cp315_osx_aarch64"],
"extra_pip_args": ["--platform=macosx_10_9_arm64", "--python-version=315", "--implementation=cp", "--abi=cp315"],
"python_interpreter_target": "unit_test_interpreter_target",
- "repo": "pypi_315",
"requirement": "simple==0.0.3 --hash=sha256:deadbaaf",
},
})
@@ -766,7 +754,6 @@ git_dep @ git+https://git.server/repo/project@deadbeefdeadbeef
"extra_pip_args": ["--extra-args-for-sdist-building"],
"filename": "any-name.tar.gz",
"python_interpreter_target": "unit_test_interpreter_target",
- "repo": "pypi_315",
"requirement": "direct_sdist_without_sha @ some-archive/any-name.tar.gz",
"sha256": "",
"urls": ["some-archive/any-name.tar.gz"],
@@ -776,7 +763,6 @@ git_dep @ git+https://git.server/repo/project@deadbeefdeadbeef
"experimental_target_platforms": ["cp315_linux_aarch64", "cp315_linux_arm", "cp315_linux_ppc", "cp315_linux_s390x", "cp315_linux_x86_64", "cp315_osx_aarch64", "cp315_osx_x86_64", "cp315_windows_x86_64"],
"filename": "direct_without_sha-0.0.1-py3-none-any.whl",
"python_interpreter_target": "unit_test_interpreter_target",
- "repo": "pypi_315",
"requirement": "direct_without_sha==0.0.1 @ example-direct.org/direct_without_sha-0.0.1-py3-none-any.whl",
"sha256": "",
"urls": ["example-direct.org/direct_without_sha-0.0.1-py3-none-any.whl"],
@@ -785,14 +771,12 @@ git_dep @ git+https://git.server/repo/project@deadbeefdeadbeef
"dep_template": "@pypi//{name}:{target}",
"extra_pip_args": ["--extra-args-for-sdist-building"],
"python_interpreter_target": "unit_test_interpreter_target",
- "repo": "pypi_315",
"requirement": "git_dep @ git+https://git.server/repo/project@deadbeefdeadbeef",
},
"pypi_315_pip_fallback": {
"dep_template": "@pypi//{name}:{target}",
"extra_pip_args": ["--extra-args-for-sdist-building"],
"python_interpreter_target": "unit_test_interpreter_target",
- "repo": "pypi_315",
"requirement": "pip_fallback==0.0.1",
},
"pypi_315_simple_py3_none_any_deadb00f": {
@@ -800,7 +784,6 @@ git_dep @ git+https://git.server/repo/project@deadbeefdeadbeef
"experimental_target_platforms": ["cp315_linux_aarch64", "cp315_linux_arm", "cp315_linux_ppc", "cp315_linux_s390x", "cp315_linux_x86_64", "cp315_osx_aarch64", "cp315_osx_x86_64", "cp315_windows_x86_64"],
"filename": "simple-0.0.1-py3-none-any.whl",
"python_interpreter_target": "unit_test_interpreter_target",
- "repo": "pypi_315",
"requirement": "simple==0.0.1",
"sha256": "deadb00f",
"urls": ["example2.org"],
@@ -811,7 +794,6 @@ git_dep @ git+https://git.server/repo/project@deadbeefdeadbeef
"extra_pip_args": ["--extra-args-for-sdist-building"],
"filename": "simple-0.0.1.tar.gz",
"python_interpreter_target": "unit_test_interpreter_target",
- "repo": "pypi_315",
"requirement": "simple==0.0.1",
"sha256": "deadbeef",
"urls": ["example.org"],
@@ -821,7 +803,6 @@ git_dep @ git+https://git.server/repo/project@deadbeefdeadbeef
"experimental_target_platforms": ["cp315_linux_aarch64", "cp315_linux_arm", "cp315_linux_ppc", "cp315_linux_s390x", "cp315_linux_x86_64", "cp315_osx_aarch64", "cp315_osx_x86_64", "cp315_windows_x86_64"],
"filename": "some_pkg-0.0.1-py3-none-any.whl",
"python_interpreter_target": "unit_test_interpreter_target",
- "repo": "pypi_315",
"requirement": "some_pkg==0.0.1 @ example-direct.org/some_pkg-0.0.1-py3-none-any.whl --hash=sha256:deadbaaf",
"sha256": "deadbaaf",
"urls": ["example-direct.org/some_pkg-0.0.1-py3-none-any.whl"],
@@ -831,7 +812,6 @@ git_dep @ git+https://git.server/repo/project@deadbeefdeadbeef
"experimental_target_platforms": ["cp315_linux_aarch64", "cp315_linux_arm", "cp315_linux_ppc", "cp315_linux_s390x", "cp315_linux_x86_64", "cp315_osx_aarch64", "cp315_osx_x86_64", "cp315_windows_x86_64"],
"filename": "some-other-pkg-0.0.1-py3-none-any.whl",
"python_interpreter_target": "unit_test_interpreter_target",
- "repo": "pypi_315",
"requirement": "some_other_pkg==0.0.1",
"sha256": "deadb33f",
"urls": ["example2.org/index/some_other_pkg/"],
@@ -920,13 +900,11 @@ optimum[onnxruntime-gpu]==1.17.1 ; sys_platform == 'linux'
"pypi_315_optimum_linux_aarch64_linux_arm_linux_ppc_linux_s390x_linux_x86_64": {
"dep_template": "@pypi//{name}:{target}",
"python_interpreter_target": "unit_test_interpreter_target",
- "repo": "pypi_315",
"requirement": "optimum[onnxruntime-gpu]==1.17.1",
},
"pypi_315_optimum_osx_aarch64_osx_x86_64": {
"dep_template": "@pypi//{name}:{target}",
"python_interpreter_target": "unit_test_interpreter_target",
- "repo": "pypi_315",
"requirement": "optimum[onnxruntime]==1.17.1",
},
})
From c813d845b959e37d4949e368c86bc1277d153b38 Mon Sep 17 00:00:00 2001
From: Matt Mackay
Date: Wed, 16 Apr 2025 23:45:50 -0400
Subject: [PATCH 032/156] perf: lazily load gazelle manifest files (#2746)
In large repositories where Python may not be the only language, the
gazelle manifest loading is done unnecessarily, and it happens during the
configuration walk.
This means that even for non-python gazelle invocations (e.g. `bazel run
gazelle -- web/`), Python manifest files are parsed and loaded into
memory.
This issue compounds if the repository uses multiple dependency
closures, i.e. multiple `gazelle_python.yaml` files.
In our repo, we currently have ~250 Python manifests, so loading them
when Gazelle is only running over other languages is time consuming.
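The shape of the change, illustrated in Python rather than the actual Go (the names and the parsing stub are made up; see the diff below for the real implementation):
```python
class ManifestConfig:
    """Sketch of deferring manifest parsing until a lookup needs it."""

    def __init__(self, manifest_path=None):
        self.manifest_path = manifest_path  # recorded during the configure walk
        self._manifest = None               # parsed lazily, at most once

    def _parse(self):
        # Stand-in for decoding gazelle_python.yaml.
        with open(self.manifest_path) as f:
            return f.read()

    def manifest(self):
        if self._manifest is None and self.manifest_path:
            self._manifest = self._parse()
        return self._manifest
```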
Co-authored-by: Douglas Thor
---
CHANGELOG.md | 3 +++
gazelle/python/configure.go | 24 +----------------
gazelle/pythonconfig/pythonconfig.go | 40 +++++++++++++++++++++++++---
3 files changed, 40 insertions(+), 27 deletions(-)
diff --git a/CHANGELOG.md b/CHANGELOG.md
index e7f9fe30e2..299a43e1ff 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -76,6 +76,9 @@ Unreleased changes template.
* (pypi) The PyPI extension will no longer write the lock file entries as the
extension has been marked reproducible.
Fixes [#2434](https://github.com/bazel-contrib/rules_python/issues/2434).
+* (gazelle) Lazily load and parse manifest files when running Gazelle. This ensures no
+ manifest files are loaded when Gazelle is run over a set of non-python directories.
+ See [PR #2746](https://github.com/bazel-contrib/rules_python/pull/2746).
* (rules) {attr}`py_binary.srcs` and {attr}`py_test.srcs` is no longer mandatory when
`main_module` is specified (for `--bootstrap_impl=script`)
diff --git a/gazelle/python/configure.go b/gazelle/python/configure.go
index 7b1f091b34..a00b0ba0ba 100644
--- a/gazelle/python/configure.go
+++ b/gazelle/python/configure.go
@@ -18,7 +18,6 @@ import (
"flag"
"fmt"
"log"
- "os"
"path/filepath"
"strconv"
"strings"
@@ -27,7 +26,6 @@ import (
"github.com/bazelbuild/bazel-gazelle/rule"
"github.com/bmatcuk/doublestar/v4"
- "github.com/bazel-contrib/rules_python/gazelle/manifest"
"github.com/bazel-contrib/rules_python/gazelle/pythonconfig"
)
@@ -228,25 +226,5 @@ func (py *Configurer) Configure(c *config.Config, rel string, f *rule.File) {
}
gazelleManifestPath := filepath.Join(c.RepoRoot, rel, gazelleManifestFilename)
- gazelleManifest, err := py.loadGazelleManifest(gazelleManifestPath)
- if err != nil {
- log.Fatal(err)
- }
- if gazelleManifest != nil {
- config.SetGazelleManifest(gazelleManifest)
- }
-}
-
-func (py *Configurer) loadGazelleManifest(gazelleManifestPath string) (*manifest.Manifest, error) {
- if _, err := os.Stat(gazelleManifestPath); err != nil {
- if os.IsNotExist(err) {
- return nil, nil
- }
- return nil, fmt.Errorf("failed to load Gazelle manifest at %q: %w", gazelleManifestPath, err)
- }
- manifestFile := new(manifest.File)
- if err := manifestFile.Decode(gazelleManifestPath); err != nil {
- return nil, fmt.Errorf("failed to load Gazelle manifest at %q: %w", gazelleManifestPath, err)
- }
- return manifestFile.Manifest, nil
+ config.SetGazelleManifestPath(gazelleManifestPath)
}
diff --git a/gazelle/pythonconfig/pythonconfig.go b/gazelle/pythonconfig/pythonconfig.go
index 23c0cfd572..866339d449 100644
--- a/gazelle/pythonconfig/pythonconfig.go
+++ b/gazelle/pythonconfig/pythonconfig.go
@@ -16,6 +16,8 @@ package pythonconfig
import (
"fmt"
+ "log"
+ "os"
"path"
"regexp"
"strings"
@@ -153,10 +155,11 @@ func (c Configs) ParentForPackage(pkg string) *Config {
type Config struct {
parent *Config
- extensionEnabled bool
- repoRoot string
- pythonProjectRoot string
- gazelleManifest *manifest.Manifest
+ extensionEnabled bool
+ repoRoot string
+ pythonProjectRoot string
+ gazelleManifestPath string
+ gazelleManifest *manifest.Manifest
excludedPatterns *singlylinkedlist.List
ignoreFiles map[string]struct{}
@@ -281,11 +284,26 @@ func (c *Config) SetGazelleManifest(gazelleManifest *manifest.Manifest) {
c.gazelleManifest = gazelleManifest
}
+// SetGazelleManifestPath sets the path to the gazelle_python.yaml file
+// for the current configuration.
+func (c *Config) SetGazelleManifestPath(gazelleManifestPath string) {
+ c.gazelleManifestPath = gazelleManifestPath
+}
+
// FindThirdPartyDependency scans the gazelle manifests for the current config
// and the parent configs up to the root finding if it can resolve the module
// name.
func (c *Config) FindThirdPartyDependency(modName string) (string, string, bool) {
for currentCfg := c; currentCfg != nil; currentCfg = currentCfg.parent {
+ // Attempt to load the manifest if needed.
+ if currentCfg.gazelleManifestPath != "" && currentCfg.gazelleManifest == nil {
+ currentCfgManifest, err := loadGazelleManifest(currentCfg.gazelleManifestPath)
+ if err != nil {
+ log.Fatal(err)
+ }
+ currentCfg.SetGazelleManifest(currentCfgManifest)
+ }
+
if currentCfg.gazelleManifest != nil {
gazelleManifest := currentCfg.gazelleManifest
if distributionName, ok := gazelleManifest.ModulesMapping[modName]; ok {
@@ -526,3 +544,17 @@ func (c *Config) FormatThirdPartyDependency(repositoryName string, distributionN
return label.New(repositoryName, normConventionalDistributionName, normConventionalDistributionName)
}
+
+func loadGazelleManifest(gazelleManifestPath string) (*manifest.Manifest, error) {
+ if _, err := os.Stat(gazelleManifestPath); err != nil {
+ if os.IsNotExist(err) {
+ return nil, nil
+ }
+ return nil, fmt.Errorf("failed to load Gazelle manifest at %q: %w", gazelleManifestPath, err)
+ }
+ manifestFile := new(manifest.File)
+ if err := manifestFile.Decode(gazelleManifestPath); err != nil {
+ return nil, fmt.Errorf("failed to load Gazelle manifest at %q: %w", gazelleManifestPath, err)
+ }
+ return manifestFile.Manifest, nil
+}
From d0950c5648789071667b852a6d736cf865e2ff07 Mon Sep 17 00:00:00 2001
From: Ignas Anikevicius <240938+aignas@users.noreply.github.com>
Date: Fri, 18 Apr 2025 07:00:05 +0900
Subject: [PATCH 033/156] fix(ci): use ubuntu-latest for mypy action (#2784)
---
.github/workflows/mypy.yaml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/.github/workflows/mypy.yaml b/.github/workflows/mypy.yaml
index 866c43abd1..e774b9b03b 100644
--- a/.github/workflows/mypy.yaml
+++ b/.github/workflows/mypy.yaml
@@ -15,7 +15,7 @@ defaults:
jobs:
ci:
- runs-on: ubuntu-20.04
+ runs-on: ubuntu-latest
steps:
# Checkout the code
- uses: actions/checkout@v4
From 183d2973060c653fc393209241b46e4ec807dd7b Mon Sep 17 00:00:00 2001
From: Ignas Anikevicius <240938+aignas@users.noreply.github.com>
Date: Fri, 18 Apr 2025 08:43:27 +0900
Subject: [PATCH 034/156] doc: better document supported platform tiers (#2783)
Fixes #2722.
Related #2734, #2276, #1579
---
docs/support.md | 28 ++++++++++++++++++++++++++--
1 file changed, 26 insertions(+), 2 deletions(-)
diff --git a/docs/support.md b/docs/support.md
index ea099650bd..5e6de57fcb 100644
--- a/docs/support.md
+++ b/docs/support.md
@@ -31,11 +31,35 @@ minor/patch versions.
See [Bazel's release support matrix](https://bazel.build/release#support-matrix)
for what versions are the rolling, active, and prior releases.
+## Supported Python versions
+
+As a general rule, we test all released non-EOL Python versions. Other
+interpreter versions may work but are not guaranteed. We are interested in
+staying compatible with upcoming unreleased versions, so if you see that things
+stop working, please create tickets or, better yet, pull requests.
+
## Supported Platforms
We only support the platforms that our continuous integration jobs run, which
-is Linux, Mac, and Windows. Code to support other platforms is allowed, but
-can only be on a best-effort basis.
+is Linux, Mac, and Windows.
+
+To better describe the different support levels, the list below acts as a rough
+guideline for the platform tiers:
+* Tier 0 - The platforms that our CI runs: `linux_x86_64`, `osx_x86_64`, `RBE linux_x86_64`.
+* Tier 1 - The platforms that are similar enough to what the CI runs: `linux_aarch64`, `osx_arm64`.
+ In addition, `windows_x86_64` is in this list: we run tests for it in CI, but
+ developing for Windows is more challenging, so features may come to this
+ platform later.
+* Tier 2 - The remaining platforms, which may have varying levels of support, e.g.
+ `linux_s390x`, `linux_ppc64le`, `windows_arm64`.
+
+:::{note}
+Code to support Tier 2 platforms is allowed, but regressions will be fixed on a
+best-effort basis, so feel free to contribute by creating PRs.
+
+If you would like to provide/sponsor CI setup for a platform that is not Tier 0,
+please create a ticket or contact the maintainers on Slack.
+:::
## Compatibility Policy
From abdf560f56490beb43c1e4d72338f8553bc4d73f Mon Sep 17 00:00:00 2001
From: David Sanderson <32687193+dws@users.noreply.github.com>
Date: Fri, 18 Apr 2025 16:04:22 -0400
Subject: [PATCH 035/156] fix(rules): copy_propagating_kwargs() now also copies
target_compatible_with (#2788)
This routine already copies `compatible_with`, which is little used, but
does not copy `target_compatible_with`, which is broadly used. This
seems like an oversight.
I noticed this discrepancy when working on a system that assumes that
any `tags` or `target_compatible_with` parameters supplied to a macro
will propagate to all rules created by that macro. In rules_python, this
already works for `tags`, but not for `target_compatible_with`.
It would be great to get this accepted upstream, so that I can stop
patching rules_python.
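For illustration, here is a minimal Starlark sketch of how a macro might rely on this
behaviour. The `my_py_tool` macro and its helper `filegroup` are hypothetical; only
`copy_propagating_kwargs()` (from `python/private/util.bzl`) and `py_binary` are real
rules_python symbols.
```starlark
# Hypothetical macro; only copy_propagating_kwargs() and py_binary are real.
load("@rules_python//python:py_binary.bzl", "py_binary")
load("@rules_python//python/private:util.bzl", "copy_propagating_kwargs")

def my_py_tool(name, srcs, **kwargs):
    """Defines a py_binary plus a sources filegroup, forwarding common attrs."""
    # With this change, target_compatible_with is copied alongside testonly,
    # tags, compatible_with, and restricted_to.
    common = copy_propagating_kwargs(kwargs)

    native.filegroup(
        name = name + "_srcs",
        srcs = srcs,
        **common
    )

    py_binary(
        name = name,
        srcs = srcs,
        **kwargs
    )
```
With the fix, calling `my_py_tool(name = "tool", srcs = ["tool.py"],
target_compatible_with = ["@platforms//os:linux"])` would mark the helper
`filegroup` as incompatible on non-Linux platforms too, instead of silently
dropping the constraint.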
---------
Co-authored-by: Richard Levasseur
---
CHANGELOG.md | 2 ++
python/private/util.bzl | 2 +-
2 files changed, 3 insertions(+), 1 deletion(-)
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 299a43e1ff..47ccd2459a 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -100,6 +100,8 @@ Unreleased changes template.
* (toolchains) Ensure temporary `.pyc` and `.pyo` files are also excluded from the interpreters repository files.
* (pypi) Run interpreter version call in isolated mode, to ensure it's not affected by userland environment variables, such as `PYTHONPATH`.
* (packaging) An empty `requires_file` is treated as if it were omitted, resulting in a valid `METADATA` file.
+* (rules) py_wheel and sphinxdocs rules now propagate `target_compatible_with` to all targets they create.
+ [PR #2788](https://github.com/bazel-contrib/rules_python/pull/2788).
{#v0-0-0-added}
### Added
diff --git a/python/private/util.bzl b/python/private/util.bzl
index 33261befaf..4d2da57760 100644
--- a/python/private/util.bzl
+++ b/python/private/util.bzl
@@ -42,7 +42,7 @@ def copy_propagating_kwargs(from_kwargs, into_kwargs = None):
into_kwargs = {}
# Include tags because people generally expect tags to propagate.
- for attr in ("testonly", "tags", "compatible_with", "restricted_to"):
+ for attr in ("testonly", "tags", "compatible_with", "restricted_to", "target_compatible_with"):
if attr in from_kwargs and attr not in into_kwargs:
into_kwargs[attr] = from_kwargs[attr]
return into_kwargs
From 844e7ada6738fc0e1f040df3c967e778af2af1c7 Mon Sep 17 00:00:00 2001
From: Richard Levasseur
Date: Sat, 19 Apr 2025 20:18:40 -0700
Subject: [PATCH 036/156] release: 1.4.0 release prep (#2789)
Updates changelog and version markers.
Also updates the release docs with some shell one-liners to copy and
paste, to make the process a bit more mechanical.
---
CHANGELOG.md | 22 ++++++++++++----------
RELEASING.md | 21 +++++++++++++++++++++
python/current_py_toolchain.bzl | 2 +-
python/features.bzl | 2 +-
python/local_toolchains/repos.bzl | 2 +-
python/packaging.bzl | 2 +-
python/private/py_exec_tools_toolchain.bzl | 2 +-
python/private/py_info.bzl | 2 +-
python/private/py_library.bzl | 2 +-
python/private/pypi/extension.bzl | 4 ++--
python/private/python.bzl | 8 ++++----
11 files changed, 46 insertions(+), 23 deletions(-)
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 47ccd2459a..1378853626 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -21,7 +21,7 @@ A brief description of the categories of changes:
`(docs)`.
-{#v0-0-0}
-## Unreleased
+{#1-4-0}
+## [1.4.0] - 2025-04-19
-[0.0.0]: https://github.com/bazel-contrib/rules_python/releases/tag/0.0.0
+[1.4.0]: https://github.com/bazel-contrib/rules_python/releases/tag/1.4.0
-{#v0-0-0-changed}
+{#1-4-0-changed}
### Changed
* (toolchain) The `exec` configuration toolchain now has the forwarded
`exec_interpreter` now also forwards the `ToolchainInfo` provider. This is
@@ -72,7 +74,7 @@ Unreleased changes template.
* (toolchains) Previously [#2636](https://github.com/bazel-contrib/rules_python/pull/2636)
changed the semantics of `ignore_root_user_error` from "ignore" to "warning". This is now
flipped back to ignoring the issue, and will only emit a warning when the attribute is set
- `False`.
+ `False`.
* (pypi) The PyPI extension will no longer write the lock file entries as the
extension has been marked reproducible.
Fixes [#2434](https://github.com/bazel-contrib/rules_python/issues/2434).
@@ -84,7 +86,7 @@ Unreleased changes template.
[20250317]: https://github.com/astral-sh/python-build-standalone/releases/tag/20250317
-{#v0-0-0-fixed}
+{#1-4-0-fixed}
### Fixed
* (pypi) Platform specific extras are now correctly handled when using
universal lock files with environment markers. Fixes [#2690](https://github.com/bazel-contrib/rules_python/pull/2690).
@@ -103,7 +105,7 @@ Unreleased changes template.
* (rules) py_wheel and sphinxdocs rules now propagate `target_compatible_with` to all targets they create.
[PR #2788](https://github.com/bazel-contrib/rules_python/pull/2788).
-{#v0-0-0-added}
+{#1-4-0-added}
### Added
* (pypi) From now on `sha256` values in the `requirements.txt` is no longer
mandatory when enabling {attr}`pip.parse.experimental_index_url` feature.
@@ -134,13 +136,13 @@ Unreleased changes template.
locations equivalents of `$(PYTHON2)` and `$(PYTHON3) respectively.
-{#v0-0-0-removed}
+{#1-4-0-removed}
### Removed
* Nothing removed.
{#v1-3-0}
-## Unreleased
+## [1.3.0] - 2025-03-27
[1.3.0]: https://github.com/bazel-contrib/rules_python/releases/tag/1.3.0
diff --git a/RELEASING.md b/RELEASING.md
index 82510b99c7..c9d46c39f0 100644
--- a/RELEASING.md
+++ b/RELEASING.md
@@ -14,7 +14,14 @@ These are the steps for a regularly scheduled release from HEAD.
1. [Determine the next semantic version number](#determining-semantic-version).
1. Update CHANGELOG.md: replace the `v0-0-0` and `0.0.0` with `X.Y.0`.
+ ```
+ awk -v version=X.Y.0 'BEGIN { hv=version; gsub(/\./, "-", hv) } /END_UNRELEASED_TEMPLATE/ { found_marker = 1 } found_marker { gsub(/v0-0-0/, hv, $0); gsub(/Unreleased/, "[" version "] - " strftime("%Y-%m-%d"), $0); gsub(/0.0.0/, version, $0); } { print } ' CHANGELOG.md > /tmp/changelog && cp /tmp/changelog CHANGELOG.md
+ ```
1. Replace `VERSION_NEXT_*` strings with `X.Y.0`.
+ ```
+ grep -l --exclude=CONTRIBUTING.md --exclude=RELEASING.md --exclude-dir=.* VERSION_NEXT_ -r \
+ | xargs sed -i -e 's/VERSION_NEXT_FEATURE/X.Y.0/' -e 's/VERSION_NEXT_PATCH/X.Y.0/'
+ ```
1. Send these changes for review and get them merged.
1. Create a branch for the new release, named `release/X.Y`
```
@@ -90,6 +97,20 @@ It will be promoted to stable next week, pending feedback.
It's traditional to include notable changes from the changelog, but not
required.
+### Re-releasing a version
+
+Re-releasing a version (i.e. changing the commit a tag points to) is
+*sometimes* possible, but it depends on how far into the release process it got.
+
+The two points of no return are:
+ * If the PyPI package has been published: PyPI disallows using the same
+ filename/version twice. Once published, it cannot be replaced.
+ * If the BCR package has been published: Once it's been committed to the BCR
+ registry, it cannot be replaced.
+
+If release steps fail _prior_ to those points, then it's OK to change the tag. You
+may need to manually delete the GitHub release.
+
## Secrets
### PyPI user rules-python
diff --git a/python/current_py_toolchain.bzl b/python/current_py_toolchain.bzl
index f5c5638a88..0ca5c90ccc 100644
--- a/python/current_py_toolchain.bzl
+++ b/python/current_py_toolchain.bzl
@@ -52,7 +52,7 @@ current_py_toolchain = rule(
happened, to a rule which expects a concrete implementation of a toolchain, rather than a
toolchain_type which could be resolved to that toolchain.
- :::{versionchanged} VERSION_NEXT_FEATURE
+ :::{versionchanged} 1.4.0
From now on, we also expose `$(PYTHON2_ROOTPATH)` and `$(PYTHON3_ROOTPATH)` which are runfiles
locations equivalents of `$(PYTHON2)` and `$(PYTHON3) respectively.
:::
diff --git a/python/features.bzl b/python/features.bzl
index 8edfb698fc..917bd3800c 100644
--- a/python/features.bzl
+++ b/python/features.bzl
@@ -35,7 +35,7 @@ def _features_typedef():
True if the `PyInfo.site_packages_symlinks` field is available.
- :::{versionadded} VERSION_NEXT_FEATURE
+ :::{versionadded} 1.4.0
:::
::::
diff --git a/python/local_toolchains/repos.bzl b/python/local_toolchains/repos.bzl
index d1b45cfd7f..320e503e1a 100644
--- a/python/local_toolchains/repos.bzl
+++ b/python/local_toolchains/repos.bzl
@@ -1,6 +1,6 @@
"""Rules/macros for repository phase for local toolchains.
-:::{versionadded} VERSION_NEXT_FEATURE
+:::{versionadded} 1.4.0
:::
"""
diff --git a/python/packaging.bzl b/python/packaging.bzl
index b190635cfe..223aba142d 100644
--- a/python/packaging.bzl
+++ b/python/packaging.bzl
@@ -101,7 +101,7 @@ def py_wheel(
Currently only pure-python wheels are supported.
- :::{versionchanged} VERSION_NEXT_FEATURE
+ :::{versionchanged} 1.4.0
From now on, an empty `requires_file` is treated as if it were omitted, resulting in a valid
`METADATA` file.
:::
diff --git a/python/private/py_exec_tools_toolchain.bzl b/python/private/py_exec_tools_toolchain.bzl
index ff30431ff4..332570b26b 100644
--- a/python/private/py_exec_tools_toolchain.bzl
+++ b/python/private/py_exec_tools_toolchain.bzl
@@ -77,7 +77,7 @@ handle all the necessary transitions and runtime setup to invoke a program.
See {obj}`PyExecToolsInfo.exec_interpreter` for further docs.
-:::{versionchanged} VERSION_NEXT_FEATURE
+:::{versionchanged} 1.4.0
From now on the provided target also needs to provide `platform_common.ToolchainInfo`
so that the toolchain `py_runtime` field can be correctly forwarded.
:::
diff --git a/python/private/py_info.bzl b/python/private/py_info.bzl
index 4ecd02a438..dc3cb24c51 100644
--- a/python/private/py_info.bzl
+++ b/python/private/py_info.bzl
@@ -168,7 +168,7 @@ values from further way dependencies, such as forcing symlinks to point to
specific paths or preventing symlinks from being created.
:::
-:::{versionadded} VERSION_NEXT_FEATURE
+:::{versionadded} 1.4.0
:::
""",
"transitive_implicit_pyc_files": """
diff --git a/python/private/py_library.bzl b/python/private/py_library.bzl
index edd0db579f..6b5882de5a 100644
--- a/python/private/py_library.bzl
+++ b/python/private/py_library.bzl
@@ -94,7 +94,7 @@ to a consumer have precedence. See {obj}`PyInfo.site_packages_symlinks` for
more information.
:::
-:::{versionadded} VERSION_NEXT_FEATURE
+:::{versionadded} 1.4.0
:::
""",
),
diff --git a/python/private/pypi/extension.bzl b/python/private/pypi/extension.bzl
index d2ae132741..68776e32d0 100644
--- a/python/private/pypi/extension.bzl
+++ b/python/private/pypi/extension.bzl
@@ -686,7 +686,7 @@ If {attr}`download_only` is set, then `sdist` archives will be discarded and `pi
operate in wheel-only mode.
:::
-:::{versionchanged} VERSION_NEXT_FEATURE
+:::{versionchanged} 1.4.0
Index metadata will be used to deduct `sha256` values for packages even if the
`sha256` values are not present in the requirements.txt lock file.
:::
@@ -767,7 +767,7 @@ to `rules_python` and use this attribute until the bug is fixed.
EXPERIMENTAL: this may be removed without notice.
-:::{versionadded} VERSION_NEXT_FEATURE
+:::{versionadded} 1.4.0
:::
""",
),
diff --git a/python/private/python.bzl b/python/private/python.bzl
index efc429420e..f49fb26d52 100644
--- a/python/private/python.bzl
+++ b/python/private/python.bzl
@@ -695,7 +695,7 @@ matches the {attr}`python_version` attribute of a toolchain, this
toolchain is the default version. If this attribute is set, the
{attr}`is_default` attribute of the toolchain is ignored.
-:::{versionadded} VERSION_NEXT_FEATURE
+:::{versionadded} 1.4.0
:::
""",
),
@@ -707,7 +707,7 @@ If the string matches the {attr}`python_version` attribute of a
toolchain, this toolchain is the default version. If this attribute is
set, the {attr}`is_default` attribute of the toolchain is ignored.
-:::{versionadded} VERSION_NEXT_FEATURE
+:::{versionadded} 1.4.0
:::
""",
),
@@ -720,7 +720,7 @@ of the file match the {attr}`python_version` attribute of a toolchain,
this toolchain is the default version. If this attribute is set, the
{attr}`is_default` attribute of the toolchain is ignored.
-:::{versionadded} VERSION_NEXT_FEATURE
+:::{versionadded} 1.4.0
:::
""",
),
@@ -813,7 +813,7 @@ this to `False`.
doc = """\
Whether the toolchain is the default version.
-:::{versionchanged} VERSION_NEXT_FEATURE
+:::{versionchanged} 1.4.0
This setting is ignored if the default version is set using the `defaults`
tag class.
:::
From cc46fb26d629b9e440371861f031cb2a85fd9c55 Mon Sep 17 00:00:00 2001
From: Guillaume Maudoux
Date: Sun, 20 Apr 2025 08:05:13 +0200
Subject: [PATCH 037/156] fix: declare PyInfo as provided by
test/binary/library (#2777)
Currently, the rules don't advertise the PyInfo provider through the
`provides` argument to the rule function. This means that aspects that
want to consume PyInfo can't use `required_providers` to restrict
themselves to the Python rules, and instead have to apply to all rules.
To fix, add PyInfo to the `provides` argument of the rules.
Fixes https://github.com/bazel-contrib/rules_python/issues/2506
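As a rough sketch of what this enables, consider an aspect that only wants to
visit Python rules. The aspect below is hypothetical; `PyInfo` (loaded from
`@rules_python//python:py_info.bzl`), `aspect()`, and `required_providers` are
real Bazel/rules_python API.
```starlark
# Hypothetical aspect used only to illustrate required_providers filtering.
load("@rules_python//python:py_info.bzl", "PyInfo")

def _collect_py_sources_impl(target, ctx):
    # Only reached for rules that declare PyInfo in their `provides` list.
    info = target[PyInfo]
    return [OutputGroupInfo(py_sources = info.transitive_sources)]

collect_py_sources = aspect(
    implementation = _collect_py_sources_impl,
    attr_aspects = ["deps"],
    # Before this change, py_library/py_binary/py_test did not advertise
    # PyInfo, so this filter would have skipped them entirely.
    required_providers = [PyInfo],
)
```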
---------
Co-authored-by: Richard Levasseur
Co-authored-by: Richard Levasseur
---
CHANGELOG.md | 22 ++++++++++++++++++++++
python/private/py_executable.bzl | 4 +++-
python/private/py_library.bzl | 5 +++++
3 files changed, 30 insertions(+), 1 deletion(-)
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 1378853626..cad074e6a6 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -47,6 +47,28 @@ BEGIN_UNRELEASED_TEMPLATE
END_UNRELEASED_TEMPLATE
-->
+{#v0-0-0}
+## Unreleased
+
+[0.0.0]: https://github.com/bazel-contrib/rules_python/releases/tag/0.0.0
+
+{#v0-0-0-changed}
+### Changed
+* Nothing changed.
+
+{#v0-0-0-fixed}
+### Fixed
+* (rules) PyInfo provider is now advertised by py_test, py_binary, and py_library;
+ this allows aspects using required_providers to function correctly.
+ ([#2506](https://github.com/bazel-contrib/rules_python/issues/2506)).
+
+{#v0-0-0-added}
+### Added
+* Nothing added.
+
+{#v0-0-0-removed}
+### Removed
+* Nothing removed.
{#1-4-0}
## [1.4.0] - 2025-04-19
diff --git a/python/private/py_executable.bzl b/python/private/py_executable.bzl
index dd3ad869fa..b4cda21b1d 100644
--- a/python/private/py_executable.bzl
+++ b/python/private/py_executable.bzl
@@ -1854,6 +1854,8 @@ def create_base_executable_rule():
"""
return create_executable_rule_builder().build()
+_MaybeBuiltinPyInfo = [BuiltinPyInfo] if BuiltinPyInfo != None else []
+
# NOTE: Exported publicly
def create_executable_rule_builder(implementation, **kwargs):
"""Create a rule builder for an executable Python program.
@@ -1877,7 +1879,7 @@ def create_executable_rule_builder(implementation, **kwargs):
attrs = EXECUTABLE_ATTRS,
exec_groups = dict(REQUIRED_EXEC_GROUP_BUILDERS), # Mutable copy
fragments = ["py", "bazel_py"],
- provides = [PyExecutableInfo],
+ provides = [PyExecutableInfo, PyInfo] + _MaybeBuiltinPyInfo,
toolchains = [
ruleb.ToolchainType(TOOLCHAIN_TYPE),
ruleb.ToolchainType(EXEC_TOOLS_TOOLCHAIN_TYPE, mandatory = False),
diff --git a/python/private/py_library.bzl b/python/private/py_library.bzl
index 6b5882de5a..bf0c25439e 100644
--- a/python/private/py_library.bzl
+++ b/python/private/py_library.bzl
@@ -43,7 +43,9 @@ load(
load(":flags.bzl", "AddSrcsToRunfilesFlag", "PrecompileFlag", "VenvsSitePackages")
load(":precompile.bzl", "maybe_precompile")
load(":py_cc_link_params_info.bzl", "PyCcLinkParamsInfo")
+load(":py_info.bzl", "PyInfo")
load(":py_internal.bzl", "py_internal")
+load(":reexports.bzl", "BuiltinPyInfo")
load(":rule_builders.bzl", "ruleb")
load(
":toolchain_types.bzl",
@@ -299,6 +301,8 @@ def _repo_relative_short_path(short_path):
else:
return short_path
+_MaybeBuiltinPyInfo = [BuiltinPyInfo] if BuiltinPyInfo != None else []
+
# NOTE: Exported publicaly
def create_py_library_rule_builder():
"""Create a rule builder for a py_library.
@@ -319,6 +323,7 @@ def create_py_library_rule_builder():
exec_groups = dict(REQUIRED_EXEC_GROUP_BUILDERS),
attrs = LIBRARY_ATTRS,
fragments = ["py"],
+ provides = [PyCcLinkParamsInfo, PyInfo] + _MaybeBuiltinPyInfo,
toolchains = [
ruleb.ToolchainType(TOOLCHAIN_TYPE, mandatory = False),
ruleb.ToolchainType(EXEC_TOOLS_TOOLCHAIN_TYPE, mandatory = False),
From a19e1e41a609dd10ae6cdc49d76eb1f119145d2e Mon Sep 17 00:00:00 2001
From: Ignas Anikevicius <240938+aignas@users.noreply.github.com>
Date: Sun, 20 Apr 2025 19:17:59 +0900
Subject: [PATCH 038/156] fix: load target_platforms through the hub (#2781)
This PR moves the parsing of `Requires-Dist` to the loading phase
within the `whl_library_targets_from_requires` macro. The original
`whl_library_targets` macro has been left unchanged so that I don't have
to reinvent the unit tests - it is already well covered by tests.
Before this PR we had to wire the `target_platforms` via the
`experimental_target_platforms` attr of `whl_library`, which meant
that whenever they changed (e.g. when the minor Python version changed),
the wheel would be re-extracted even though the final result might be
the same.
This refactor uncovered that the dependency graph creation was incorrect
when there were multiple target Python versions, because of the various
heuristics in that code. In hindsight, I added those heuristics to make
the generated `BUILD.bazel` files more readable back when the unit test
coverage was not great. That is now unnecessary, and since everything
happens in Starlark, a simpler algorithm that always does the right
thing is the better approach.
This also cleans up the code by removing leftover TODO notes and code
that no longer makes sense.
Work towards #260, #2319
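To make the new flow concrete, the generated `BUILD.bazel` in a wheel repository
now looks roughly like the sketch below. The hub name `pypi`, the wheel filename,
and the `Requires-Dist` values are made up; the loads mirror the template in
`generate_whl_library_build_bazel.bzl`.
```starlark
# Illustrative shape of a generated BUILD.bazel for a wheel repo; real files
# carry more arguments (data_exclude, entry_points, group_deps, ...).
load("@pypi//:config.bzl", "target_platforms")
load(
    "@rules_python//python/private/pypi:whl_library_targets.bzl",
    "whl_library_targets_from_requires",
)

package(default_visibility = ["//visibility:public"])

whl_library_targets_from_requires(
    name = "foo-1.0-py3-none-any.whl",
    dep_template = "@pypi//{name}:{target}",
    metadata_name = "foo",
    metadata_version = "1.0",
    # Requires-Dist lines from the wheel METADATA; the markers are evaluated
    # in Starlark at loading time, per target platform.
    requires_dist = [
        "bar>=2.0",
        "colorama; sys_platform == 'win32'",
    ],
    # Comes from the hub's config.bzl, so changing the target platforms no
    # longer re-extracts the wheel.
    target_platforms = target_platforms,
)
```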
---
CHANGELOG.md | 7 +
config.bzl.tmpl.bzlmod | 0
python/private/pypi/BUILD.bazel | 14 +-
python/private/pypi/attrs.bzl | 3 +
python/private/pypi/config.bzl.tmpl.bzlmod | 9 +
python/private/pypi/extension.bzl | 41 ++--
.../pypi/generate_whl_library_build_bazel.bzl | 27 +-
python/private/pypi/hub_repository.bzl | 18 +-
python/private/pypi/pep508.bzl | 23 --
python/private/pypi/pep508_deps.bzl | 231 ++++--------------
python/private/pypi/pep508_requirement.bzl | 4 +-
python/private/pypi/whl_library.bzl | 97 +++-----
python/private/pypi/whl_library_targets.bzl | 83 +++++++
tests/pypi/extension/extension_tests.bzl | 10 -
...generate_whl_library_build_bazel_tests.bzl | 92 +++++--
tests/pypi/pep508/deps_tests.bzl | 191 ++++++---------
.../whl_library_targets_tests.bzl | 67 ++++-
17 files changed, 451 insertions(+), 466 deletions(-)
create mode 100644 config.bzl.tmpl.bzlmod
create mode 100644 python/private/pypi/config.bzl.tmpl.bzlmod
delete mode 100644 python/private/pypi/pep508.bzl
diff --git a/CHANGELOG.md b/CHANGELOG.md
index cad074e6a6..154b66114b 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -105,6 +105,13 @@ END_UNRELEASED_TEMPLATE
[PR #2746](https://github.com/bazel-contrib/rules_python/pull/2746).
* (rules) {attr}`py_binary.srcs` and {attr}`py_test.srcs` is no longer mandatory when
`main_module` is specified (for `--bootstrap_impl=script`)
+* (pypi) From now on the `Requires-Dist` from the wheel metadata is analysed in
+ the loading phase instead of the repository rule phase, giving better caching
+ performance when the target platforms are changed (e.g. target Python
+ versions). This is preparatory work for stabilizing the cross-platform wheel
+ support. From now on, usage of `experimental_target_platforms` should be
+ avoided and the `requirements_by_platform` values should instead be used to
+ specify the target platforms for the given dependencies.
[20250317]: https://github.com/astral-sh/python-build-standalone/releases/tag/20250317
diff --git a/config.bzl.tmpl.bzlmod b/config.bzl.tmpl.bzlmod
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/python/private/pypi/BUILD.bazel b/python/private/pypi/BUILD.bazel
index 7297238cb4..a758b3f153 100644
--- a/python/private/pypi/BUILD.bazel
+++ b/python/private/pypi/BUILD.bazel
@@ -212,15 +212,6 @@ bzl_library(
],
)
-bzl_library(
- name = "pep508_bzl",
- srcs = ["pep508.bzl"],
- deps = [
- ":pep508_env_bzl",
- ":pep508_evaluate_bzl",
- ],
-)
-
bzl_library(
name = "pep508_deps_bzl",
srcs = ["pep508_deps.bzl"],
@@ -378,13 +369,12 @@ bzl_library(
":attrs_bzl",
":deps_bzl",
":generate_whl_library_build_bazel_bzl",
- ":parse_whl_name_bzl",
":patch_whl_bzl",
- ":pep508_deps_bzl",
+ ":pep508_requirement_bzl",
":pypi_repo_utils_bzl",
":whl_metadata_bzl",
- ":whl_target_platforms_bzl",
"//python/private:auth_bzl",
+ "//python/private:bzlmod_enabled_bzl",
"//python/private:envsubst_bzl",
"//python/private:is_standalone_interpreter_bzl",
"//python/private:repo_utils_bzl",
diff --git a/python/private/pypi/attrs.bzl b/python/private/pypi/attrs.bzl
index 9d88c1e32c..fe35d8bf7d 100644
--- a/python/private/pypi/attrs.bzl
+++ b/python/private/pypi/attrs.bzl
@@ -123,6 +123,9 @@ Warning:
"experimental_target_platforms": attr.string_list(
default = [],
doc = """\
+*NOTE*: This will be removed in the next major version, so please consider migrating
+to `bzlmod` and relying on {attr}`pip.parse.requirements_by_platform` for this feature.
+
A list of platforms that we will generate the conditional dependency graph for
cross platform wheels by parsing the wheel metadata. This will generate the
correct dependencies for packages like `sphinx` or `pylint`, which include
diff --git a/python/private/pypi/config.bzl.tmpl.bzlmod b/python/private/pypi/config.bzl.tmpl.bzlmod
new file mode 100644
index 0000000000..deb53631d1
--- /dev/null
+++ b/python/private/pypi/config.bzl.tmpl.bzlmod
@@ -0,0 +1,9 @@
+"""Extra configuration values that are exposed from the hub repository for spoke repositories to access.
+
+NOTE: This is internal `rules_python` API and if you would like to depend on it, please raise an issue
+with your use case. This may change between rules_python versions without any notice.
+
+@generated by rules_python pip.parse bzlmod extension.
+"""
+
+target_platforms = %%TARGET_PLATFORMS%%
diff --git a/python/private/pypi/extension.bzl b/python/private/pypi/extension.bzl
index 68776e32d0..d1895ca211 100644
--- a/python/private/pypi/extension.bzl
+++ b/python/private/pypi/extension.bzl
@@ -32,7 +32,6 @@ load(":simpleapi_download.bzl", "simpleapi_download")
load(":whl_config_setting.bzl", "whl_config_setting")
load(":whl_library.bzl", "whl_library")
load(":whl_repo_name.bzl", "pypi_repo_name", "whl_repo_name")
-load(":whl_target_platforms.bzl", "whl_target_platforms")
def _major_minor_version(version):
version = semver(version)
@@ -68,7 +67,6 @@ def _create_whl_repos(
*,
pip_attr,
whl_overrides,
- evaluate_markers = evaluate_markers,
available_interpreters = INTERPRETER_LABELS,
get_index_urls = None):
"""create all of the whl repositories
@@ -77,7 +75,6 @@ def _create_whl_repos(
module_ctx: {type}`module_ctx`.
pip_attr: {type}`struct` - the struct that comes from the tag class iteration.
whl_overrides: {type}`dict[str, struct]` - per-wheel overrides.
- evaluate_markers: the function to use to evaluate markers.
get_index_urls: A function used to get the index URLs
available_interpreters: {type}`dict[str, Label]` The dictionary of available
interpreters that have been registered using the `python` bzlmod extension.
@@ -162,14 +159,12 @@ def _create_whl_repos(
requirements_osx = pip_attr.requirements_darwin,
requirements_windows = pip_attr.requirements_windows,
extra_pip_args = pip_attr.extra_pip_args,
+ # TODO @aignas 2025-04-15: pass the full version into here
python_version = major_minor,
logger = logger,
),
extra_pip_args = pip_attr.extra_pip_args,
get_index_urls = get_index_urls,
- # NOTE @aignas 2025-02-24: we will use the "cp3xx_os_arch" platform labels
- # for converting to the PEP508 environment and will evaluate them in starlark
- # without involving the interpreter at all.
evaluate_markers = evaluate_markers,
logger = logger,
)
@@ -191,7 +186,6 @@ def _create_whl_repos(
enable_implicit_namespace_pkgs = pip_attr.enable_implicit_namespace_pkgs,
environment = pip_attr.environment,
envsubst = pip_attr.envsubst,
- experimental_target_platforms = pip_attr.experimental_target_platforms,
group_deps = group_deps,
group_name = group_name,
pip_data_exclude = pip_attr.pip_data_exclude,
@@ -244,6 +238,12 @@ def _create_whl_repos(
},
extra_aliases = extra_aliases,
whl_libraries = whl_libraries,
+ target_platforms = {
+ plat: None
+ for reqs in requirements_by_platform.values()
+ for req in reqs
+ for plat in req.target_platforms
+ },
)
def _whl_repos(*, requirement, whl_library_args, download_only, netrc, auth_patterns, multiple_requirements_for_whl = False, python_version):
@@ -274,20 +274,11 @@ def _whl_repos(*, requirement, whl_library_args, download_only, netrc, auth_patt
args["urls"] = [distribution.url]
args["sha256"] = distribution.sha256
args["filename"] = distribution.filename
- args["experimental_target_platforms"] = requirement.target_platforms
# Pure python wheels or sdists may need to have a platform here
target_platforms = None
if distribution.filename.endswith(".whl") and not distribution.filename.endswith("-any.whl"):
- parsed_whl = parse_whl_name(distribution.filename)
- whl_platforms = whl_target_platforms(
- platform_tag = parsed_whl.platform_tag,
- )
- args["experimental_target_platforms"] = [
- p
- for p in requirement.target_platforms
- if [None for wp in whl_platforms if p.endswith(wp.target_platform)]
- ]
+ pass
elif multiple_requirements_for_whl:
target_platforms = requirement.target_platforms
@@ -416,6 +407,7 @@ You cannot use both the additive_build_content and additive_build_content_file a
hub_group_map = {}
exposed_packages = {}
extra_aliases = {}
+ target_platforms = {}
whl_libraries = {}
for mod in module_ctx.modules:
@@ -498,6 +490,7 @@ You cannot use both the additive_build_content and additive_build_content_file a
for whl_name, aliases in out.extra_aliases.items():
extra_aliases[hub_name].setdefault(whl_name, {}).update(aliases)
exposed_packages.setdefault(hub_name, {}).update(out.exposed_packages)
+ target_platforms.setdefault(hub_name, {}).update(out.target_platforms)
whl_libraries.update(out.whl_libraries)
# TODO @aignas 2024-04-05: how do we support different requirement
@@ -535,6 +528,10 @@ You cannot use both the additive_build_content and additive_build_content_file a
}
for hub_name, extra_whl_aliases in extra_aliases.items()
},
+ target_platforms = {
+ hub_name: sorted(p)
+ for hub_name, p in target_platforms.items()
+ },
whl_libraries = {
k: dict(sorted(args.items()))
for k, args in sorted(whl_libraries.items())
@@ -626,15 +623,13 @@ def _pip_impl(module_ctx):
},
packages = mods.exposed_packages.get(hub_name, []),
groups = mods.hub_group_map.get(hub_name),
+ target_platforms = mods.target_platforms.get(hub_name, []),
)
if bazel_features.external_deps.extension_metadata_has_reproducible:
- # If we are not using the `experimental_index_url feature, the extension is fully
- # deterministic and we don't need to create a lock entry for it.
- #
- # In order to be able to dogfood the `experimental_index_url` feature before it gets
- # stabilized, we have created the `_pip_non_reproducible` function, that will result
- # in extra entries in the lock file.
+ # NOTE @aignas 2025-04-15: this is set to be reproducible, because the
+ # results after calling the PyPI index should be reproducible on each
+ # machine.
return module_ctx.extension_metadata(reproducible = True)
else:
return None
diff --git a/python/private/pypi/generate_whl_library_build_bazel.bzl b/python/private/pypi/generate_whl_library_build_bazel.bzl
index 8050cd22ad..7988aca1c4 100644
--- a/python/private/pypi/generate_whl_library_build_bazel.bzl
+++ b/python/private/pypi/generate_whl_library_build_bazel.bzl
@@ -21,23 +21,23 @@ _RENDER = {
"copy_files": render.dict,
"data": render.list,
"data_exclude": render.list,
- "dependencies": render.list,
- "dependencies_by_platform": lambda x: render.dict(x, value_repr = render.list),
"entry_points": render.dict,
+ "extras": render.list,
"group_deps": render.list,
+ "requires_dist": render.list,
"srcs_exclude": render.list,
- "tags": render.list,
+ "target_platforms": lambda x: render.list(x) if x else "target_platforms",
}
# NOTE @aignas 2024-10-25: We have to keep this so that files in
# this repository can be publicly visible without the need for
# export_files
_TEMPLATE = """\
-load("@rules_python//python/private/pypi:whl_library_targets.bzl", "whl_library_targets")
+{loads}
package(default_visibility = ["//visibility:public"])
-whl_library_targets(
+whl_library_targets_from_requires(
{kwargs}
)
"""
@@ -45,11 +45,13 @@ whl_library_targets(
def generate_whl_library_build_bazel(
*,
annotation = None,
+ default_python_version = None,
**kwargs):
"""Generate a BUILD file for an unzipped Wheel
Args:
annotation: The annotation for the build file.
+ default_python_version: The python version to use to parse the METADATA.
**kwargs: Extra args serialized to be passed to the
{obj}`whl_library_targets`.
@@ -57,6 +59,18 @@ def generate_whl_library_build_bazel(
A complete BUILD file as a string
"""
+ loads = [
+ """load("@rules_python//python/private/pypi:whl_library_targets.bzl", "whl_library_targets_from_requires")""",
+ ]
+ if not kwargs.setdefault("target_platforms", None):
+ dep_template = kwargs["dep_template"]
+ loads.append(
+ "load(\"{}\", \"{}\")".format(
+ dep_template.format(name = "", target = "config.bzl"),
+ "target_platforms",
+ ),
+ )
+
additional_content = []
if annotation:
kwargs["data"] = annotation.data
@@ -66,10 +80,13 @@ def generate_whl_library_build_bazel(
kwargs["srcs_exclude"] = annotation.srcs_exclude_glob
if annotation.additive_build_content:
additional_content.append(annotation.additive_build_content)
+ if default_python_version:
+ kwargs["default_python_version"] = default_python_version
contents = "\n".join(
[
_TEMPLATE.format(
+ loads = "\n".join(loads),
kwargs = render.indent("\n".join([
"{} = {},".format(k, _RENDER.get(k, repr)(v))
for k, v in sorted(kwargs.items())
diff --git a/python/private/pypi/hub_repository.bzl b/python/private/pypi/hub_repository.bzl
index 48245b4106..d2cbf88c24 100644
--- a/python/private/pypi/hub_repository.bzl
+++ b/python/private/pypi/hub_repository.bzl
@@ -45,7 +45,14 @@ def _impl(rctx):
macro_tmpl = "@@{name}//{{}}:{{}}".format(name = rctx.attr.name)
rctx.file("BUILD.bazel", _BUILD_FILE_CONTENTS)
- rctx.template("requirements.bzl", rctx.attr._template, substitutions = {
+ rctx.template(
+ "config.bzl",
+ rctx.attr._config_template,
+ substitutions = {
+ "%%TARGET_PLATFORMS%%": render.list(rctx.attr.target_platforms),
+ },
+ )
+ rctx.template("requirements.bzl", rctx.attr._requirements_bzl_template, substitutions = {
"%%ALL_DATA_REQUIREMENTS%%": render.list([
macro_tmpl.format(p, "data")
for p in bzl_packages
@@ -80,6 +87,10 @@ The list of packages that will be exposed via all_*requirements macros. Defaults
mandatory = True,
doc = "The apparent name of the repo. This is needed because in bzlmod, the name attribute becomes the canonical name.",
),
+ "target_platforms": attr.string_list(
+ mandatory = True,
+ doc = "All of the target platforms for the hub repo",
+ ),
"whl_map": attr.string_dict(
mandatory = True,
doc = """\
@@ -87,7 +98,10 @@ The wheel map where values are json.encoded strings of the whl_map constructed
in the pip.parse tag class.
""",
),
- "_template": attr.label(
+ "_config_template": attr.label(
+ default = ":config.bzl.tmpl.bzlmod",
+ ),
+ "_requirements_bzl_template": attr.label(
default = ":requirements.bzl.tmpl.bzlmod",
),
},
diff --git a/python/private/pypi/pep508.bzl b/python/private/pypi/pep508.bzl
deleted file mode 100644
index e74352def2..0000000000
--- a/python/private/pypi/pep508.bzl
+++ /dev/null
@@ -1,23 +0,0 @@
-# Copyright 2025 The Bazel Authors. All rights reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""This module is for implementing PEP508 in starlark as FeatureFlagInfo
-"""
-
-load(":pep508_env.bzl", _env = "env")
-load(":pep508_evaluate.bzl", _evaluate = "evaluate", _to_string = "to_string")
-
-to_string = _to_string
-evaluate = _evaluate
-env = _env
diff --git a/python/private/pypi/pep508_deps.bzl b/python/private/pypi/pep508_deps.bzl
index af0a75362b..115bbd78d8 100644
--- a/python/private/pypi/pep508_deps.bzl
+++ b/python/private/pypi/pep508_deps.bzl
@@ -15,36 +15,24 @@
"""This module is for implementing PEP508 compliant METADATA deps parsing.
"""
+load("@pythons_hub//:versions.bzl", "DEFAULT_PYTHON_VERSION")
load("//python/private:normalize_name.bzl", "normalize_name")
load(":pep508_env.bzl", "env")
load(":pep508_evaluate.bzl", "evaluate")
load(":pep508_platform.bzl", "platform", "platform_from_str")
load(":pep508_requirement.bzl", "requirement")
-_ALL_OS_VALUES = [
- "windows",
- "osx",
- "linux",
-]
-_ALL_ARCH_VALUES = [
- "aarch64",
- "ppc64",
- "ppc64le",
- "s390x",
- "x86_32",
- "x86_64",
-]
-
-def deps(name, *, requires_dist, platforms = [], extras = [], host_python_version = None):
+def deps(name, *, requires_dist, platforms = [], extras = [], excludes = [], default_python_version = None):
"""Parse the RequiresDist from wheel METADATA
Args:
name: {type}`str` the name of the wheel.
requires_dist: {type}`list[str]` the list of RequiresDist lines from the
METADATA file.
+ excludes: {type}`list[str]` the list of packages to exclude.
extras: {type}`list[str]` the requested extras to generate targets for.
platforms: {type}`list[str]` the list of target platform strings.
- host_python_version: {type}`str` the host python version.
+ default_python_version: {type}`str` the python version to use when the platforms do not include it.
Returns:
A struct with attributes:
@@ -62,18 +50,17 @@ def deps(name, *, requires_dist, platforms = [], extras = [], host_python_versio
want_extras = _resolve_extras(name, reqs, extras)
# drop self edges
- reqs = [r for r in reqs if r.name != name]
+ excludes = [name] + [normalize_name(x) for x in excludes]
+ default_python_version = default_python_version or DEFAULT_PYTHON_VERSION
platforms = [
- platform_from_str(p, python_version = host_python_version)
+ platform_from_str(p, python_version = default_python_version)
for p in platforms
- ] or [
- platform_from_str("", python_version = host_python_version),
]
abis = sorted({p.abi: True for p in platforms if p.abi})
- if host_python_version and len(abis) > 1:
- _, _, minor_version = host_python_version.partition(".")
+ if default_python_version and len(abis) > 1:
+ _, _, minor_version = default_python_version.partition(".")
minor_version, _, _ = minor_version.partition(".")
default_abi = "cp3" + minor_version
elif len(abis) > 1:
@@ -83,11 +70,20 @@ def deps(name, *, requires_dist, platforms = [], extras = [], host_python_versio
else:
default_abi = None
+ reqs_by_name = {}
+
for req in reqs:
- _add_req(
+ if req.name_ in excludes:
+ continue
+
+ reqs_by_name.setdefault(req.name, []).append(req)
+
+ for name, reqs in reqs_by_name.items():
+ _add_reqs(
deps,
deps_select,
- req,
+ normalize_name(name),
+ reqs,
extras = want_extras,
platforms = platforms,
default_abi = default_abi,
@@ -103,49 +99,14 @@ def deps(name, *, requires_dist, platforms = [], extras = [], host_python_versio
def _platform_str(self):
if self.abi == None:
- if not self.os and not self.arch:
- return "//conditions:default"
- elif not self.arch:
- return "@platforms//os:{}".format(self.os)
- else:
- return "{}_{}".format(self.os, self.arch)
+ return "{}_{}".format(self.os, self.arch)
- minor_version = self.abi[3:]
- if self.arch == None and self.os == None:
- return str(Label("//python/config_settings:is_python_3.{}".format(minor_version)))
-
- return "cp3{}_{}_{}".format(
- minor_version,
+ return "{}_{}_{}".format(
+ self.abi,
self.os or "anyos",
self.arch or "anyarch",
)
-def _platform_specializations(self, cpu_values = _ALL_ARCH_VALUES, os_values = _ALL_OS_VALUES):
- """Return the platform itself and all its unambiguous specializations.
-
- For more info about specializations see
- https://bazel.build/docs/configurable-attributes
- """
- specializations = []
- specializations.append(self)
- if self.arch == None:
- specializations.extend([
- platform(os = self.os, arch = arch, abi = self.abi)
- for arch in cpu_values
- ])
- if self.os == None:
- specializations.extend([
- platform(os = os, arch = self.arch, abi = self.abi)
- for os in os_values
- ])
- if self.os == None and self.arch == None:
- specializations.extend([
- platform(os = os, arch = arch, abi = self.abi)
- for os in os_values
- for arch in cpu_values
- ])
- return specializations
-
def _add(deps, deps_select, dep, platform):
dep = normalize_name(dep)
@@ -172,53 +133,7 @@ def _add(deps, deps_select, dep, platform):
return
# Add the platform-specific branch
- deps_select.setdefault(platform, {})
-
- # Add the dep to specializations of the given platform if they
- # exist in the select statement.
- for p in _platform_specializations(platform):
- if p not in deps_select:
- continue
-
- deps_select[p][dep] = True
-
- if len(deps_select[platform]) == 1:
- # We are adding a new item to the select and we need to ensure that
- # existing dependencies from less specialized platforms are propagated
- # to the newly added dependency set.
- for p, _deps in deps_select.items():
- # Check if the existing platform overlaps with the given platform
- if p == platform or platform not in _platform_specializations(p):
- continue
-
- deps_select[platform].update(_deps)
-
-def _maybe_add_common_dep(deps, deps_select, platforms, dep):
- abis = sorted({p.abi: True for p in platforms if p.abi})
- if len(abis) < 2:
- return
-
- platforms = [platform()] + [
- platform(abi = abi)
- for abi in abis
- ]
-
- # If the dep is targeting all target python versions, lets add it to
- # the common dependency list to simplify the select statements.
- for p in platforms:
- if p not in deps_select:
- return
-
- if dep not in deps_select[p]:
- return
-
- # All of the python version-specific branches have the dep, so lets add
- # it to the common deps.
- deps[dep] = True
- for p in platforms:
- deps_select[p].pop(dep)
- if not deps_select[p]:
- deps_select.pop(p)
+ deps_select.setdefault(platform, {})[dep] = True
def _resolve_extras(self_name, reqs, extras):
"""Resolve extras which are due to depending on self[some_other_extra].
@@ -275,77 +190,37 @@ def _resolve_extras(self_name, reqs, extras):
# Poor mans set
return sorted({x: None for x in extras})
-def _add_req(deps, deps_select, req, *, extras, platforms, default_abi = None):
- if not req.marker:
- _add(deps, deps_select, req.name, None)
- return
-
- # NOTE @aignas 2023-12-08: in order to have reasonable select statements
- # we do have to have some parsing of the markers, so it begs the question
- # if packaging should be reimplemented in Starlark to have the best solution
- # for now we will implement it in Python and see what the best parsing result
- # can be before making this decision.
- match_os = len([
- tag
- for tag in [
- "os_name",
- "sys_platform",
- "platform_system",
- ]
- if tag in req.marker
- ]) > 0
- match_arch = "platform_machine" in req.marker
- match_version = "version" in req.marker
-
- if not (match_os or match_arch or match_version):
- if [
- True
- for extra in extras
- for p in platforms
- if evaluate(
- req.marker,
- env = env(
- target_platform = p,
- extra = extra,
- ),
- )
- ]:
- _add(deps, deps_select, req.name, None)
- return
+def _add_reqs(deps, deps_select, dep, reqs, *, extras, platforms, default_abi = None):
+ for req in reqs:
+ if not req.marker:
+ _add(deps, deps_select, dep, None)
+ return
+ platforms_to_add = {}
for plat in platforms:
- if not [
- True
- for extra in extras
- if evaluate(
- req.marker,
- env = env(
- target_platform = plat,
- extra = extra,
- ),
- )
- ]:
+ if plat in platforms_to_add:
+ # marker evaluation is more expensive than this check
continue
- if match_arch and default_abi:
- _add(deps, deps_select, req.name, plat)
- if plat.abi == default_abi:
- _add(deps, deps_select, req.name, platform(os = plat.os, arch = plat.arch))
- elif match_arch:
- _add(deps, deps_select, req.name, platform(os = plat.os, arch = plat.arch))
- elif match_os and default_abi:
- _add(deps, deps_select, req.name, platform(os = plat.os, abi = plat.abi))
- if plat.abi == default_abi:
- _add(deps, deps_select, req.name, platform(os = plat.os))
- elif match_os:
- _add(deps, deps_select, req.name, platform(os = plat.os))
- elif match_version and default_abi:
- _add(deps, deps_select, req.name, platform(abi = plat.abi))
- if plat.abi == default_abi:
- _add(deps, deps_select, req.name, platform())
- elif match_version:
- _add(deps, deps_select, req.name, None)
- else:
- fail("BUG: {} support is not implemented".format(req.marker))
+ added = False
+ for extra in extras:
+ if added:
+ break
+
+ for req in reqs:
+ if evaluate(req.marker, env = env(target_platform = plat, extra = extra)):
+ platforms_to_add[plat] = True
+ added = True
+ break
+
+ if len(platforms_to_add) == len(platforms):
+ # the dep is in all target platforms, let's just add it to the regular
+ # list
+ _add(deps, deps_select, dep, None)
+ return
- _maybe_add_common_dep(deps, deps_select, platforms, req.name)
+ for plat in platforms_to_add:
+ if default_abi:
+ _add(deps, deps_select, dep, plat)
+ if plat.abi == default_abi or not default_abi:
+ _add(deps, deps_select, dep, platform(os = plat.os, arch = plat.arch))
diff --git a/python/private/pypi/pep508_requirement.bzl b/python/private/pypi/pep508_requirement.bzl
index ee7b5dfc35..b5be17f890 100644
--- a/python/private/pypi/pep508_requirement.bzl
+++ b/python/private/pypi/pep508_requirement.bzl
@@ -47,9 +47,11 @@ def requirement(spec):
requires, _, _ = requires.partition(char)
extras = extras_unparsed.replace(" ", "").split(",")
name = requires.strip(" ")
+ name = normalize_name(name)
return struct(
- name = normalize_name(name).replace("_", "-"),
+ name = name.replace("_", "-"),
+ name_ = name,
marker = marker.strip(" "),
extras = extras,
version = version,
diff --git a/python/private/pypi/whl_library.bzl b/python/private/pypi/whl_library.bzl
index 0a580011ab..630dc8519f 100644
--- a/python/private/pypi/whl_library.bzl
+++ b/python/private/pypi/whl_library.bzl
@@ -15,6 +15,7 @@
""
load("//python/private:auth.bzl", "AUTH_ATTRS", "get_auth")
+load("//python/private:bzlmod_enabled.bzl", "BZLMOD_ENABLED")
load("//python/private:envsubst.bzl", "envsubst")
load("//python/private:is_standalone_interpreter.bzl", "is_standalone_interpreter")
load("//python/private:repo_utils.bzl", "REPO_DEBUG_ENV_VAR", "repo_utils")
@@ -22,13 +23,10 @@ load(":attrs.bzl", "ATTRS", "use_isolated")
load(":deps.bzl", "all_repo_names", "record_files")
load(":generate_whl_library_build_bazel.bzl", "generate_whl_library_build_bazel")
load(":parse_requirements.bzl", "host_platform")
-load(":parse_whl_name.bzl", "parse_whl_name")
load(":patch_whl.bzl", "patch_whl")
-load(":pep508_deps.bzl", "deps")
load(":pep508_requirement.bzl", "requirement")
load(":pypi_repo_utils.bzl", "pypi_repo_utils")
load(":whl_metadata.bzl", "whl_metadata")
-load(":whl_target_platforms.bzl", "whl_target_platforms")
_CPPFLAGS = "CPPFLAGS"
_COMMAND_LINE_TOOLS_PATH_SLUG = "commandlinetools"
@@ -344,20 +342,6 @@ def _whl_library_impl(rctx):
timeout = rctx.attr.timeout,
)
- target_platforms = rctx.attr.experimental_target_platforms
- if target_platforms:
- parsed_whl = parse_whl_name(whl_path.basename)
- if parsed_whl.platform_tag != "any":
- # NOTE @aignas 2023-12-04: if the wheel is a platform specific
- # wheel, we only include deps for that target platform
- target_platforms = [
- p.target_platform
- for p in whl_target_platforms(
- platform_tag = parsed_whl.platform_tag,
- abi_tag = parsed_whl.abi_tag.strip("tm"),
- )
- ]
-
pypi_repo_utils.execute_checked(
rctx,
op = "whl_library.ExtractWheel({}, {})".format(rctx.attr.name, whl_path),
@@ -400,63 +384,45 @@ def _whl_library_impl(rctx):
)
entry_points[entry_point_without_py] = entry_point_script_name
- # TODO @aignas 2025-04-04: move this to whl_library_targets.bzl to have
- # this in the analysis phase.
- #
- # This means that whl_library_targets will have to accept the following args:
- # * name - the name of the package in the METADATA.
- # * requires_dist - the list of METADATA Requires-Dist.
- # * platforms - the list of target platforms. The target_platforms
- # should come from the hub repo via a 'load' statement so that they don't
- # need to be passed as an argument to `whl_library`.
- # * extras - the list of required extras. This comes from the
- # `rctx.attr.requirement` for now. In the future the required extras could
- # stay in the hub repo, where we calculate the extra aliases that we need
- # to create automatically and this way expose the targets for the specific
- # extras. The first step will be to generate a target per extra for the
- # `py_library` and `filegroup`. Maybe we need to have a special provider
- # or an output group so that we can return the `whl` file from the
- # `py_library` target? filegroup can use output groups to expose files.
- # * host_python_version/versons - the list of python versions to support
- # should come from the hub, similar to how the target platforms are specified.
- #
- # Extra things that we should move at the same time:
- # * group_name, group_deps - this info can stay in the hub repository so that
- # it is piped at the analysis time and changing the requirement groups does
- # cause to re-fetch the deps.
- python_version = metadata["python_version"]
+ if BZLMOD_ENABLED:
+ # The following attributes are unset on bzlmod and we pass data through
+ # the hub via load statements.
+ default_python_version = None
+ target_platforms = []
+ else:
+ # NOTE @aignas 2025-04-16: if BZLMOD_ENABLED, we should use
+ # DEFAULT_PYTHON_VERSION, since platforms always come with the actual
+ # python version; otherwise we should use the version of the interpreter
+ # here. In WORKSPACE, `multi_pip_parse` uses an interpreter for each
+ # `pip_parse` invocation, so we will only have the host target platform.
+ # Even if somebody changed the code to support
+ # `experimental_target_platforms`, the platforms would be for a single
+ # python version. Hence, using the `default_python_version` that we get
+ # from the interpreter is correct, so we unset the argument on bzlmod.
+ default_python_version = metadata["python_version"]
+ target_platforms = rctx.attr.experimental_target_platforms or [host_platform(rctx)]
+
metadata = whl_metadata(
install_dir = rctx.path("site-packages"),
read_fn = rctx.read,
logger = logger,
)
- # TODO @aignas 2025-04-09: this will later be removed when loaded through the hub
- major_minor, _, _ = python_version.rpartition(".")
- package_deps = deps(
- name = metadata.name,
- requires_dist = metadata.requires_dist,
- platforms = target_platforms or [
- "cp{}_{}".format(major_minor.replace(".", ""), host_platform(rctx)),
- ],
- extras = requirement(rctx.attr.requirement).extras,
- host_python_version = python_version,
- )
-
build_file_contents = generate_whl_library_build_bazel(
name = whl_path.basename,
+ metadata_name = metadata.name,
+ metadata_version = metadata.version,
+ requires_dist = metadata.requires_dist,
dep_template = rctx.attr.dep_template or "@{}{{name}}//:{{target}}".format(rctx.attr.repo_prefix),
- dependencies = package_deps.deps,
- dependencies_by_platform = package_deps.deps_select,
- group_name = rctx.attr.group_name,
- group_deps = rctx.attr.group_deps,
- data_exclude = rctx.attr.pip_data_exclude,
- tags = [
- "pypi_name=" + metadata.name,
- "pypi_version=" + metadata.version,
- ],
entry_points = entry_points,
+ target_platforms = target_platforms,
+ default_python_version = default_python_version,
+ # TODO @aignas 2025-04-14: load through the hub:
annotation = None if not rctx.attr.annotation else struct(**json.decode(rctx.read(rctx.attr.annotation))),
+ data_exclude = rctx.attr.pip_data_exclude,
+ extras = requirement(rctx.attr.requirement).extras,
+ group_deps = rctx.attr.group_deps,
+ group_name = rctx.attr.group_name,
)
rctx.file("BUILD.bazel", build_file_contents)
@@ -517,10 +483,7 @@ and the target that we need respectively.
doc = "Name of the group, if any.",
),
"repo": attr.string(
- doc = """\
-Pointer to parent repo name. Used to make these rules rerun if the parent repo changes.
-Only used in WORKSPACE when the {attr}`dep_template` is not set.
-""",
+ doc = "Pointer to parent repo name. Used to make these rules rerun if the parent repo changes.",
),
"repo_prefix": attr.string(
doc = """
diff --git a/python/private/pypi/whl_library_targets.bzl b/python/private/pypi/whl_library_targets.bzl
index d32746b604..cf3df133c4 100644
--- a/python/private/pypi/whl_library_targets.bzl
+++ b/python/private/pypi/whl_library_targets.bzl
@@ -29,6 +29,89 @@ load(
"WHEEL_FILE_IMPL_LABEL",
"WHEEL_FILE_PUBLIC_LABEL",
)
+load(":parse_whl_name.bzl", "parse_whl_name")
+load(":pep508_deps.bzl", "deps")
+load(":whl_target_platforms.bzl", "whl_target_platforms")
+
+def whl_library_targets_from_requires(
+ *,
+ name,
+ metadata_name = "",
+ metadata_version = "",
+ requires_dist = [],
+ extras = [],
+ target_platforms = [],
+ default_python_version = None,
+ group_deps = [],
+ **kwargs):
+ """The macro to create whl targets from the METADATA.
+
+ Args:
+ name: {type}`str` The wheel filename
+ metadata_name: {type}`str` The package name as written in wheel `METADATA`.
+ metadata_version: {type}`str` The package version as written in wheel `METADATA`.
+ group_deps: {type}`list[str]` names of fellow members of the group (if
+ any). These will be excluded from generated deps lists so as to avoid
+ direct cycles. These dependencies will be provided at runtime by the
+ group rules which wrap this library and its fellows together.
+ requires_dist: {type}`list[str]` The list of `Requires-Dist` values from
+ the whl `METADATA`.
+ extras: {type}`list[str]` The list of requested extras. This essentially includes extra transitive dependencies in the final targets depending on the wheel `METADATA`.
+ target_platforms: {type}`list[str]` The list of target platforms to create
+ dependency closures for.
+ default_python_version: {type}`str` The python version to assume when parsing
+ the `METADATA`. This is only used when the `target_platforms` do not
+ include the version information.
+ **kwargs: Extra args passed to the {obj}`whl_library_targets`
+ """
+ package_deps = _parse_requires_dist(
+ name = name,
+ default_python_version = default_python_version,
+ requires_dist = requires_dist,
+ excludes = group_deps,
+ extras = extras,
+ target_platforms = target_platforms,
+ )
+ whl_library_targets(
+ name = name,
+ dependencies = package_deps.deps,
+ dependencies_by_platform = package_deps.deps_select,
+ tags = [
+ "pypi_name={}".format(metadata_name),
+ "pypi_version={}".format(metadata_version),
+ ],
+ **kwargs
+ )
+
+def _parse_requires_dist(
+ *,
+ name,
+ default_python_version,
+ requires_dist,
+ excludes,
+ extras,
+ target_platforms):
+ parsed_whl = parse_whl_name(name)
+
+ # NOTE @aignas 2023-12-04: if the wheel is a platform specific wheel, we
+ # only include deps for that target platform
+ if parsed_whl.platform_tag != "any":
+ target_platforms = [
+ p.target_platform
+ for p in whl_target_platforms(
+ platform_tag = parsed_whl.platform_tag,
+ abi_tag = parsed_whl.abi_tag.strip("tm"),
+ )
+ ]
+
+ return deps(
+ name = normalize_name(parsed_whl.distribution),
+ requires_dist = requires_dist,
+ platforms = target_platforms,
+ excludes = excludes,
+ extras = extras,
+ default_python_version = default_python_version,
+ )
def whl_library_targets(
*,
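
A minimal usage sketch of the new macro above (hypothetical package name,
requirements, and platforms; not taken from this patch), roughly as it might
appear in a generated BUILD.bazel:

    load("@rules_python//python/private/pypi:whl_library_targets.bzl", "whl_library_targets_from_requires")

    whl_library_targets_from_requires(
        name = "mypkg-1.0-py3-none-any.whl",  # the wheel filename
        metadata_name = "MyPkg",  # `Name` as written in the wheel METADATA
        metadata_version = "1.0",
        requires_dist = [
            "requests",
            "colorama; os_name == 'nt'",
        ],
        target_platforms = [
            "cp311_linux_x86_64",
            "cp311_windows_x86_64",
        ],
        default_python_version = "3.11",
        dep_template = "@pypi_{name}//:{target}",
    )
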
diff --git a/tests/pypi/extension/extension_tests.bzl b/tests/pypi/extension/extension_tests.bzl
index 4d86d6a6e0..ce5474e35b 100644
--- a/tests/pypi/extension/extension_tests.bzl
+++ b/tests/pypi/extension/extension_tests.bzl
@@ -436,7 +436,6 @@ torch==2.4.1+cpu ; platform_machine == 'x86_64' \
pypi.whl_libraries().contains_exactly({
"pypi_312_torch_cp312_cp312_linux_x86_64_8800deef": {
"dep_template": "@pypi//{name}:{target}",
- "experimental_target_platforms": ["cp312_linux_x86_64"],
"filename": "torch-2.4.1+cpu-cp312-cp312-linux_x86_64.whl",
"python_interpreter_target": "unit_test_interpreter_target",
"requirement": "torch==2.4.1+cpu",
@@ -445,7 +444,6 @@ torch==2.4.1+cpu ; platform_machine == 'x86_64' \
},
"pypi_312_torch_cp312_cp312_manylinux_2_17_aarch64_36109432": {
"dep_template": "@pypi//{name}:{target}",
- "experimental_target_platforms": ["cp312_linux_aarch64"],
"filename": "torch-2.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl",
"python_interpreter_target": "unit_test_interpreter_target",
"requirement": "torch==2.4.1",
@@ -454,7 +452,6 @@ torch==2.4.1+cpu ; platform_machine == 'x86_64' \
},
"pypi_312_torch_cp312_cp312_win_amd64_3a570e5c": {
"dep_template": "@pypi//{name}:{target}",
- "experimental_target_platforms": ["cp312_windows_x86_64"],
"filename": "torch-2.4.1+cpu-cp312-cp312-win_amd64.whl",
"python_interpreter_target": "unit_test_interpreter_target",
"requirement": "torch==2.4.1+cpu",
@@ -463,7 +460,6 @@ torch==2.4.1+cpu ; platform_machine == 'x86_64' \
},
"pypi_312_torch_cp312_none_macosx_11_0_arm64_72b484d5": {
"dep_template": "@pypi//{name}:{target}",
- "experimental_target_platforms": ["cp312_osx_aarch64"],
"filename": "torch-2.4.1-cp312-none-macosx_11_0_arm64.whl",
"python_interpreter_target": "unit_test_interpreter_target",
"requirement": "torch==2.4.1",
@@ -750,7 +746,6 @@ git_dep @ git+https://git.server/repo/project@deadbeefdeadbeef
pypi.whl_libraries().contains_exactly({
"pypi_315_any_name": {
"dep_template": "@pypi//{name}:{target}",
- "experimental_target_platforms": ["cp315_linux_aarch64", "cp315_linux_arm", "cp315_linux_ppc", "cp315_linux_s390x", "cp315_linux_x86_64", "cp315_osx_aarch64", "cp315_osx_x86_64", "cp315_windows_x86_64"],
"extra_pip_args": ["--extra-args-for-sdist-building"],
"filename": "any-name.tar.gz",
"python_interpreter_target": "unit_test_interpreter_target",
@@ -760,7 +755,6 @@ git_dep @ git+https://git.server/repo/project@deadbeefdeadbeef
},
"pypi_315_direct_without_sha_0_0_1_py3_none_any": {
"dep_template": "@pypi//{name}:{target}",
- "experimental_target_platforms": ["cp315_linux_aarch64", "cp315_linux_arm", "cp315_linux_ppc", "cp315_linux_s390x", "cp315_linux_x86_64", "cp315_osx_aarch64", "cp315_osx_x86_64", "cp315_windows_x86_64"],
"filename": "direct_without_sha-0.0.1-py3-none-any.whl",
"python_interpreter_target": "unit_test_interpreter_target",
"requirement": "direct_without_sha==0.0.1 @ example-direct.org/direct_without_sha-0.0.1-py3-none-any.whl",
@@ -781,7 +775,6 @@ git_dep @ git+https://git.server/repo/project@deadbeefdeadbeef
},
"pypi_315_simple_py3_none_any_deadb00f": {
"dep_template": "@pypi//{name}:{target}",
- "experimental_target_platforms": ["cp315_linux_aarch64", "cp315_linux_arm", "cp315_linux_ppc", "cp315_linux_s390x", "cp315_linux_x86_64", "cp315_osx_aarch64", "cp315_osx_x86_64", "cp315_windows_x86_64"],
"filename": "simple-0.0.1-py3-none-any.whl",
"python_interpreter_target": "unit_test_interpreter_target",
"requirement": "simple==0.0.1",
@@ -790,7 +783,6 @@ git_dep @ git+https://git.server/repo/project@deadbeefdeadbeef
},
"pypi_315_simple_sdist_deadbeef": {
"dep_template": "@pypi//{name}:{target}",
- "experimental_target_platforms": ["cp315_linux_aarch64", "cp315_linux_arm", "cp315_linux_ppc", "cp315_linux_s390x", "cp315_linux_x86_64", "cp315_osx_aarch64", "cp315_osx_x86_64", "cp315_windows_x86_64"],
"extra_pip_args": ["--extra-args-for-sdist-building"],
"filename": "simple-0.0.1.tar.gz",
"python_interpreter_target": "unit_test_interpreter_target",
@@ -800,7 +792,6 @@ git_dep @ git+https://git.server/repo/project@deadbeefdeadbeef
},
"pypi_315_some_pkg_py3_none_any_deadbaaf": {
"dep_template": "@pypi//{name}:{target}",
- "experimental_target_platforms": ["cp315_linux_aarch64", "cp315_linux_arm", "cp315_linux_ppc", "cp315_linux_s390x", "cp315_linux_x86_64", "cp315_osx_aarch64", "cp315_osx_x86_64", "cp315_windows_x86_64"],
"filename": "some_pkg-0.0.1-py3-none-any.whl",
"python_interpreter_target": "unit_test_interpreter_target",
"requirement": "some_pkg==0.0.1 @ example-direct.org/some_pkg-0.0.1-py3-none-any.whl --hash=sha256:deadbaaf",
@@ -809,7 +800,6 @@ git_dep @ git+https://git.server/repo/project@deadbeefdeadbeef
},
"pypi_315_some_py3_none_any_deadb33f": {
"dep_template": "@pypi//{name}:{target}",
- "experimental_target_platforms": ["cp315_linux_aarch64", "cp315_linux_arm", "cp315_linux_ppc", "cp315_linux_s390x", "cp315_linux_x86_64", "cp315_osx_aarch64", "cp315_osx_x86_64", "cp315_windows_x86_64"],
"filename": "some-other-pkg-0.0.1-py3-none-any.whl",
"python_interpreter_target": "unit_test_interpreter_target",
"requirement": "some_other_pkg==0.0.1",
diff --git a/tests/pypi/generate_whl_library_build_bazel/generate_whl_library_build_bazel_tests.bzl b/tests/pypi/generate_whl_library_build_bazel/generate_whl_library_build_bazel_tests.bzl
index b0d8f6d17e..7bd19b65c1 100644
--- a/tests/pypi/generate_whl_library_build_bazel/generate_whl_library_build_bazel_tests.bzl
+++ b/tests/pypi/generate_whl_library_build_bazel/generate_whl_library_build_bazel_tests.bzl
@@ -21,11 +21,11 @@ _tests = []
def _test_all(env):
want = """\
-load("@rules_python//python/private/pypi:whl_library_targets.bzl", "whl_library_targets")
+load("@rules_python//python/private/pypi:whl_library_targets.bzl", "whl_library_targets_from_requires")
package(default_visibility = ["//visibility:public"])
-whl_library_targets(
+whl_library_targets_from_requires(
copy_executables = {
"exec_src": "exec_dest",
},
@@ -38,19 +38,71 @@ whl_library_targets(
"data_exclude_all",
],
dep_template = "@pypi//{name}:{target}",
- dependencies = [
+ entry_points = {
+ "foo": "bar.py",
+ },
+ group_deps = [
+ "foo",
+ "fox",
+ "qux",
+ ],
+ group_name = "qux",
+ name = "foo.whl",
+ requires_dist = [
"foo",
"bar-baz",
"qux",
],
- dependencies_by_platform = {
- "linux_x86_64": [
- "box",
- "box-amd64",
- ],
- "windows_x86_64": ["fox"],
- "@platforms//os:linux": ["box"],
+ srcs_exclude = ["srcs_exclude_all"],
+ target_platforms = ["foo"],
+)
+
+# SOMETHING SPECIAL AT THE END
+"""
+ actual = generate_whl_library_build_bazel(
+ dep_template = "@pypi//{name}:{target}",
+ name = "foo.whl",
+ requires_dist = ["foo", "bar-baz", "qux"],
+ entry_points = {
+ "foo": "bar.py",
+ },
+ data_exclude = ["exclude_via_attr"],
+ annotation = struct(
+ copy_files = {"file_src": "file_dest"},
+ copy_executables = {"exec_src": "exec_dest"},
+ data = ["extra_target"],
+ data_exclude_glob = ["data_exclude_all"],
+ srcs_exclude_glob = ["srcs_exclude_all"],
+ additive_build_content = """# SOMETHING SPECIAL AT THE END""",
+ ),
+ group_name = "qux",
+ target_platforms = ["foo"],
+ group_deps = ["foo", "fox", "qux"],
+ )
+ env.expect.that_str(actual.replace("@@", "@")).equals(want)
+
+_tests.append(_test_all)
+
+def _test_all_with_loads(env):
+ want = """\
+load("@rules_python//python/private/pypi:whl_library_targets.bzl", "whl_library_targets_from_requires")
+load("@pypi//:config.bzl", "target_platforms")
+
+package(default_visibility = ["//visibility:public"])
+
+whl_library_targets_from_requires(
+ copy_executables = {
+ "exec_src": "exec_dest",
},
+ copy_files = {
+ "file_src": "file_dest",
+ },
+ data = ["extra_target"],
+ data_exclude = [
+ "exclude_via_attr",
+ "data_exclude_all",
+ ],
+ dep_template = "@pypi//{name}:{target}",
entry_points = {
"foo": "bar.py",
},
@@ -61,11 +113,13 @@ whl_library_targets(
],
group_name = "qux",
name = "foo.whl",
- srcs_exclude = ["srcs_exclude_all"],
- tags = [
- "tag2",
- "tag1",
+ requires_dist = [
+ "foo",
+ "bar-baz",
+ "qux",
],
+ srcs_exclude = ["srcs_exclude_all"],
+ target_platforms = target_platforms,
)
# SOMETHING SPECIAL AT THE END
@@ -73,13 +127,7 @@ whl_library_targets(
actual = generate_whl_library_build_bazel(
dep_template = "@pypi//{name}:{target}",
name = "foo.whl",
- dependencies = ["foo", "bar-baz", "qux"],
- dependencies_by_platform = {
- "linux_x86_64": ["box", "box-amd64"],
- "windows_x86_64": ["fox"],
- "@platforms//os:linux": ["box"], # buildifier: disable=unsorted-dict-items to check that we sort inside the test
- },
- tags = ["tag2", "tag1"],
+ requires_dist = ["foo", "bar-baz", "qux"],
entry_points = {
"foo": "bar.py",
},
@@ -97,7 +145,7 @@ whl_library_targets(
)
env.expect.that_str(actual.replace("@@", "@")).equals(want)
-_tests.append(_test_all)
+_tests.append(_test_all_with_loads)
def generate_whl_library_build_bazel_test_suite(name):
"""Create the test suite.
diff --git a/tests/pypi/pep508/deps_tests.bzl b/tests/pypi/pep508/deps_tests.bzl
index 44031ab6a5..d362925080 100644
--- a/tests/pypi/pep508/deps_tests.bzl
+++ b/tests/pypi/pep508/deps_tests.bzl
@@ -29,58 +29,48 @@ def test_simple_deps(env):
_tests.append(test_simple_deps)
def test_can_add_os_specific_deps(env):
- got = deps(
- "foo",
- requires_dist = [
- "bar",
- "an_osx_dep; sys_platform=='darwin'",
- "posix_dep; os_name=='posix'",
- "win_dep; os_name=='nt'",
- ],
- platforms = [
- "linux_x86_64",
- "osx_x86_64",
- "osx_aarch64",
- "windows_x86_64",
- ],
- host_python_version = "3.3.1",
- )
-
- env.expect.that_collection(got.deps).contains_exactly(["bar"])
- env.expect.that_dict(got.deps_select).contains_exactly({
- "@platforms//os:linux": ["posix_dep"],
- "@platforms//os:osx": ["an_osx_dep", "posix_dep"],
- "@platforms//os:windows": ["win_dep"],
- })
+ for target in [
+ struct(
+ platforms = [
+ "linux_x86_64",
+ "osx_x86_64",
+ "osx_aarch64",
+ "windows_x86_64",
+ ],
+ python_version = "3.3.1",
+ ),
+ struct(
+ platforms = [
+ "cp33_linux_x86_64",
+ "cp33_osx_x86_64",
+ "cp33_osx_aarch64",
+ "cp33_windows_x86_64",
+ ],
+ python_version = "",
+ ),
+ ]:
+ got = deps(
+ "foo",
+ requires_dist = [
+ "bar",
+ "an_osx_dep; sys_platform=='darwin'",
+ "posix_dep; os_name=='posix'",
+ "win_dep; os_name=='nt'",
+ ],
+ platforms = target.platforms,
+ default_python_version = target.python_version,
+ )
+
+ env.expect.that_collection(got.deps).contains_exactly(["bar"])
+ env.expect.that_dict(got.deps_select).contains_exactly({
+ "linux_x86_64": ["posix_dep"],
+ "osx_aarch64": ["an_osx_dep", "posix_dep"],
+ "osx_x86_64": ["an_osx_dep", "posix_dep"],
+ "windows_x86_64": ["win_dep"],
+ })
_tests.append(test_can_add_os_specific_deps)
-def test_can_add_os_specific_deps_with_python_version(env):
- got = deps(
- "foo",
- requires_dist = [
- "bar",
- "an_osx_dep; sys_platform=='darwin'",
- "posix_dep; os_name=='posix'",
- "win_dep; os_name=='nt'",
- ],
- platforms = [
- "cp33_linux_x86_64",
- "cp33_osx_x86_64",
- "cp33_osx_aarch64",
- "cp33_windows_x86_64",
- ],
- )
-
- env.expect.that_collection(got.deps).contains_exactly(["bar"])
- env.expect.that_dict(got.deps_select).contains_exactly({
- "@platforms//os:linux": ["posix_dep"],
- "@platforms//os:osx": ["an_osx_dep", "posix_dep"],
- "@platforms//os:windows": ["win_dep"],
- })
-
-_tests.append(test_can_add_os_specific_deps_with_python_version)
-
def test_deps_are_added_to_more_specialized_platforms(env):
got = deps(
"foo",
@@ -92,41 +82,16 @@ def test_deps_are_added_to_more_specialized_platforms(env):
"osx_x86_64",
"osx_aarch64",
],
- host_python_version = "3.8.4",
+ default_python_version = "3.8.4",
)
- env.expect.that_collection(got.deps).contains_exactly([])
+ env.expect.that_collection(got.deps).contains_exactly(["mac_dep"])
env.expect.that_dict(got.deps_select).contains_exactly({
- "@platforms//os:osx": ["mac_dep"],
- "osx_aarch64": ["m1_dep", "mac_dep"],
+ "osx_aarch64": ["m1_dep"],
})
_tests.append(test_deps_are_added_to_more_specialized_platforms)
-def test_deps_from_more_specialized_platforms_are_propagated(env):
- got = deps(
- "foo",
- requires_dist = [
- "a_mac_dep; sys_platform=='darwin'",
- "m1_dep; sys_platform=='darwin' and platform_machine=='arm64'",
- ],
- platforms = [
- "osx_x86_64",
- "osx_aarch64",
- ],
- host_python_version = "3.8.4",
- )
-
- env.expect.that_collection(got.deps).contains_exactly([])
- env.expect.that_dict(got.deps_select).contains_exactly(
- {
- "@platforms//os:osx": ["a_mac_dep"],
- "osx_aarch64": ["a_mac_dep", "m1_dep"],
- },
- )
-
-_tests.append(test_deps_from_more_specialized_platforms_are_propagated)
-
def test_non_platform_markers_are_added_to_common_deps(env):
got = deps(
"foo",
@@ -141,7 +106,7 @@ def test_non_platform_markers_are_added_to_common_deps(env):
"osx_aarch64",
"windows_x86_64",
],
- host_python_version = "3.8.4",
+ default_python_version = "3.8.4",
)
env.expect.that_collection(got.deps).contains_exactly(["bar", "baz"])
@@ -204,38 +169,34 @@ def _test_can_get_deps_based_on_specific_python_version(env):
platforms = ["cp37_linux_x86_64"],
)
+ # since there is a single target platform, the deps_select will be empty
env.expect.that_collection(py37.deps).contains_exactly(["bar", "baz"])
env.expect.that_dict(py37.deps_select).contains_exactly({})
- env.expect.that_collection(py38.deps).contains_exactly(["bar"])
- env.expect.that_dict(py38.deps_select).contains_exactly({"@platforms//os:linux": ["posix_dep"]})
+ env.expect.that_collection(py38.deps).contains_exactly(["bar", "posix_dep"])
+ env.expect.that_dict(py38.deps_select).contains_exactly({})
_tests.append(_test_can_get_deps_based_on_specific_python_version)
def _test_no_version_select_when_single_version(env):
- requires_dist = [
- "bar",
- "baz; python_version >= '3.8'",
- "posix_dep; os_name=='posix'",
- "posix_dep_with_version; os_name=='posix' and python_version >= '3.8'",
- "arch_dep; platform_machine=='x86_64' and python_version >= '3.8'",
- ]
- host_python_version = "3.7.5"
-
got = deps(
"foo",
- requires_dist = requires_dist,
+ requires_dist = [
+ "bar",
+ "baz; python_version >= '3.8'",
+ "posix_dep; os_name=='posix'",
+ "posix_dep_with_version; os_name=='posix' and python_version >= '3.8'",
+ "arch_dep; platform_machine=='x86_64' and python_version >= '3.8'",
+ ],
platforms = [
"cp38_linux_x86_64",
"cp38_windows_x86_64",
],
- host_python_version = host_python_version,
+ default_python_version = "",
)
- env.expect.that_collection(got.deps).contains_exactly(["bar", "baz"])
+ env.expect.that_collection(got.deps).contains_exactly(["bar", "baz", "arch_dep"])
env.expect.that_dict(got.deps_select).contains_exactly({
- "@platforms//os:linux": ["posix_dep", "posix_dep_with_version"],
- "linux_x86_64": ["arch_dep", "posix_dep", "posix_dep_with_version"],
- "windows_x86_64": ["arch_dep"],
+ "linux_x86_64": ["posix_dep", "posix_dep_with_version"],
})
_tests.append(_test_no_version_select_when_single_version)
@@ -249,7 +210,7 @@ def _test_can_get_version_select(env):
"posix_dep_with_version; os_name=='posix' and python_version >= '3.8'",
"arch_dep; platform_machine=='x86_64' and python_version < '3.8'",
]
- host_python_version = "3.7.4"
+ default_python_version = "3.7.4"
got = deps(
"foo",
@@ -259,31 +220,19 @@ def _test_can_get_version_select(env):
for minor in [7, 8, 9]
for os in ["linux", "windows"]
],
- host_python_version = host_python_version,
+ default_python_version = default_python_version,
)
env.expect.that_collection(got.deps).contains_exactly(["bar"])
env.expect.that_dict(got.deps_select).contains_exactly({
- str(Label("//python/config_settings:is_python_3.7")): ["baz"],
- str(Label("//python/config_settings:is_python_3.8")): ["baz_new"],
- str(Label("//python/config_settings:is_python_3.9")): ["baz_new"],
- "@platforms//os:linux": ["baz", "posix_dep"],
- "cp37_linux_anyarch": ["baz", "posix_dep"],
"cp37_linux_x86_64": ["arch_dep", "baz", "posix_dep"],
"cp37_windows_x86_64": ["arch_dep", "baz"],
- "cp38_linux_anyarch": [
- "baz_new",
- "posix_dep",
- "posix_dep_with_version",
- ],
- "cp39_linux_anyarch": [
- "baz_new",
- "posix_dep",
- "posix_dep_with_version",
- ],
+ "cp38_linux_x86_64": ["baz_new", "posix_dep", "posix_dep_with_version"],
+ "cp38_windows_x86_64": ["baz_new"],
+ "cp39_linux_x86_64": ["baz_new", "posix_dep", "posix_dep_with_version"],
+ "cp39_windows_x86_64": ["baz_new"],
"linux_x86_64": ["arch_dep", "baz", "posix_dep"],
"windows_x86_64": ["arch_dep", "baz"],
- "//conditions:default": ["baz"],
})
_tests.append(_test_can_get_version_select)
@@ -294,7 +243,7 @@ def _test_deps_spanning_all_target_py_versions_are_added_to_common(env):
"baz (<2,>=1.11) ; python_version < '3.8'",
"baz (<2,>=1.14) ; python_version >= '3.8'",
]
- host_python_version = "3.8.4"
+ default_python_version = "3.8.4"
got = deps(
"foo",
@@ -303,7 +252,7 @@ def _test_deps_spanning_all_target_py_versions_are_added_to_common(env):
"cp3{}_linux_x86_64".format(minor)
for minor in [7, 8, 9]
],
- host_python_version = host_python_version,
+ default_python_version = default_python_version,
)
env.expect.that_collection(got.deps).contains_exactly(["bar", "baz"])
@@ -312,7 +261,7 @@ def _test_deps_spanning_all_target_py_versions_are_added_to_common(env):
_tests.append(_test_deps_spanning_all_target_py_versions_are_added_to_common)
def _test_deps_are_not_duplicated(env):
- host_python_version = "3.7.4"
+ default_python_version = "3.7.4"
# See an example in
# https://files.pythonhosted.org/packages/76/9e/db1c2d56c04b97981c06663384f45f28950a73d9acf840c4006d60d0a1ff/opencv_python-4.9.0.80-cp37-abi3-win32.whl.metadata
@@ -336,7 +285,7 @@ def _test_deps_are_not_duplicated(env):
for os in ["linux", "osx", "windows"]
for arch in ["x86_64", "aarch64"]
],
- host_python_version = host_python_version,
+ default_python_version = default_python_version,
)
env.expect.that_collection(got.deps).contains_exactly(["bar"])
@@ -345,7 +294,7 @@ def _test_deps_are_not_duplicated(env):
_tests.append(_test_deps_are_not_duplicated)
def _test_deps_are_not_duplicated_when_encountering_platform_dep_first(env):
- host_python_version = "3.7.1"
+ default_python_version = "3.7.1"
# Note, that we are sorting the incoming `requires_dist` and we need to ensure that we are not getting any
# issues even if the platform-specific line comes first.
@@ -363,15 +312,13 @@ def _test_deps_are_not_duplicated_when_encountering_platform_dep_first(env):
"cp310_linux_aarch64",
"cp310_linux_x86_64",
],
- host_python_version = host_python_version,
+ default_python_version = default_python_version,
)
- # TODO @aignas 2025-02-24: this test case in the python version is passing but
- # I am not sure why. The starlark version behaviour looks more correct.
env.expect.that_collection(got.deps).contains_exactly([])
env.expect.that_dict(got.deps_select).contains_exactly({
- str(Label("//python/config_settings:is_python_3.10")): ["bar"],
"cp310_linux_aarch64": ["bar"],
+ "cp310_linux_x86_64": ["bar"],
"cp37_linux_aarch64": ["bar"],
"linux_aarch64": ["bar"],
})
diff --git a/tests/pypi/whl_library_targets/whl_library_targets_tests.bzl b/tests/pypi/whl_library_targets/whl_library_targets_tests.bzl
index f738e03b5d..61e5441050 100644
--- a/tests/pypi/whl_library_targets/whl_library_targets_tests.bzl
+++ b/tests/pypi/whl_library_targets/whl_library_targets_tests.bzl
@@ -16,7 +16,7 @@
load("@rules_testing//lib:test_suite.bzl", "test_suite")
load("//python/private:glob_excludes.bzl", "glob_excludes") # buildifier: disable=bzl-visibility
-load("//python/private/pypi:whl_library_targets.bzl", "whl_library_targets") # buildifier: disable=bzl-visibility
+load("//python/private/pypi:whl_library_targets.bzl", "whl_library_targets", "whl_library_targets_from_requires") # buildifier: disable=bzl-visibility
_tests = []
@@ -183,6 +183,71 @@ def _test_entrypoints(env):
_tests.append(_test_entrypoints)
+def _test_whl_and_library_deps_from_requires(env):
+ filegroup_calls = []
+ py_library_calls = []
+
+ whl_library_targets_from_requires(
+ name = "foo-0-py3-none-any.whl",
+ metadata_name = "Foo",
+ metadata_version = "0",
+ dep_template = "@pypi_{name}//:{target}",
+ requires_dist = [
+ "foo", # this self-edge will be ignored
+ "bar-baz",
+ ],
+ target_platforms = ["cp38_linux_x86_64"],
+ default_python_version = "3.8.1",
+ data_exclude = [],
+ # Overrides for testing
+ filegroups = {},
+ native = struct(
+ filegroup = lambda **kwargs: filegroup_calls.append(kwargs),
+ config_setting = lambda **_: None,
+ glob = _glob,
+ select = _select,
+ ),
+ rules = struct(
+ py_library = lambda **kwargs: py_library_calls.append(kwargs),
+ ),
+ )
+
+ env.expect.that_collection(filegroup_calls).contains_exactly([
+ {
+ "name": "whl",
+ "srcs": ["foo-0-py3-none-any.whl"],
+ "data": ["@pypi_bar_baz//:whl"],
+ "visibility": ["//visibility:public"],
+ },
+ ]) # buildifier: @unsorted-dict-items
+ env.expect.that_collection(py_library_calls).contains_exactly([
+ {
+ "name": "pkg",
+ "srcs": _glob(
+ ["site-packages/**/*.py"],
+ exclude = [],
+ allow_empty = True,
+ ),
+ "pyi_srcs": _glob(["site-packages/**/*.pyi"], allow_empty = True),
+ "data": [] + _glob(
+ ["site-packages/**/*"],
+ exclude = [
+ "**/*.py",
+ "**/*.pyc",
+ "**/*.pyc.*",
+ "**/*.dist-info/RECORD",
+ ] + glob_excludes.version_dependent_exclusions(),
+ ),
+ "imports": ["site-packages"],
+ "deps": ["@pypi_bar_baz//:pkg"],
+ "tags": ["pypi_name=Foo", "pypi_version=0"],
+ "visibility": ["//visibility:public"],
+ "experimental_venvs_site_packages": Label("//python/config_settings:venvs_site_packages"),
+ },
+ ]) # buildifier: @unsorted-dict-items
+
+_tests.append(_test_whl_and_library_deps_from_requires)
+
def _test_whl_and_library_deps(env):
filegroup_calls = []
py_library_calls = []
From c981569cc89c76eb57a78f0bbc47f1566211c924 Mon Sep 17 00:00:00 2001
From: Ignas Anikevicius <240938+aignas@users.noreply.github.com>
Date: Mon, 21 Apr 2025 15:13:10 +0900
Subject: [PATCH 039/156] chore: remove a stray file (#2795)
Remove a stray file
---
config.bzl.tmpl.bzlmod | 0
1 file changed, 0 insertions(+), 0 deletions(-)
delete mode 100644 config.bzl.tmpl.bzlmod
diff --git a/config.bzl.tmpl.bzlmod b/config.bzl.tmpl.bzlmod
deleted file mode 100644
index e69de29bb2..0000000000
From e11873323ffc2694489131fd2f861c0619907bc1 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Mon, 21 Apr 2025 22:19:07 +0000
Subject: [PATCH 040/156] build(deps): bump sphinx-rtd-theme from 3.0.1 to
3.0.2 in /docs (#2802)
Bumps
[sphinx-rtd-theme](https://github.com/readthedocs/sphinx_rtd_theme) from
3.0.1 to 3.0.2.
Changelog (sourced from sphinx-rtd-theme's changelog):

3.0.2
- Show current translation when the flyout is attached
- Fix JavaScript issue that didn't allow users to disable selectors
Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
docs/requirements.txt | 6 +++---
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/docs/requirements.txt b/docs/requirements.txt
index 5e308b00f4..747ae59e1a 100644
--- a/docs/requirements.txt
+++ b/docs/requirements.txt
@@ -319,9 +319,9 @@ sphinx-reredirects==0.1.6 \
--hash=sha256:c491cba545f67be9697508727818d8626626366245ae64456fe29f37e9bbea64 \
--hash=sha256:efd50c766fbc5bf40cd5148e10c00f2c00d143027de5c5e48beece93cc40eeea
# via rules-python-docs (docs/pyproject.toml)
-sphinx-rtd-theme==3.0.1 \
- --hash=sha256:921c0ece75e90633ee876bd7b148cfaad136b481907ad154ac3669b6fc957916 \
- --hash=sha256:a4c5745d1b06dfcb80b7704fe532eb765b44065a8fad9851e4258c8804140703
+sphinx-rtd-theme==3.0.2 \
+ --hash=sha256:422ccc750c3a3a311de4ae327e82affdaf59eb695ba4936538552f3b00f4ee13 \
+ --hash=sha256:b7457bc25dda723b20b086a670b9953c859eab60a2a03ee8eb2bb23e176e5f85
# via rules-python-docs (docs/pyproject.toml)
sphinxcontrib-applehelp==2.0.0 \
--hash=sha256:2f29ef331735ce958efa4734873f084941970894c6090408b079c61b2e1c06d1 \
From a57c4de9dbb722765685cd2deae71fc73efcde75 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Mon, 21 Apr 2025 22:19:54 +0000
Subject: [PATCH 041/156] build(deps): bump astroid from 3.3.6 to 3.3.9 in
/docs (#2803)
Bumps [astroid](https://github.com/pylint-dev/astroid) from 3.3.6 to
3.3.9.
Release notes (sourced from astroid's releases):
- v3.3.9: release date 2025-03-09
- v3.3.8: release date 2024-12-23
- v3.3.7: release date 2024-12-21 (the changelog lists 2024-12-20; this release was yanked)

Commits:
- a6ccad5 Bump astroid to 3.3.9, update changelog
- ec2df97 Add setuptools in order to run 3.12/3.13 tests
- 74c34fb Bump actions/cache from 4.2.0 to 4.2.2 (#2692)
- 5512bf2 Update release workflow to use Trusted Publishing (#2696)
- aad8e68 [Backport maintenance/3.3.x] Fix missing dict (#2685) (#2690)
- 234be58 Fix RuntimeError caused by analyzing live objects with __getattribute__ or ...
- 6aeafd5 Bump pylint in pre-commit configuration to 3.2.7
- d52799b Bump astroid to 3.3.8, update changelog
- 68714df [Backport maintenance/3.3.x] Another attempt at fixing the collections.abc ...
- 7cfbad1 Skip flaky recursion test on PyPy (#2661) (#2663)
- Additional commits viewable in the compare view
Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
docs/requirements.txt | 6 +++---
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/docs/requirements.txt b/docs/requirements.txt
index 747ae59e1a..ee242e07d0 100644
--- a/docs/requirements.txt
+++ b/docs/requirements.txt
@@ -10,9 +10,9 @@ alabaster==1.0.0 \
--hash=sha256:c00dca57bca26fa62a6d7d0a9fcce65f3e026e9bfe33e9c538fd3fbb2144fd9e \
--hash=sha256:fc6786402dc3fcb2de3cabd5fe455a2db534b371124f1f21de8731783dec828b
# via sphinx
-astroid==3.3.6 \
- --hash=sha256:6aaea045f938c735ead292204afdb977a36e989522b7833ef6fea94de743f442 \
- --hash=sha256:db676dc4f3ae6bfe31cda227dc60e03438378d7a896aec57422c95634e8d722f
+astroid==3.3.9 \
+ --hash=sha256:622cc8e3048684aa42c820d9d218978021c3c3d174fb03a9f0d615921744f550 \
+ --hash=sha256:d05bfd0acba96a7bd43e222828b7d9bc1e138aaeb0649707908d3702a9831248
# via sphinx-autodoc2
babel==2.17.0 \
--hash=sha256:0c54cffb19f690cdcc52a3b50bcbf71e07a808d1c80d549f2459b9d2cf0afb9d \
From aaf8ce8adb43536f24ecfe38038351afafcbfa65 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Mon, 21 Apr 2025 22:22:05 +0000
Subject: [PATCH 042/156] build(deps): bump packaging from 24.2 to 25.0 in
/docs (#2804)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Bumps [packaging](https://github.com/pypa/packaging) from 24.2 to 25.0.
Changelog (sourced from packaging's changelog):

25.0 - 2025-04-19
* PEP 751: Add support for ``extras`` and ``dependency_groups`` markers. (:issue:`885`)
* PEP 738: Add support for Android platform tags. (:issue:`880`)

Full Changelog: https://github.com/pypa/packaging/compare/24.2...25.0
Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
docs/requirements.txt | 6 +++---
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/docs/requirements.txt b/docs/requirements.txt
index ee242e07d0..e4ec16fa5e 100644
--- a/docs/requirements.txt
+++ b/docs/requirements.txt
@@ -223,9 +223,9 @@ myst-parser==4.0.0 \
--hash=sha256:851c9dfb44e36e56d15d05e72f02b80da21a9e0d07cba96baf5e2d476bb91531 \
--hash=sha256:b9317997552424448c6096c2558872fdb6f81d3ecb3a40ce84a7518798f3f28d
# via rules-python-docs (docs/pyproject.toml)
-packaging==24.2 \
- --hash=sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759 \
- --hash=sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f
+packaging==25.0 \
+ --hash=sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484 \
+ --hash=sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f
# via
# readthedocs-sphinx-ext
# sphinx
From f4780f7b71dc224ea3b51b4ec8048b829e1f3375 Mon Sep 17 00:00:00 2001
From: Richard Levasseur
Date: Mon, 21 Apr 2025 15:35:13 -0700
Subject: [PATCH 043/156] fix: fixes to prepare for making bootstrap=script the
default for Linux (#2760)
Various cleanup and prep work to switch bootstrap=script to be the
default.
* Change `bootstrap_impl` to always be disabled for Windows. This allows
  setting it to true in a bazelrc without worrying about the target
  platform. This is done by using FeatureFlagInfo to force the value to
  disabled for Windows, which allows any downstream usages of the flag to
  Just Work and not have to add selects() for Windows themselves (see the
  sketch below).
* Switch the pip_repository_annotations test to `import python.runfiles`.
  The script bootstrap doesn't add the runfiles root to sys.path, so
  `import rules_python` stops working.
* Switch the gazelle workspace to using the runtime-env toolchain. It was
  previously implicitly using the deprecated one built into Bazel, which
  doesn't provide various necessary provider fields.
* Make the local toolchain use `sys._base_executable` instead of
  `sys.executable` when finding the interpreter. Otherwise, it might find
  a venv interpreter or not properly handle wrapper scripts like pyenv.
* Add a toolchain attribute/field to indicate whether the toolchain
  supports a build-time created venv. This is due to the runtime_env
  toolchain; see the PR comments for details, but in short: if we don't
  know the Python interpreter path and version at build time, the venv
  may not properly activate or find site-packages. If it isn't supported,
  then the stage1 bootstrap creates a temporary venv, similar to how the
  zip case is handled. Unfortunately, this requires invoking Python
  itself as part of program startup, but I don't see a way around that --
  note this is only triggered by the runtime-env toolchain.
* Make the runtime-env toolchain better support virtualenvs. Because it's
  a wrapper that re-invokes Python, Python can't automatically detect it's
  in a venv. Two tricks are used (`exec -a` and PYTHONEXECUTABLE) to help
  address this, but they aren't guaranteed to work, hence the "recreate at
  runtime" logic.
* Fix a subtle issue where `sys._base_executable` isn't set correctly due
  to `home` missing in the pyvenv.cfg file. This mostly only affected the
  creation of venvs from within the bazel-created venv.
* Change the bazel site init to always add the build-time created
  site-packages (if it exists) as a site directory. This matches the
  system_python bootstrap behavior a bit better, which just shoved
  everything onto sys.path using PYTHONPATH.
* Skip running runtime_env_toolchains tests on RBE. RBE's system python
  is 3.6, but the script bootstrap uses 3.9 features. (Running it on RBE
  is questionable anyway.)

Along the way...

* Ignore gazelle convenience symlinks.
* Switch the pip_repository_annotations test to use
  non-legacy_external_runfiles based paths. The legacy behavior is
  disabled in Bazel 8+ by default.
* Also document why the script bootstrap doesn't add the runfiles root to
  sys.path.
Work towards https://github.com/bazel-contrib/rules_python/issues/2521
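
A minimal sketch of what the first bullet enables (hypothetical target and
file names, not part of this change): downstream code can select() on the
flag without adding its own Windows branch, because on Windows the flag
itself already reports `system_python`:

    config_setting(
        name = "_bootstrap_script",
        flag_values = {
            "@rules_python//python/config_settings:bootstrap_impl": "script",
        },
    )

    filegroup(
        name = "script_bootstrap_helpers",
        srcs = select({
            ":_bootstrap_script": ["venv_helpers.sh"],
            "//conditions:default": [],
        }),
    )
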
---------
Co-authored-by: Ignas Anikevicius <240938+aignas@users.noreply.github.com>
---
.bazelignore | 1 +
CHANGELOG.md | 10 +-
examples/pip_repository_annotations/.bazelrc | 1 +
.../pip_repository_annotations_test.py | 25 ++---
gazelle/WORKSPACE | 2 +
python/config_settings/BUILD.bazel | 16 +++-
python/private/BUILD.bazel | 1 +
python/private/config_settings.bzl | 30 ++++++
python/private/flags.bzl | 32 ++++++-
python/private/get_local_runtime_info.py | 1 +
python/private/local_runtime_repo.bzl | 14 +++
python/private/py_executable.bzl | 35 ++++++-
python/private/py_runtime_info.bzl | 26 ++++-
python/private/py_runtime_rule.bzl | 12 +++
python/private/runtime_env_toolchain.bzl | 12 +++
.../runtime_env_toolchain_interpreter.sh | 26 ++++-
python/private/site_init_template.py | 30 ++++++
python/private/stage1_bootstrap_template.sh | 94 ++++++++++++++-----
python/private/stage2_bootstrap_template.py | 22 +++++
.../integration/local_toolchains/BUILD.bazel | 2 +
tests/integration/local_toolchains/test.py | 53 +++++++++--
tests/runtime_env_toolchain/BUILD.bazel | 4 +
22 files changed, 393 insertions(+), 56 deletions(-)
diff --git a/.bazelignore b/.bazelignore
index e10af2035d..fb999097f5 100644
--- a/.bazelignore
+++ b/.bazelignore
@@ -25,6 +25,7 @@ examples/pip_parse/bazel-pip_parse
examples/pip_parse_vendored/bazel-pip_parse_vendored
examples/pip_repository_annotations/bazel-pip_repository_annotations
examples/py_proto_library/bazel-py_proto_library
+gazelle/bazel-gazelle
tests/integration/compile_pip_requirements/bazel-compile_pip_requirements
tests/integration/ignore_root_user_error/bazel-ignore_root_user_error
tests/integration/local_toolchains/bazel-local_toolchains
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 154b66114b..f696cefde2 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -54,13 +54,21 @@ END_UNRELEASED_TEMPLATE
{#v0-0-0-changed}
### Changed
-* Nothing changed.
+* (rules) On Windows, {obj}`--bootstrap_impl=system_python` is forced. This
+ allows setting `--bootstrap_impl=script` in bazelrc for mixed-platform
+ environments.
{#v0-0-0-fixed}
### Fixed
+
* (rules) PyInfo provider is now advertised by py_test, py_binary, and py_library;
this allows aspects using required_providers to function correctly.
([#2506](https://github.com/bazel-contrib/rules_python/issues/2506)).
+* Fixes when using {obj}`--bootstrap_impl=script`:
+ * `compile_pip_requirements` now works with it
+ * The `sys._base_executable` value will reflect the underlying interpreter,
+ not the venv interpreter.
+ * The {obj}`//python/runtime_env_toolchains:all` toolchain now works with it.
{#v0-0-0-added}
### Added
diff --git a/examples/pip_repository_annotations/.bazelrc b/examples/pip_repository_annotations/.bazelrc
index c16c5a24f2..9397bd31b8 100644
--- a/examples/pip_repository_annotations/.bazelrc
+++ b/examples/pip_repository_annotations/.bazelrc
@@ -5,4 +5,5 @@ try-import %workspace%/user.bazelrc
# is in examples/bzlmod as the `whl_mods` feature.
common --noenable_bzlmod
common --enable_workspace
+common --legacy_external_runfiles=false
common --incompatible_python_disallow_native_rules
diff --git a/examples/pip_repository_annotations/pip_repository_annotations_test.py b/examples/pip_repository_annotations/pip_repository_annotations_test.py
index e41dd4f0f6..219be1ba03 100644
--- a/examples/pip_repository_annotations/pip_repository_annotations_test.py
+++ b/examples/pip_repository_annotations/pip_repository_annotations_test.py
@@ -21,7 +21,7 @@
import unittest
from pathlib import Path
-from rules_python.python.runfiles import runfiles
+from python.runfiles import runfiles
class PipRepositoryAnnotationsTest(unittest.TestCase):
@@ -34,11 +34,7 @@ def wheel_pkg_dir(self) -> str:
def test_build_content_and_data(self):
r = runfiles.Create()
- rpath = r.Rlocation(
- "pip_repository_annotations_example/external/{}/generated_file.txt".format(
- self.wheel_pkg_dir()
- )
- )
+ rpath = r.Rlocation("{}/generated_file.txt".format(self.wheel_pkg_dir()))
generated_file = Path(rpath)
self.assertTrue(generated_file.exists())
@@ -47,11 +43,7 @@ def test_build_content_and_data(self):
def test_copy_files(self):
r = runfiles.Create()
- rpath = r.Rlocation(
- "pip_repository_annotations_example/external/{}/copied_content/file.txt".format(
- self.wheel_pkg_dir()
- )
- )
+ rpath = r.Rlocation("{}/copied_content/file.txt".format(self.wheel_pkg_dir()))
copied_file = Path(rpath)
self.assertTrue(copied_file.exists())
@@ -61,7 +53,7 @@ def test_copy_files(self):
def test_copy_executables(self):
r = runfiles.Create()
rpath = r.Rlocation(
- "pip_repository_annotations_example/external/{}/copied_content/executable{}".format(
+ "{}/copied_content/executable{}".format(
self.wheel_pkg_dir(),
".exe" if platform.system() == "windows" else ".py",
)
@@ -82,7 +74,7 @@ def test_data_exclude_glob(self):
current_wheel_version = "0.38.4"
r = runfiles.Create()
- dist_info_dir = "pip_repository_annotations_example/external/{}/site-packages/wheel-{}.dist-info".format(
+ dist_info_dir = "{}/site-packages/wheel-{}.dist-info".format(
self.wheel_pkg_dir(),
current_wheel_version,
)
@@ -113,11 +105,8 @@ def test_extra(self):
# This test verifies that annotations work correctly for pip packages with extras
# specified, in this case requests[security].
r = runfiles.Create()
- rpath = r.Rlocation(
- "pip_repository_annotations_example/external/{}/generated_file.txt".format(
- self.requests_pkg_dir()
- )
- )
+ path = "{}/generated_file.txt".format(self.requests_pkg_dir())
+ rpath = r.Rlocation(path)
generated_file = Path(rpath)
self.assertTrue(generated_file.exists())
diff --git a/gazelle/WORKSPACE b/gazelle/WORKSPACE
index 14a124d5f2..ad428b10cd 100644
--- a/gazelle/WORKSPACE
+++ b/gazelle/WORKSPACE
@@ -42,6 +42,8 @@ load("//:internal_dev_deps.bzl", "internal_dev_deps")
internal_dev_deps()
+register_toolchains("@rules_python//python/runtime_env_toolchains:all")
+
load("//:deps.bzl", _py_gazelle_deps = "gazelle_deps")
# gazelle:repository_macro deps.bzl%go_deps
diff --git a/python/config_settings/BUILD.bazel b/python/config_settings/BUILD.bazel
index 45354e24d9..872d7d1bda 100644
--- a/python/config_settings/BUILD.bazel
+++ b/python/config_settings/BUILD.bazel
@@ -11,6 +11,7 @@ load(
"PrecompileSourceRetentionFlag",
"VenvsSitePackages",
"VenvsUseDeclareSymlinkFlag",
+ rp_string_flag = "string_flag",
)
load(
"//python/private/pypi:flags.bzl",
@@ -87,14 +88,27 @@ string_flag(
visibility = ["//visibility:public"],
)
-string_flag(
+rp_string_flag(
name = "bootstrap_impl",
build_setting_default = BootstrapImplFlag.SYSTEM_PYTHON,
+ override = select({
+ # Windows doesn't yet support bootstrap=script, so force disable it
+ ":_is_windows": BootstrapImplFlag.SYSTEM_PYTHON,
+ "//conditions:default": "",
+ }),
values = sorted(BootstrapImplFlag.__members__.values()),
# NOTE: Only public because it's an implicit dependency
visibility = ["//visibility:public"],
)
+# For some reason, @platforms//os:windows can't be directly used
+# in the select() for the flag. But it can be used when put behind
+# a config_setting().
+config_setting(
+ name = "_is_windows",
+ constraint_values = ["@platforms//os:windows"],
+)
+
# This is used for pip and hermetic toolchain resolution.
string_flag(
name = "py_linux_libc",
diff --git a/python/private/BUILD.bazel b/python/private/BUILD.bazel
index b63f446be3..9cc8ffc62c 100644
--- a/python/private/BUILD.bazel
+++ b/python/private/BUILD.bazel
@@ -86,6 +86,7 @@ bzl_library(
name = "runtime_env_toolchain_bzl",
srcs = ["runtime_env_toolchain.bzl"],
deps = [
+ ":config_settings_bzl",
":py_exec_tools_toolchain_bzl",
":toolchain_types_bzl",
"//python:py_runtime_bzl",
diff --git a/python/private/config_settings.bzl b/python/private/config_settings.bzl
index e5f9d865d1..2cf7968061 100644
--- a/python/private/config_settings.bzl
+++ b/python/private/config_settings.bzl
@@ -209,3 +209,33 @@ _current_config = rule(
"_template": attr.string(default = _DEBUG_ENV_MESSAGE_TEMPLATE),
},
)
+
+def is_python_version_at_least(name, **kwargs):
+ flag_name = "_{}_flag".format(name)
+ native.config_setting(
+ name = name,
+ flag_values = {
+ flag_name: "yes",
+ },
+ )
+ _python_version_at_least(
+ name = flag_name,
+ visibility = ["//visibility:private"],
+ **kwargs
+ )
+
+def _python_version_at_least_impl(ctx):
+ at_least = tuple(ctx.attr.at_least.split("."))
+ current = tuple(
+ ctx.attr._major_minor[config_common.FeatureFlagInfo].value.split("."),
+ )
+ value = "yes" if current >= at_least else "no"
+ return [config_common.FeatureFlagInfo(value = value)]
+
+_python_version_at_least = rule(
+ implementation = _python_version_at_least_impl,
+ attrs = {
+ "at_least": attr.string(mandatory = True),
+ "_major_minor": attr.label(default = _PYTHON_VERSION_MAJOR_MINOR_FLAG),
+ },
+)
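
A rough usage sketch of the macro added above (hypothetical target names;
`config_settings.bzl` is a private API, so this is illustrative only). The
macro creates a config_setting that any select() can key on:

    load("//python/private:config_settings.bzl", "is_python_version_at_least")

    is_python_version_at_least(
        name = "_is_at_least_py3.12",
        at_least = "3.12",
    )

    filegroup(
        name = "maybe_compat_shim",
        srcs = select({
            # Empty when the configured Python is 3.12 or newer.
            ":_is_at_least_py3.12": [],
            "//conditions:default": ["compat_shim.py"],
        }),
    )
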
diff --git a/python/private/flags.bzl b/python/private/flags.bzl
index c53e4610ff..40ce63b3b0 100644
--- a/python/private/flags.bzl
+++ b/python/private/flags.bzl
@@ -35,8 +35,38 @@ AddSrcsToRunfilesFlag = FlagEnum(
is_enabled = _AddSrcsToRunfilesFlag_is_enabled,
)
+def _string_flag_impl(ctx):
+ if ctx.attr.override:
+ value = ctx.attr.override
+ else:
+ value = ctx.build_setting_value
+
+ if value not in ctx.attr.values:
+ fail((
+ "Invalid value for {name}: got {value}, must " +
+ "be one of {allowed}"
+ ).format(
+ name = ctx.label,
+ value = value,
+ allowed = ctx.attr.values,
+ ))
+
+ return [
+ BuildSettingInfo(value = value),
+ config_common.FeatureFlagInfo(value = value),
+ ]
+
+string_flag = rule(
+ implementation = _string_flag_impl,
+ build_setting = config.string(flag = True),
+ attrs = {
+ "override": attr.string(),
+ "values": attr.string_list(),
+ },
+)
+
def _bootstrap_impl_flag_get_value(ctx):
- return ctx.attr._bootstrap_impl_flag[BuildSettingInfo].value
+ return ctx.attr._bootstrap_impl_flag[config_common.FeatureFlagInfo].value
# buildifier: disable=name-conventions
BootstrapImplFlag = enum(
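
A minimal sketch of how the `override` attribute of this wrapper flag is
meant to be used (hypothetical flag name and values; the real use is the
`bootstrap_impl` flag in python/config_settings/BUILD.bazel). A platform
can force a value regardless of what the user sets:

    load("//python/private:flags.bzl", rp_string_flag = "string_flag")

    rp_string_flag(
        name = "my_mode",
        build_setting_default = "fast",
        values = ["fast", "safe"],
        override = select({
            # Force "safe" on Windows; an empty string means "no override".
            "@platforms//os:windows": "safe",
            "//conditions:default": "",
        }),
    )
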
diff --git a/python/private/get_local_runtime_info.py b/python/private/get_local_runtime_info.py
index 0207f56bef..19db3a2935 100644
--- a/python/private/get_local_runtime_info.py
+++ b/python/private/get_local_runtime_info.py
@@ -22,6 +22,7 @@
"micro": sys.version_info.micro,
"include": sysconfig.get_path("include"),
"implementation_name": sys.implementation.name,
+ "base_executable": sys._base_executable,
}
config_vars = [
diff --git a/python/private/local_runtime_repo.bzl b/python/private/local_runtime_repo.bzl
index fb1a8e29ac..ec0643e497 100644
--- a/python/private/local_runtime_repo.bzl
+++ b/python/private/local_runtime_repo.bzl
@@ -84,6 +84,20 @@ def _local_runtime_repo_impl(rctx):
info = json.decode(exec_result.stdout)
logger.info(lambda: _format_get_info_result(info))
+ # We use base_executable because we want the path within a Python
+ # installation directory ("PYTHONHOME"). The problems with sys.executable
+ # are:
+ # * If we're in an activated venv, then we don't want the venv's
+ # `bin/python3` path to be used -- it isn't an actual Python installation.
+ # * If sys.executable is a wrapper (e.g. pyenv), then (1) it may not be
+ # located within an actual Python installation directory, and (2) it
+ # can interfere with Python recognizing when it's within a venv.

+ #
+ # In some cases, it may be a symlink (usually e.g. `python3->python3.12`),
+ # but we don't realpath() it to respect what it has decided is the
+ # appropriate path.
+ interpreter_path = info["base_executable"]
+
# NOTE: Keep in sync with recursive glob in define_local_runtime_toolchain_impl
repo_utils.watch_tree(rctx, rctx.path(info["include"]))
diff --git a/python/private/py_executable.bzl b/python/private/py_executable.bzl
index b4cda21b1d..a8c669afd9 100644
--- a/python/private/py_executable.bzl
+++ b/python/private/py_executable.bzl
@@ -350,6 +350,7 @@ def _create_executable(
main_py = main_py,
imports = imports,
runtime_details = runtime_details,
+ venv = venv,
)
extra_runfiles = ctx.runfiles([stage2_bootstrap] + venv.files_without_interpreter)
zip_main = _create_zip_main(
@@ -538,11 +539,14 @@ def _create_venv(ctx, output_prefix, imports, runtime_details):
ctx.actions.write(pyvenv_cfg, "")
runtime = runtime_details.effective_runtime
+
venvs_use_declare_symlink_enabled = (
VenvsUseDeclareSymlinkFlag.get_value(ctx) == VenvsUseDeclareSymlinkFlag.YES
)
+ recreate_venv_at_runtime = False
- if not venvs_use_declare_symlink_enabled:
+ if not venvs_use_declare_symlink_enabled or not runtime.supports_build_time_venv:
+ recreate_venv_at_runtime = True
if runtime.interpreter:
interpreter_actual_path = runfiles_root_path(ctx, runtime.interpreter.short_path)
else:
@@ -557,6 +561,8 @@ def _create_venv(ctx, output_prefix, imports, runtime_details):
ctx.actions.write(interpreter, "actual:{}".format(interpreter_actual_path))
elif runtime.interpreter:
+ # Some wrappers around the interpreter (e.g. pyenv) use the program
+ # name to decide what to do, so preserve the name.
py_exe_basename = paths.basename(runtime.interpreter.short_path)
# Even though ctx.actions.symlink() is used, using
@@ -594,7 +600,8 @@ def _create_venv(ctx, output_prefix, imports, runtime_details):
if "t" in runtime.abi_flags:
version += "t"
- site_packages = "{}/lib/python{}/site-packages".format(venv, version)
+ venv_site_packages = "lib/python{}/site-packages".format(version)
+ site_packages = "{}/{}".format(venv, venv_site_packages)
pth = ctx.actions.declare_file("{}/bazel.pth".format(site_packages))
ctx.actions.write(pth, "import _bazel_site_init\n")
@@ -616,10 +623,12 @@ def _create_venv(ctx, output_prefix, imports, runtime_details):
return struct(
interpreter = interpreter,
- recreate_venv_at_runtime = not venvs_use_declare_symlink_enabled,
+ recreate_venv_at_runtime = recreate_venv_at_runtime,
# Runfiles root relative path or absolute path
interpreter_actual_path = interpreter_actual_path,
files_without_interpreter = [pyvenv_cfg, pth, site_init] + site_packages_symlinks,
+ # string; venv-relative path to the site-packages directory.
+ venv_site_packages = venv_site_packages,
)
def _create_site_packages_symlinks(ctx, site_packages):
@@ -716,7 +725,8 @@ def _create_stage2_bootstrap(
output_sibling,
main_py,
imports,
- runtime_details):
+ runtime_details,
+ venv = None):
output = ctx.actions.declare_file(
# Prepend with underscore to prevent pytest from trying to
# process the bootstrap for files starting with `test_`
@@ -731,6 +741,14 @@ def _create_stage2_bootstrap(
main_py_path = "{}/{}".format(ctx.workspace_name, main_py.short_path)
else:
main_py_path = ""
+
+ # The stage2 bootstrap uses the venv site-packages location to fix up issues
+ # that occur when the toolchain doesn't support the build-time venv.
+ if venv and not runtime.supports_build_time_venv:
+ venv_rel_site_packages = venv.venv_site_packages
+ else:
+ venv_rel_site_packages = ""
+
ctx.actions.expand_template(
template = template,
output = output,
@@ -741,6 +759,7 @@ def _create_stage2_bootstrap(
"%main%": main_py_path,
"%main_module%": ctx.attr.main_module,
"%target%": str(ctx.label),
+ "%venv_rel_site_packages%": venv_rel_site_packages,
"%workspace_name%": ctx.workspace_name,
},
is_executable = True,
@@ -766,6 +785,12 @@ def _create_stage1_bootstrap(
python_binary_actual = venv.interpreter_actual_path if venv else ""
+ # Runtime may be None on Windows due to the --python_path flag.
+ if runtime and runtime.supports_build_time_venv:
+ resolve_python_binary_at_runtime = "0"
+ else:
+ resolve_python_binary_at_runtime = "1"
+
subs = {
"%interpreter_args%": "\n".join([
'"{}"'.format(v)
@@ -775,7 +800,9 @@ def _create_stage1_bootstrap(
"%python_binary%": python_binary_path,
"%python_binary_actual%": python_binary_actual,
"%recreate_venv_at_runtime%": str(int(venv.recreate_venv_at_runtime)) if venv else "0",
+ "%resolve_python_binary_at_runtime%": resolve_python_binary_at_runtime,
"%target%": str(ctx.label),
+ "%venv_rel_site_packages%": venv.venv_site_packages if venv else "",
"%workspace_name%": ctx.workspace_name,
}
diff --git a/python/private/py_runtime_info.bzl b/python/private/py_runtime_info.bzl
index 4297391068..d2ae17e360 100644
--- a/python/private/py_runtime_info.bzl
+++ b/python/private/py_runtime_info.bzl
@@ -67,7 +67,8 @@ def _PyRuntimeInfo_init(
stage2_bootstrap_template = None,
zip_main_template = None,
abi_flags = "",
- site_init_template = None):
+ site_init_template = None,
+ supports_build_time_venv = True):
if (interpreter_path and interpreter) or (not interpreter_path and not interpreter):
fail("exactly one of interpreter or interpreter_path must be specified")
@@ -119,6 +120,7 @@ def _PyRuntimeInfo_init(
"site_init_template": site_init_template,
"stage2_bootstrap_template": stage2_bootstrap_template,
"stub_shebang": stub_shebang,
+ "supports_build_time_venv": supports_build_time_venv,
"zip_main_template": zip_main_template,
}
@@ -312,6 +314,28 @@ The following substitutions are made during template expansion:
"Shebang" expression prepended to the bootstrapping Python stub
script used when executing {obj}`py_binary` targets. Does not
apply to Windows.
+""",
+ "supports_build_time_venv": """
+:type: bool
+
+True if this toolchain supports the build-time created virtual environment.
+False if not or unknown. If build-time venv creation isn't supported, then binaries may
+fallback to non-venv solutions or creating a venv at runtime.
+
+In order to use the build-time created virtual environment, a toolchain needs
+to meet two criteria:
+1. Specifying the underlying executable (e.g. `/usr/bin/python3`, as reported by
+ `sys._base_executable`) for the venv executable (`$venv/bin/python3`, as reported
+ by `sys.executable`). This typically requires relative symlinking the venv
+ path to the underlying path at build time, or using the `PYTHONEXECUTABLE`
+ environment variable (Python 3.11+) at runtime.
+2. Having the build-time created site-packages directory
+ (`/lib/python{version}/site-packages`) recognized by the runtime
+ interpreter. This typically requires the Python version to be known at
+ build-time and match at runtime.
+
+:::{versionadded} VERSION_NEXT_FEATURE
+:::
""",
"zip_main_template": """
:type: File
diff --git a/python/private/py_runtime_rule.bzl b/python/private/py_runtime_rule.bzl
index a85f5b25f2..6dadcfeac3 100644
--- a/python/private/py_runtime_rule.bzl
+++ b/python/private/py_runtime_rule.bzl
@@ -130,6 +130,7 @@ def _py_runtime_impl(ctx):
zip_main_template = ctx.file.zip_main_template,
abi_flags = abi_flags,
site_init_template = ctx.file.site_init_template,
+ supports_build_time_venv = ctx.attr.supports_build_time_venv,
))
if not IS_BAZEL_7_OR_HIGHER:
@@ -353,6 +354,17 @@ motivation.
Does not apply to Windows.
""",
),
+ "supports_build_time_venv": attr.bool(
+ doc = """
+Whether this runtime supports virtualenvs created at build time.
+
+See {obj}`PyRuntimeInfo.supports_build_time_venv` for docs.
+
+:::{versionadded} VERSION_NEXT_FEATURE
+:::
+""",
+ default = True,
+ ),
"zip_main_template": attr.label(
default = "//python/private:zip_main_template",
allow_single_file = True,
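
A hedged sketch of when a runtime author might set the new attribute
(hypothetical wrapper script and target names): a runtime whose interpreter
is a re-invoking wrapper can opt out of the build-time venv so the bootstrap
falls back to creating one at runtime:

    load("@rules_python//python:py_runtime.bzl", "py_runtime")

    py_runtime(
        name = "wrapper_py3_runtime",
        interpreter = ":python_wrapper.sh",  # hypothetical wrapper that re-invokes Python
        python_version = "PY3",
        # The wrapper hides the real interpreter path and version at build
        # time, so the build-time venv may not activate correctly.
        supports_build_time_venv = False,
    )
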
diff --git a/python/private/runtime_env_toolchain.bzl b/python/private/runtime_env_toolchain.bzl
index 2116012c03..1956ad5e95 100644
--- a/python/private/runtime_env_toolchain.bzl
+++ b/python/private/runtime_env_toolchain.bzl
@@ -17,6 +17,7 @@ load("@rules_cc//cc:cc_library.bzl", "cc_library")
load("//python:py_runtime.bzl", "py_runtime")
load("//python:py_runtime_pair.bzl", "py_runtime_pair")
load("//python/cc:py_cc_toolchain.bzl", "py_cc_toolchain")
+load("//python/private:config_settings.bzl", "is_python_version_at_least")
load(":py_exec_tools_toolchain.bzl", "py_exec_tools_toolchain")
load(":toolchain_types.bzl", "EXEC_TOOLS_TOOLCHAIN_TYPE", "PY_CC_TOOLCHAIN_TYPE", "TARGET_TOOLCHAIN_TYPE")
@@ -38,6 +39,11 @@ def define_runtime_env_toolchain(name):
"""
base_name = name.replace("_toolchain", "")
+ supports_build_time_venv = select({
+ ":_is_at_least_py3.11": True,
+ "//conditions:default": False,
+ })
+
py_runtime(
name = "_runtime_env_py3_runtime",
interpreter = "//python/private:runtime_env_toolchain_interpreter.sh",
@@ -45,6 +51,7 @@ def define_runtime_env_toolchain(name):
stub_shebang = "#!/usr/bin/env python3",
visibility = ["//visibility:private"],
tags = ["manual"],
+ supports_build_time_venv = supports_build_time_venv,
)
# This is a dummy runtime whose interpreter_path triggers the native rule
@@ -56,6 +63,7 @@ def define_runtime_env_toolchain(name):
python_version = "PY3",
visibility = ["//visibility:private"],
tags = ["manual"],
+ supports_build_time_venv = supports_build_time_venv,
)
py_runtime_pair(
@@ -110,3 +118,7 @@ def define_runtime_env_toolchain(name):
toolchain_type = PY_CC_TOOLCHAIN_TYPE,
visibility = ["//visibility:public"],
)
+ is_python_version_at_least(
+ name = "_is_at_least_py3.11",
+ at_least = "3.11",
+ )
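As a rough Python-side analogue of the gate above (purely illustrative; this is not how Bazel evaluates the select()), the build-time venv is only advertised when the interpreter is at least 3.11, the first version where the `PYTHONEXECUTABLE` workaround used by the wrapper script is available on non-Mac platforms:

    # Illustrative only: mirrors the ":_is_at_least_py3.11" gate in plain Python.
    import sys

    supports_build_time_venv = sys.version_info[:2] >= (3, 11)
    print("supports_build_time_venv:", supports_build_time_venv)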
diff --git a/python/private/runtime_env_toolchain_interpreter.sh b/python/private/runtime_env_toolchain_interpreter.sh
index b09bc53e5c..6159d4f38c 100755
--- a/python/private/runtime_env_toolchain_interpreter.sh
+++ b/python/private/runtime_env_toolchain_interpreter.sh
@@ -53,5 +53,29 @@ documentation for py_runtime_pair \
(https://github.com/bazel-contrib/rules_python/blob/master/docs/python.md#py_runtime_pair)."
fi
-exec "$PYTHON_BIN" "$@"
+# Because this is a wrapper script that invokes Python, it prevents Python from
+# detecting virtualenvs like normal (i.e. using the venv symlink to find the
+# real interpreter). To work around this, we have to manually detect the venv,
+# then trick the interpreter into understanding we're in a virtual env.
+self_dir=$(dirname "$0")
+if [ -e "$self_dir/pyvenv.cfg" ] || [ -e "$self_dir/../pyvenv.cfg" ]; then
+ case "$0" in
+ /*)
+ venv_bin="$0"
+ ;;
+ *)
+ venv_bin="$PWD/$0"
+ ;;
+ esac
+ # PYTHONEXECUTABLE is also used because `exec -a` doesn't fully trick the
+ # pyenv wrappers.
+  # NOTE: The PYTHONEXECUTABLE envvar only works on non-Mac platforms starting with Python 3.11
+ export PYTHONEXECUTABLE="$venv_bin"
+ # Python looks at argv[0] to determine sys.executable, so use exec -a
+ # to make it think it's the venv's binary, not the actual one invoked.
+ # NOTE: exec -a isn't strictly posix-compatible, but very widespread
+ exec -a "$venv_bin" "$PYTHON_BIN" "$@"
+else
+ exec "$PYTHON_BIN" "$@"
+fi
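For reference, the venv detection this wrapper tries to preserve boils down to finding a pyvenv.cfg next to, or one directory above, the executable the interpreter believes it was launched as. A hedged Python sketch of that lookup (an illustration of the idea, not CPython's actual implementation):

    # Illustrative sketch of venv detection; CPython's real logic lives in its
    # path-initialization code, this only mirrors the idea.
    import os
    import sys

    def find_pyvenv_cfg(executable: str):
        exe_dir = os.path.dirname(os.path.abspath(executable))
        for candidate in (
            os.path.join(exe_dir, "pyvenv.cfg"),                   # $venv/bin/pyvenv.cfg
            os.path.join(os.path.dirname(exe_dir), "pyvenv.cfg"),  # $venv/pyvenv.cfg
        ):
            if os.path.exists(candidate):
                return candidate
        return None

    if __name__ == "__main__":
        print(find_pyvenv_cfg(sys.executable))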
diff --git a/python/private/site_init_template.py b/python/private/site_init_template.py
index 40fb4e4139..a87a0d2a8f 100644
--- a/python/private/site_init_template.py
+++ b/python/private/site_init_template.py
@@ -125,6 +125,14 @@ def _search_path(name):
def _setup_sys_path():
+ """Perform Bazel/binary specific sys.path setup.
+
+ NOTE: We do not add _RUNFILES_ROOT to sys.path for two reasons:
+ 1. Under workspace, it makes every external repository importable. If a Bazel
+ repository matches a Python import name, they conflict.
+ 2. Under bzlmod, the repo names in the runfiles directory aren't importable
+ Python names, so there's no point in adding the runfiles root to sys.path.
+ """
seen = set(sys.path)
python_path_entries = []
@@ -195,5 +203,27 @@ def _maybe_add_path(path):
return coverage_setup
+def _fixup_sys_base_executable():
+ """Fixup sys._base_executable to account for Bazel-specific pyvenv.cfg
+
+ The pyvenv.cfg created for py_binary leaves the `home` key unset. A
+    side effect of this is that `sys._base_executable` points to the venv
+    executable, not the actual executable. This mostly doesn't matter, but it does
+    affect using the venv module to create venvs (the created venvs point to the
+    venv executable, not the actual one).
+ """
+    # Already differs from sys.executable, so assume it was set correctly.
+ if sys.executable != sys._base_executable:
+ return
+ # Not in a venv, so don't touch anything.
+ if sys.prefix == sys.base_prefix:
+ return
+ exe = os.path.realpath(sys.executable)
+ _print_verbose("setting sys._base_executable:", exe)
+ sys._base_executable = exe
+
+
+_fixup_sys_base_executable()
+
COVERAGE_SETUP = _setup_sys_path()
_print_verbose("DONE")
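A quick way to observe the effect of the fixup above (purely a usage sketch; the printed paths depend on the host): after it runs, `sys._base_executable` should name the real interpreter, which is what the stdlib `venv` module uses when creating new environments.

    # Usage sketch: compare the two attributes inside a running py_binary.
    import sys

    print("sys.executable:      ", sys.executable)
    print("sys._base_executable:", getattr(sys, "_base_executable", None))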
diff --git a/python/private/stage1_bootstrap_template.sh b/python/private/stage1_bootstrap_template.sh
index c487624934..d992b55cae 100644
--- a/python/private/stage1_bootstrap_template.sh
+++ b/python/private/stage1_bootstrap_template.sh
@@ -9,7 +9,8 @@ fi
# runfiles-relative path
STAGE2_BOOTSTRAP="%stage2_bootstrap%"
-# runfiles-relative path to python interpreter to use
+# runfiles-relative path to python interpreter to use.
+# This is the `bin/python3` path in the binary's venv.
PYTHON_BINARY='%python_binary%'
# The path that PYTHON_BINARY should symlink to.
# runfiles-relative path, absolute path, or single word.
@@ -18,8 +19,17 @@ PYTHON_BINARY_ACTUAL="%python_binary_actual%"
# 0 or 1
IS_ZIPFILE="%is_zipfile%"
-# 0 or 1
+# 0 or 1.
+# If 1, then a venv will be created at runtime that replicates what would have
+# been the build-time structure.
RECREATE_VENV_AT_RUNTIME="%recreate_venv_at_runtime%"
+# 0 or 1
+# If 1, then the path to python will be resolved by running
+# PYTHON_BINARY_ACTUAL to determine the actual underlying interpreter.
+RESOLVE_PYTHON_BINARY_AT_RUNTIME="%resolve_python_binary_at_runtime%"
+# venv-relative path to the site-packages
+# e.g. lib/python3.12t/site-packages
+VENV_REL_SITE_PACKAGES="%venv_rel_site_packages%"
# array of strings
declare -a INTERPRETER_ARGS_FROM_TARGET=(
@@ -152,34 +162,72 @@ elif [[ "$RECREATE_VENV_AT_RUNTIME" == "1" ]]; then
fi
fi
- if [[ "$PYTHON_BINARY_ACTUAL" == /* ]]; then
- # An absolute path, i.e. platform runtime, e.g. /usr/bin/python3
- symlink_to=$PYTHON_BINARY_ACTUAL
- elif [[ "$PYTHON_BINARY_ACTUAL" == */* ]]; then
- # A runfiles-relative path
- symlink_to="$RUNFILES_DIR/$PYTHON_BINARY_ACTUAL"
- else
- # A plain word, e.g. "python3". Symlink to where PATH leads
- symlink_to=$(which $PYTHON_BINARY_ACTUAL)
- # Guard against trying to symlink to an empty value
- if [[ $? -ne 0 ]]; then
- echo >&2 "ERROR: Python to use not found on PATH: $PYTHON_BINARY_ACTUAL"
- exit 1
- fi
- fi
- mkdir -p "$venv/bin"
# Match the basename; some tools, e.g. pyvenv key off the executable name
python_exe="$venv/bin/$(basename $PYTHON_BINARY_ACTUAL)"
+
if [[ ! -e "$python_exe" ]]; then
- ln -s "$symlink_to" "$python_exe"
+ if [[ "$PYTHON_BINARY_ACTUAL" == /* ]]; then
+ # An absolute path, i.e. platform runtime, e.g. /usr/bin/python3
+ python_exe_actual=$PYTHON_BINARY_ACTUAL
+ elif [[ "$PYTHON_BINARY_ACTUAL" == */* ]]; then
+ # A runfiles-relative path
+ python_exe_actual="$RUNFILES_DIR/$PYTHON_BINARY_ACTUAL"
+ else
+ # A plain word, e.g. "python3". Symlink to where PATH leads
+ python_exe_actual=$(which $PYTHON_BINARY_ACTUAL)
+ # Guard against trying to symlink to an empty value
+ if [[ $? -ne 0 ]]; then
+ echo >&2 "ERROR: Python to use not found on PATH: $PYTHON_BINARY_ACTUAL"
+ exit 1
+ fi
+ fi
+
+ runfiles_venv="$RUNFILES_DIR/$(dirname $(dirname $PYTHON_BINARY))"
+ # When RESOLVE_PYTHON_BINARY_AT_RUNTIME is true, it means the toolchain
+ # has thrown two complications at us:
+ # 1. The build-time assumption of the Python version may not match the
+ # runtime Python version. The site-packages directory path includes the
+ # Python version, so when the versions don't match, the runtime won't
+ # find it.
+ # 2. The interpreter might be a wrapper script, which interferes with Python's
+ # ability to detect when it's within a venv. Starting in Python 3.11,
+ # the PYTHONEXECUTABLE environment variable can fix this, but due to (1),
+ # we don't know if that is supported without running Python.
+ # To fix (1), we symlink the desired site-packages path to the build-time
+ # directory. Hopefully the version mismatch is OK :D.
+ # To fix (2), we determine the actual underlying interpreter and symlink
+ # to that.
+ if [[ "$RESOLVE_PYTHON_BINARY_AT_RUNTIME" == "1" ]]; then
+ {
+ read -r resolved_py_exe
+ read -r resolved_site_packages
+ } < <("$python_exe_actual" -I <
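The values read above (resolved_py_exe and resolved_site_packages) come from running the interpreter itself. As a hedged illustration only (this is not the template's actual payload), a probe of that kind could report something like:

    # Illustrative probe only -- prints the underlying executable and this
    # interpreter's site-packages directory, one per line.
    import sys
    import sysconfig

    print(getattr(sys, "_base_executable", None) or sys.executable)
    print(sysconfig.get_path("purelib"))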
Date: Mon, 21 Apr 2025 17:00:40 -0700
Subject: [PATCH 044/156] fix: escape more invalid repo string characters
(#2801)
Also escape plus and percent signs when generating the repo name from the
wheel version, since wheel versions sometimes contain such characters.
Fixes https://github.com/bazel-contrib/rules_python/issues/2799
Co-authored-by: Richard Levasseur
---
python/private/pypi/whl_repo_name.bzl | 2 +-
tests/pypi/whl_repo_name/whl_repo_name_tests.bzl | 12 ++++++++++++
2 files changed, 13 insertions(+), 1 deletion(-)
diff --git a/python/private/pypi/whl_repo_name.bzl b/python/private/pypi/whl_repo_name.bzl
index 02a7c8142c..2b3b5418aa 100644
--- a/python/private/pypi/whl_repo_name.bzl
+++ b/python/private/pypi/whl_repo_name.bzl
@@ -44,7 +44,7 @@ def whl_repo_name(filename, sha256):
else:
parsed = parse_whl_name(filename)
name = normalize_name(parsed.distribution)
- version = parsed.version.replace(".", "_").replace("!", "_")
+ version = parsed.version.replace(".", "_").replace("!", "_").replace("+", "_").replace("%", "_")
python_tag, _, _ = parsed.python_tag.partition(".")
abi_tag, _, _ = parsed.abi_tag.partition(".")
platform_tag, _, _ = parsed.platform_tag.partition(".")
diff --git a/tests/pypi/whl_repo_name/whl_repo_name_tests.bzl b/tests/pypi/whl_repo_name/whl_repo_name_tests.bzl
index f0d1d059e1..35e6bcdf9f 100644
--- a/tests/pypi/whl_repo_name/whl_repo_name_tests.bzl
+++ b/tests/pypi/whl_repo_name/whl_repo_name_tests.bzl
@@ -54,6 +54,18 @@ def _test_platform_whl(env):
_tests.append(_test_platform_whl)
+def _test_name_with_plus(env):
+ got = whl_repo_name("gptqmodel-2.0.0+cu126torch2.6-cp312-cp312-linux_x86_64.whl", "")
+ env.expect.that_str(got).equals("gptqmodel_2_0_0_cu126torch2_6_cp312_cp312_linux_x86_64")
+
+_tests.append(_test_name_with_plus)
+
+def _test_name_with_percent(env):
+ got = whl_repo_name("gptqmodel-2.0.0%2Bcu126torch2.6-cp312-cp312-linux_x86_64.whl", "")
+ env.expect.that_str(got).equals("gptqmodel_2_0_0_2Bcu126torch2_6_cp312_cp312_linux_x86_64")
+
+_tests.append(_test_name_with_percent)
+
def whl_repo_name_test_suite(name):
"""Create the test suite.
From 1d69ad68d7959570acde61d8705f1f437c0691b0 Mon Sep 17 00:00:00 2001
From: Keith Smiley
Date: Tue, 22 Apr 2025 05:49:15 -0700
Subject: [PATCH 045/156] fix: parsing metadata with inline licenses (#2806)
The wheel `METADATA` parsing implemented in 1.4 missed the fact
that whitespace is significant and that the `License` field is
sometimes included inline in the `METADATA` file itself.
This change ensures that we stop parsing the `METADATA` file only
at the first completely empty line.
Fixes https://github.com/bazel-contrib/rules_python/issues/2796
---------
Co-authored-by: Ignas Anikevicius <240938+aignas@users.noreply.github.com>
---
python/private/pypi/whl_metadata.bzl | 2 +-
.../pypi/whl_metadata/whl_metadata_tests.bzl | 31 +++++++++++++++++++
2 files changed, 32 insertions(+), 1 deletion(-)
diff --git a/python/private/pypi/whl_metadata.bzl b/python/private/pypi/whl_metadata.bzl
index 8a86ffbff1..cf2d51afda 100644
--- a/python/private/pypi/whl_metadata.bzl
+++ b/python/private/pypi/whl_metadata.bzl
@@ -52,7 +52,7 @@ def parse_whl_metadata(contents):
"version": "",
}
for line in contents.strip().split("\n"):
- if not line.strip():
+ if not line:
# Stop parsing on first empty line, which marks the end of the
# headers containing the metadata.
break
diff --git a/tests/pypi/whl_metadata/whl_metadata_tests.bzl b/tests/pypi/whl_metadata/whl_metadata_tests.bzl
index 4acbc9213d..329423a26c 100644
--- a/tests/pypi/whl_metadata/whl_metadata_tests.bzl
+++ b/tests/pypi/whl_metadata/whl_metadata_tests.bzl
@@ -140,6 +140,37 @@ Requires-Dist: this will be ignored
_tests.append(_test_parse_metadata_all)
+def _test_parse_metadata_multiline_license(env):
+ got = _parse_whl_metadata(
+ env,
+ # NOTE: The trailing whitespace here is meaningful as an empty line
+ # denotes the end of the header.
+ contents = """\
+Name: foo
+Version: 0.0.1
+License: some License
+
+ some line
+
+ another line
+
+Requires-Dist: bar; extra == "all"
+Provides-Extra: all
+
+Requires-Dist: this will be ignored
+""",
+ )
+ got.name().equals("foo")
+ got.version().equals("0.0.1")
+ got.requires_dist().contains_exactly([
+ "bar; extra == \"all\"",
+ ])
+ got.provides_extra().contains_exactly([
+ "all",
+ ])
+
+_tests.append(_test_parse_metadata_multiline_license)
+
def whl_metadata_test_suite(name): # buildifier: disable=function-docstring
test_suite(
name = name,
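For clarity, a hedged plain-Python mirror of the parsing rule being fixed (field handling heavily simplified compared to the Starlark implementation): only a fully empty line ends the header block, so whitespace-only lines and indented `License` continuation lines no longer cut the parse short.

    # Illustrative parser sketch: stops at the first truly empty line only.
    def parse_headers(contents: str) -> dict:
        parsed = {"name": "", "version": "", "requires_dist": [], "provides_extra": []}
        for line in contents.strip().split("\n"):
            if not line:  # was `if not line.strip()`, which also stopped on whitespace-only lines
                break
            if line.startswith("Name: "):
                parsed["name"] = line[len("Name: "):].strip()
            elif line.startswith("Version: "):
                parsed["version"] = line[len("Version: "):].strip()
            elif line.startswith("Requires-Dist: "):
                parsed["requires_dist"].append(line[len("Requires-Dist: "):].strip())
            elif line.startswith("Provides-Extra: "):
                parsed["provides_extra"].append(line[len("Provides-Extra: "):].strip())
            # Any other line (e.g. an indented License continuation) is skipped.
        return parsed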
From 830261e4b1c427c7f646f689fedf45117dd54aad Mon Sep 17 00:00:00 2001
From: Ignas Anikevicius <240938+aignas@users.noreply.github.com>
Date: Wed, 23 Apr 2025 01:45:10 +0900
Subject: [PATCH 046/156] test(pypi): add a test case for simpleapi html
parsing with % (#2811)
In addition to #2801, I wanted to ensure that we get the correct
filename when downloading wheels. It seems that a `%` in the wheel
filename can slip through for wheels that are referenced via a direct URL
in requirements.txt files.
---------
Co-authored-by: Richard Levasseur
Co-authored-by: Richard Levasseur
---
.../parse_simpleapi_html_tests.bzl | 19 +++++++++++++++++++
1 file changed, 19 insertions(+)
diff --git a/tests/pypi/parse_simpleapi_html/parse_simpleapi_html_tests.bzl b/tests/pypi/parse_simpleapi_html/parse_simpleapi_html_tests.bzl
index abaa7a6a49..191079d214 100644
--- a/tests/pypi/parse_simpleapi_html/parse_simpleapi_html_tests.bzl
+++ b/tests/pypi/parse_simpleapi_html/parse_simpleapi_html_tests.bzl
@@ -303,6 +303,25 @@ def _test_whls(env):
yanked = False,
),
),
+ (
+ struct(
+ attrs = [
+ 'href="/whl/cpu/torch-2.6.0%2Bcpu-cp39-cp39-manylinux_2_28_aarch64.whl#sha256=deadbeef"',
+ ],
+ filename = "torch-2.6.0+cpu-cp39-cp39-manylinux_2_28_aarch64.whl",
+ url = "https://example.org/",
+ ),
+ struct(
+ filename = "torch-2.6.0+cpu-cp39-cp39-manylinux_2_28_aarch64.whl",
+ metadata_sha256 = "",
+ metadata_url = "",
+ sha256 = "deadbeef",
+ version = "2.6.0+cpu",
+ # A URL with % could occur if directly written in requirements.
+ url = "https://example.org/whl/cpu/torch-2.6.0%2Bcpu-cp39-cp39-manylinux_2_28_aarch64.whl",
+ yanked = False,
+ ),
+ ),
]
for (input, want) in tests:
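A small hedged sketch of the behavior the new test case pins down (plain Python, not the Starlark parser itself): the href is percent-encoded, and unquoting the path recovers the `+` in the wheel's local version while the sha256 fragment stays separate.

    # Illustrative only: decode a percent-encoded wheel href back to a filename.
    from urllib.parse import unquote

    href = "/whl/cpu/torch-2.6.0%2Bcpu-cp39-cp39-manylinux_2_28_aarch64.whl#sha256=deadbeef"
    path, _, fragment = href.partition("#")
    filename = unquote(path.rpartition("/")[2])

    assert filename == "torch-2.6.0+cpu-cp39-cp39-manylinux_2_28_aarch64.whl"
    assert fragment == "sha256=deadbeef"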
From fe88b2381b5d272437593dc3604fc834114e4a15 Mon Sep 17 00:00:00 2001
From: Brandon Chinn
Date: Tue, 22 Apr 2025 23:39:02 -0700
Subject: [PATCH 047/156] build: Run pre-commit everywhere (#2808)
Fix pre-commit issues.
It would be nice to run `pre-commit run -a` in CI, but we won't fix that now.
---------
Co-authored-by: Douglas Thor
---
.bazelrc | 4 ++--
.pre-commit-config.yaml | 2 +-
.../foo_external/py_binary_with_proto.py | 1 +
.../wheel/lib/module_with_type_annotations.py | 1 +
examples/wheel/test_publish.py | 2 +-
examples/wheel/wheel_test.py | 17 +++++++++--------
.../dependency_resolution_order/__init__.py | 3 +--
.../py312_syntax/pep_695_type_parameter.py | 1 -
.../dependency_resolver/dependency_resolver.py | 6 ++----
tests/integration/runner.py | 5 ++++-
tests/no_unsafe_paths/test.py | 4 ++--
tools/wheelmaker.py | 12 ++++++++----
12 files changed, 32 insertions(+), 26 deletions(-)
diff --git a/.bazelrc b/.bazelrc
index 4e6f2fa187..d2e0721526 100644
--- a/.bazelrc
+++ b/.bazelrc
@@ -4,8 +4,8 @@
# (Note, we cannot use `common --deleted_packages` because the bazel version command doesn't support it)
# To update these lines, execute
# `bazel run @rules_bazel_integration_test//tools:update_deleted_packages`
-build --deleted_packages=examples/build_file_generation,examples/build_file_generation/random_number_generator,examples/bzlmod,examples/bzlmod_build_file_generation,examples/bzlmod_build_file_generation/other_module/other_module/pkg,examples/bzlmod_build_file_generation/runfiles,examples/bzlmod/entry_points,examples/bzlmod/entry_points/tests,examples/bzlmod/libs/my_lib,examples/bzlmod/other_module,examples/bzlmod/other_module/other_module/pkg,examples/bzlmod/patches,examples/bzlmod/py_proto_library,examples/bzlmod/py_proto_library/example.com/another_proto,examples/bzlmod/py_proto_library/example.com/proto,examples/bzlmod/runfiles,examples/bzlmod/tests,examples/bzlmod/tests/other_module,examples/bzlmod/whl_mods,examples/multi_python_versions/libs/my_lib,examples/multi_python_versions/requirements,examples/multi_python_versions/tests,examples/pip_parse,examples/pip_parse_vendored,examples/pip_repository_annotations,examples/py_proto_library,examples/py_proto_library/example.com/another_proto,examples/py_proto_library/example.com/proto,gazelle,gazelle/manifest,gazelle/manifest/generate,gazelle/manifest/hasher,gazelle/manifest/test,gazelle/modules_mapping,gazelle/python,gazelle/pythonconfig,gazelle/python/private,tests/integration/compile_pip_requirements,tests/integration/compile_pip_requirements_test_from_external_repo,tests/integration/custom_commands,tests/integration/ignore_root_user_error,tests/integration/ignore_root_user_error/submodule,tests/integration/local_toolchains,tests/integration/pip_parse,tests/integration/pip_parse/empty,tests/integration/py_cc_toolchain_registered,tests/modules/other,tests/modules/other/nspkg_delta,tests/modules/other/nspkg_gamma
-query --deleted_packages=examples/build_file_generation,examples/build_file_generation/random_number_generator,examples/bzlmod,examples/bzlmod_build_file_generation,examples/bzlmod_build_file_generation/other_module/other_module/pkg,examples/bzlmod_build_file_generation/runfiles,examples/bzlmod/entry_points,examples/bzlmod/entry_points/tests,examples/bzlmod/libs/my_lib,examples/bzlmod/other_module,examples/bzlmod/other_module/other_module/pkg,examples/bzlmod/patches,examples/bzlmod/py_proto_library,examples/bzlmod/py_proto_library/example.com/another_proto,examples/bzlmod/py_proto_library/example.com/proto,examples/bzlmod/runfiles,examples/bzlmod/tests,examples/bzlmod/tests/other_module,examples/bzlmod/whl_mods,examples/multi_python_versions/libs/my_lib,examples/multi_python_versions/requirements,examples/multi_python_versions/tests,examples/pip_parse,examples/pip_parse_vendored,examples/pip_repository_annotations,examples/py_proto_library,examples/py_proto_library/example.com/another_proto,examples/py_proto_library/example.com/proto,gazelle,gazelle/manifest,gazelle/manifest/generate,gazelle/manifest/hasher,gazelle/manifest/test,gazelle/modules_mapping,gazelle/python,gazelle/pythonconfig,gazelle/python/private,tests/integration/compile_pip_requirements,tests/integration/compile_pip_requirements_test_from_external_repo,tests/integration/custom_commands,tests/integration/ignore_root_user_error,tests/integration/ignore_root_user_error/submodule,tests/integration/local_toolchains,tests/integration/pip_parse,tests/integration/pip_parse/empty,tests/integration/py_cc_toolchain_registered,tests/modules/other,tests/modules/other/nspkg_delta,tests/modules/other/nspkg_gamma
+build --deleted_packages=examples/build_file_generation,examples/build_file_generation/random_number_generator,examples/bzlmod,examples/bzlmod/entry_points,examples/bzlmod/entry_points/tests,examples/bzlmod/libs/my_lib,examples/bzlmod/other_module,examples/bzlmod/other_module/other_module/pkg,examples/bzlmod/patches,examples/bzlmod/py_proto_library,examples/bzlmod/py_proto_library/example.com/another_proto,examples/bzlmod/py_proto_library/example.com/proto,examples/bzlmod/runfiles,examples/bzlmod/tests,examples/bzlmod/tests/other_module,examples/bzlmod/whl_mods,examples/bzlmod_build_file_generation,examples/bzlmod_build_file_generation/other_module/other_module/pkg,examples/bzlmod_build_file_generation/runfiles,examples/multi_python_versions/libs/my_lib,examples/multi_python_versions/requirements,examples/multi_python_versions/tests,examples/pip_parse,examples/pip_parse_vendored,examples/pip_repository_annotations,examples/py_proto_library,examples/py_proto_library/example.com/another_proto,examples/py_proto_library/example.com/proto,gazelle,gazelle/manifest,gazelle/manifest/generate,gazelle/manifest/hasher,gazelle/manifest/test,gazelle/modules_mapping,gazelle/python,gazelle/python/private,gazelle/pythonconfig,tests/integration/compile_pip_requirements,tests/integration/compile_pip_requirements_test_from_external_repo,tests/integration/custom_commands,tests/integration/ignore_root_user_error,tests/integration/ignore_root_user_error/submodule,tests/integration/local_toolchains,tests/integration/pip_parse,tests/integration/pip_parse/empty,tests/integration/py_cc_toolchain_registered,tests/modules/other,tests/modules/other/nspkg_delta,tests/modules/other/nspkg_gamma
+query --deleted_packages=examples/build_file_generation,examples/build_file_generation/random_number_generator,examples/bzlmod,examples/bzlmod/entry_points,examples/bzlmod/entry_points/tests,examples/bzlmod/libs/my_lib,examples/bzlmod/other_module,examples/bzlmod/other_module/other_module/pkg,examples/bzlmod/patches,examples/bzlmod/py_proto_library,examples/bzlmod/py_proto_library/example.com/another_proto,examples/bzlmod/py_proto_library/example.com/proto,examples/bzlmod/runfiles,examples/bzlmod/tests,examples/bzlmod/tests/other_module,examples/bzlmod/whl_mods,examples/bzlmod_build_file_generation,examples/bzlmod_build_file_generation/other_module/other_module/pkg,examples/bzlmod_build_file_generation/runfiles,examples/multi_python_versions/libs/my_lib,examples/multi_python_versions/requirements,examples/multi_python_versions/tests,examples/pip_parse,examples/pip_parse_vendored,examples/pip_repository_annotations,examples/py_proto_library,examples/py_proto_library/example.com/another_proto,examples/py_proto_library/example.com/proto,gazelle,gazelle/manifest,gazelle/manifest/generate,gazelle/manifest/hasher,gazelle/manifest/test,gazelle/modules_mapping,gazelle/python,gazelle/python/private,gazelle/pythonconfig,tests/integration/compile_pip_requirements,tests/integration/compile_pip_requirements_test_from_external_repo,tests/integration/custom_commands,tests/integration/ignore_root_user_error,tests/integration/ignore_root_user_error/submodule,tests/integration/local_toolchains,tests/integration/pip_parse,tests/integration/pip_parse/empty,tests/integration/py_cc_toolchain_registered,tests/modules/other,tests/modules/other/nspkg_delta,tests/modules/other/nspkg_gamma
test --test_output=errors
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 2b451e89fa..67a02fc6c0 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -38,7 +38,7 @@ repos:
- --profile
- black
- repo: https://github.com/psf/black
- rev: 23.1.0
+ rev: 25.1.0
hooks:
- id: black
- repo: local
diff --git a/examples/bzlmod/py_proto_library/foo_external/py_binary_with_proto.py b/examples/bzlmod/py_proto_library/foo_external/py_binary_with_proto.py
index be34264b5a..67e798bb8f 100644
--- a/examples/bzlmod/py_proto_library/foo_external/py_binary_with_proto.py
+++ b/examples/bzlmod/py_proto_library/foo_external/py_binary_with_proto.py
@@ -2,4 +2,5 @@
if __name__ == "__main__":
import my_proto_pb2
+
sys.exit(0)
diff --git a/examples/wheel/lib/module_with_type_annotations.py b/examples/wheel/lib/module_with_type_annotations.py
index 13e0895160..eda57bae6a 100644
--- a/examples/wheel/lib/module_with_type_annotations.py
+++ b/examples/wheel/lib/module_with_type_annotations.py
@@ -12,5 +12,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+
def function():
return "qux"
diff --git a/examples/wheel/test_publish.py b/examples/wheel/test_publish.py
index 47134d11f3..e6ec80721b 100644
--- a/examples/wheel/test_publish.py
+++ b/examples/wheel/test_publish.py
@@ -104,7 +104,7 @@ def test_upload_and_query_simple_api(self):
Links for example-minimal-library
- example_minimal_library-0.0.1-py3-none-any.whl
+ example_minimal_library-0.0.1-py3-none-any.whl