diff --git a/.github/.OwlBot.yaml b/.github/.OwlBot.yaml
deleted file mode 100644
index c8b40cc7f..000000000
--- a/.github/.OwlBot.yaml
+++ /dev/null
@@ -1,19 +0,0 @@
-# Copyright 2021 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-docker:
-  image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest
-
-begin-after-commit-hash: 7af2cb8b2b725641ac0d07e2f256d453682802e6
-
diff --git a/.github/auto-approve.yml b/.github/auto-approve.yml
deleted file mode 100644
index 311ebbb85..000000000
--- a/.github/auto-approve.yml
+++ /dev/null
@@ -1,3 +0,0 @@
-# https://github.com/googleapis/repo-automation-bots/tree/main/packages/auto-approve
-processes:
-  - "OwlBotTemplateChanges"
diff --git a/.github/auto-label.yaml b/.github/auto-label.yaml
index b2016d119..21786a4eb 100644
--- a/.github/auto-label.yaml
+++ b/.github/auto-label.yaml
@@ -1,4 +1,4 @@
-# Copyright 2023 Google LLC
+# Copyright 2024 Google LLC
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -13,3 +13,8 @@
 # limitations under the License.
 requestsize:
   enabled: true
+
+path:
+  pullrequest: true
+  paths:
+    samples: "samples"
diff --git a/.github/blunderbuss.yml b/.github/blunderbuss.yml
new file mode 100644
index 000000000..1618464d1
--- /dev/null
+++ b/.github/blunderbuss.yml
@@ -0,0 +1,17 @@
+# Blunderbuss config
+#
+# This file controls who is assigned for pull requests and issues.
+# Note: This file is autogenerated. To make changes to the assignee
+# team, please update `codeowner_team` in `.repo-metadata.json`.
+assign_issues:
+  - googleapis/actools-python
+
+assign_issues_by:
+  - labels:
+      - "samples"
+    to:
+      - googleapis/python-samples-reviewers
+      - googleapis/actools-python
+
+assign_prs:
+  - googleapis/actools-python
diff --git a/.github/release-trigger.yml b/.github/cherry-pick-bot.yml
similarity index 93%
rename from .github/release-trigger.yml
rename to .github/cherry-pick-bot.yml
index d4ca94189..1e9cfcd3a 100644
--- a/.github/release-trigger.yml
+++ b/.github/cherry-pick-bot.yml
@@ -1 +1,2 @@
 enabled: true
+
diff --git a/.github/release-please.yml b/.github/release-please.yml
deleted file mode 100644
index 29601ad46..000000000
--- a/.github/release-please.yml
+++ /dev/null
@@ -1,11 +0,0 @@
-releaseType: python
-handleGHRelease: true
-# NOTE: this section is generated by synthtool.languages.python
-# See https://github.com/googleapis/synthtool/blob/master/synthtool/languages/python.py
-branches:
-- branch: v1
-  handleGHRelease: true
-  releaseType: python
-- branch: v0
-  handleGHRelease: true
-  releaseType: python
diff --git a/.github/sync-repo-settings.yaml b/.github/sync-repo-settings.yaml
deleted file mode 100644
index cdec3e9be..000000000
--- a/.github/sync-repo-settings.yaml
+++ /dev/null
@@ -1,35 +0,0 @@
-# https://github.com/googleapis/repo-automation-bots/tree/main/packages/sync-repo-settings
-# Rules for main branch protection
-branchProtectionRules:
-# Identifies the protection rule pattern. Name of the branch to be protected.
-# Defaults to `main`
-- pattern: main
-  requiresCodeOwnerReviews: true
-  requiresStrictStatusChecks: true
-  requiredStatusCheckContexts:
-    - 'cla/google'
-    # No Kokoro: the following are Github actions
-    - 'lint'
-    - 'mypy'
-    - 'unit_grpc_gcp-3.7'
-    - 'unit_grpc_gcp-3.8'
-    - 'unit_grpc_gcp-3.9'
-    - 'unit_grpc_gcp-3.10'
-    - 'unit_grpc_gcp-3.11'
-    - 'unit-3.7'
-    - 'unit-3.8'
-    - 'unit-3.9'
-    - 'unit-3.10'
-    - 'unit-3.11'
-    - 'unit_wo_grpc-3.10'
-    - 'unit_wo_grpc-3.11'
-    - 'cover'
-    - 'docs'
-    - 'docfx'
-permissionRules:
-  - team: actools-python
-    permission: admin
-  - team: actools
-    permission: admin
-  - team: yoshi-python
-    permission: push
diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml
index 221806ced..7ef5a2874 100644
--- a/.github/workflows/docs.yml
+++ b/.github/workflows/docs.yml
@@ -2,17 +2,21 @@ on:
   pull_request:
     branches:
       - main
+
+permissions:
+  contents: read
+
 name: docs
 jobs:
   docs:
     runs-on: ubuntu-latest
     steps:
       - name: Checkout
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
       - name: Setup Python
-        uses: actions/setup-python@v4
+        uses: actions/setup-python@v5
         with:
-          python-version: "3.9"
+          python-version: "3.10"
       - name: Install nox
         run: |
           python -m pip install --upgrade setuptools pip wheel
@@ -24,9 +28,9 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Checkout
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
      - name: Setup Python
-        uses: actions/setup-python@v4
+        uses: actions/setup-python@v5
         with:
           python-version: "3.10"
       - name: Install nox
diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml
index d2aee5b7d..d4929b60f 100644
--- a/.github/workflows/lint.yml
+++ b/.github/workflows/lint.yml
@@ -2,17 +2,21 @@ on:
   pull_request:
     branches:
       - main
+
+permissions:
+  contents: read
+
 name: lint
 jobs:
   lint:
     runs-on: ubuntu-latest
     steps:
       - name: Checkout
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
       - name: Setup Python
-        uses: actions/setup-python@v4
+        uses: actions/setup-python@v5
         with:
-          python-version: "3.10"
+          python-version: "3.14"
       - name: Install nox
         run: |
           python -m pip install --upgrade setuptools pip wheel
diff --git a/.github/workflows/mypy.yml b/.github/workflows/mypy.yml
index a505525d2..21621fe22 100644
--- a/.github/workflows/mypy.yml
+++ b/.github/workflows/mypy.yml
@@ -2,17 +2,21 @@ on:
   pull_request:
     branches:
       - main
+
+permissions:
+  contents: read
+
 name: mypy
 jobs:
   mypy:
     runs-on: ubuntu-latest
     steps:
       - name: Checkout
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
       - name: Setup Python
-        uses: actions/setup-python@v4
+        uses: actions/setup-python@v5
         with:
-          python-version: "3.10"
+          python-version: "3.14"
       - name: Install nox
         run: |
           python -m pip install --upgrade setuptools pip wheel
diff --git a/.github/workflows/unittest.yml b/.github/workflows/unittest.yml
index 0d7789b6a..f0f80d482 100644
--- a/.github/workflows/unittest.yml
+++ b/.github/workflows/unittest.yml
@@ -5,70 +5,97 @@ on:
     branches:
       - main
 
+permissions:
+  contents: read
+
 jobs:
-  run-unittests:
-    name: unit${{ matrix.option }}-${{ matrix.python }}
+  unit-prerelease:
+    name: prerelease_deps
     runs-on: ubuntu-latest
     strategy:
       matrix:
-        option: ["", "_grpc_gcp", "_wo_grpc"]
+        python: ["3.14"]
+        option: ["prerelease"]
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v4
+      - name: Setup Python
+        uses: actions/setup-python@v5
+        with:
+          python-version: ${{ matrix.python }}
+          allow-prereleases: true
+      - name: Install nox
+        run: |
+          python -m pip install --upgrade setuptools pip wheel
+          python -m pip install nox
+      - name: Run ${{ matrix.option }} tests
+        env:
+          COVERAGE_FILE: .coverage${{ matrix.option }}-${{matrix.python }}
+        run: |
+          nox -s prerelease_deps
+  unit:
+    name: unit${{ matrix.option }}-${{ matrix.python }}
+    # TODO(https://github.com/googleapis/gapic-generator-python/issues/2303): use `ubuntu-latest` once this bug is fixed.
+    # Use ubuntu-22.04 until Python 3.7 is removed from the test matrix
+    # https://docs.github.com/en/actions/using-github-hosted-runners/using-github-hosted-runners/about-github-hosted-runners#standard-github-hosted-runners-for-public-repositories
+    runs-on: ubuntu-22.04
+    strategy:
+      matrix:
         python:
         - "3.7"
         - "3.8"
         - "3.9"
         - "3.10"
         - "3.11"
-      exclude:
-        - option: "_wo_grpc"
-          python: 3.7
-        - option: "_wo_grpc"
-          python: 3.8
-        - option: "_wo_grpc"
-          python: 3.9
+        - "3.12"
+        - "3.13"
+        - "3.14"
     steps:
       - name: Checkout
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
       - name: Setup Python
-        uses: actions/setup-python@v4
+        uses: actions/setup-python@v5
         with:
           python-version: ${{ matrix.python }}
+          allow-prereleases: true
       - name: Install nox
         run: |
           python -m pip install --upgrade setuptools pip wheel
           python -m pip install nox
       - name: Run unit tests
         env:
-          COVERAGE_FILE: .coverage${{ matrix.option }}-${{matrix.python }}
+          COVERAGE_FILE: .coverage-${{matrix.python }}
         run: |
-          nox -s unit${{ matrix.option }}-${{ matrix.python }}
+          nox -s unit-${{ matrix.python }}
       - name: Upload coverage results
-        uses: actions/upload-artifact@v3
+        uses: actions/upload-artifact@v4
         with:
-          name: coverage-artifacts
-          path: .coverage${{ matrix.option }}-${{ matrix.python }}
+          name: coverage-artifact-${{ matrix.python }}
+          path: .coverage-${{ matrix.python }}
+          include-hidden-files: true
   report-coverage:
     name: cover
     runs-on: ubuntu-latest
     needs:
-      - run-unittests
+      - unit
     steps:
       - name: Checkout
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
       - name: Setup Python
-        uses: actions/setup-python@v4
+        uses: actions/setup-python@v5
         with:
-          python-version: "3.10"
+          python-version: "3.14"
       - name: Install coverage
         run: |
           python -m pip install --upgrade setuptools pip wheel
           python -m pip install coverage
       - name: Download coverage results
-        uses: actions/download-artifact@v3
+        uses: actions/download-artifact@v4
         with:
-          name: coverage-artifacts
           path: .coverage-results/
       - name: Report coverage results
         run: |
-          coverage combine .coverage-results/.coverage*
+          find .coverage-results -type f -name '*.zip' -exec unzip {} \;
+          coverage combine .coverage-results/**/.coverage*
           coverage report --show-missing --fail-under=100
diff --git a/.kokoro/build.sh b/.kokoro/build.sh
index 05618cbce..d41b45aa1 100755
--- a/.kokoro/build.sh
+++ b/.kokoro/build.sh
@@ -1,5 +1,5 @@
 #!/bin/bash
-# Copyright 2023 Google LLC
+# Copyright 2024 Google LLC
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -15,11 +15,13 @@ set -eo pipefail
 
+CURRENT_DIR=$(dirname "${BASH_SOURCE[0]}")
+
 if [[ -z "${PROJECT_ROOT:-}" ]]; then
-    PROJECT_ROOT="github/python-api-core"
+    PROJECT_ROOT=$(realpath "${CURRENT_DIR}/..")
 fi
 
-cd "${PROJECT_ROOT}"
+pushd "${PROJECT_ROOT}"
 
 # Disable buffering, so that the logs stream through.
 export PYTHONUNBUFFERED=1
@@ -28,17 +30,16 @@ env | grep KOKORO
 
 # Setup service account credentials.
-export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json
+if [[ -f "${KOKORO_GFILE_DIR}/service-account.json" ]]
+then
+    export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json
+fi
 
 # Setup project id.
-export PROJECT_ID=$(cat "${KOKORO_GFILE_DIR}/project-id.json")
-
-# Remove old nox
-python3 -m pip uninstall --yes --quiet nox-automation
-
-# Install nox
-python3 -m pip install --upgrade --quiet nox
-python3 -m nox --version
+if [[ -f "${KOKORO_GFILE_DIR}/project-id.json" ]]
+then
+    export PROJECT_ID=$(cat "${KOKORO_GFILE_DIR}/project-id.json")
+fi
 
 # If this is a continuous build, send the test log to the FlakyBot.
 # See https://github.com/googleapis/repo-automation-bots/tree/main/packages/flakybot.
@@ -53,7 +54,7 @@ fi
 # If NOX_SESSION is set, it only runs the specified session,
 # otherwise run all the sessions.
 if [[ -n "${NOX_SESSION:-}" ]]; then
-    python3 -m nox -s ${NOX_SESSION:-}
+    python3 -m nox -s ${NOX_SESSION:-}
 else
-    python3 -m nox
+    python3 -m nox
 fi
diff --git a/.kokoro/docker/docs/Dockerfile b/.kokoro/docker/docs/Dockerfile
deleted file mode 100644
index 8e39a2cc4..000000000
--- a/.kokoro/docker/docs/Dockerfile
+++ /dev/null
@@ -1,83 +0,0 @@
-# Copyright 2023 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from ubuntu:22.04
-
-ENV DEBIAN_FRONTEND noninteractive
-
-# Ensure local Python is preferred over distribution Python.
-ENV PATH /usr/local/bin:$PATH
-
-# Install dependencies.
-RUN apt-get update \
-  && apt-get install -y --no-install-recommends \
-    apt-transport-https \
-    build-essential \
-    ca-certificates \
-    curl \
-    dirmngr \
-    git \
-    gpg-agent \
-    graphviz \
-    libbz2-dev \
-    libdb5.3-dev \
-    libexpat1-dev \
-    libffi-dev \
-    liblzma-dev \
-    libreadline-dev \
-    libsnappy-dev \
-    libssl-dev \
-    libsqlite3-dev \
-    portaudio19-dev \
-    python3-distutils \
-    redis-server \
-    software-properties-common \
-    ssh \
-    sudo \
-    tcl \
-    tcl-dev \
-    tk \
-    tk-dev \
-    uuid-dev \
-    wget \
-    zlib1g-dev \
-  && add-apt-repository universe \
-  && apt-get update \
-  && apt-get -y install jq \
-  && apt-get clean autoclean \
-  && apt-get autoremove -y \
-  && rm -rf /var/lib/apt/lists/* \
-  && rm -f /var/cache/apt/archives/*.deb
-
-###################### Install python 3.9.13
-
-# Download python 3.9.13
-RUN wget https://www.python.org/ftp/python/3.9.13/Python-3.9.13.tgz
-
-# Extract files
-RUN tar -xvf Python-3.9.13.tgz
-
-# Install python 3.9.13
-RUN ./Python-3.9.13/configure --enable-optimizations
-RUN make altinstall
-
-###################### Install pip
-RUN wget -O /tmp/get-pip.py 'https://bootstrap.pypa.io/get-pip.py' \
-  && python3 /tmp/get-pip.py \
-  && rm /tmp/get-pip.py
-
-# Test pip
-RUN python3 -m pip
-
-CMD ["python3.8"]
diff --git a/.kokoro/docs/common.cfg b/.kokoro/docs/common.cfg
deleted file mode 100644
index 48e89855d..000000000
--- a/.kokoro/docs/common.cfg
+++ /dev/null
@@ -1,67 +0,0 @@
-# Format: //devtools/kokoro/config/proto/build.proto
-
-# Build logs will be here
-action {
-  define_artifacts {
-    regex: "**/*sponge_log.xml"
-  }
-}
-
-# Download trampoline resources.
-gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
-
-# Use the trampoline script to run in docker.
-build_file: "python-api-core/.kokoro/trampoline_v2.sh"
-
-# Configure the docker image for kokoro-trampoline.
-env_vars: {
-  key: "TRAMPOLINE_IMAGE"
-  value: "gcr.io/cloud-devrel-kokoro-resources/python-lib-docs"
-}
-env_vars: {
-  key: "TRAMPOLINE_BUILD_FILE"
-  value: "github/python-api-core/.kokoro/publish-docs.sh"
-}
-
-env_vars: {
-  key: "STAGING_BUCKET"
-  value: "docs-staging"
-}
-
-env_vars: {
-  key: "V2_STAGING_BUCKET"
-  # Push non-cloud library docs to `docs-staging-v2-staging` instead of the
-  # Cloud RAD bucket `docs-staging-v2`
-  value: "docs-staging-v2-staging"
-}
-
-# It will upload the docker image after successful builds.
-env_vars: {
-  key: "TRAMPOLINE_IMAGE_UPLOAD"
-  value: "true"
-}
-
-# It will always build the docker image.
-env_vars: {
-  key: "TRAMPOLINE_DOCKERFILE"
-  value: ".kokoro/docker/docs/Dockerfile"
-}
-
-# Fetch the token needed for reporting release status to GitHub
-before_action {
-  fetch_keystore {
-    keystore_resource {
-      keystore_config_id: 73713
-      keyname: "yoshi-automation-github-key"
-    }
-  }
-}
-
-before_action {
-  fetch_keystore {
-    keystore_resource {
-      keystore_config_id: 73713
-      keyname: "docuploader_service_account"
-    }
-  }
-}
\ No newline at end of file
diff --git a/.kokoro/docs/docs-presubmit.cfg b/.kokoro/docs/docs-presubmit.cfg
deleted file mode 100644
index d1ed51ebb..000000000
--- a/.kokoro/docs/docs-presubmit.cfg
+++ /dev/null
@@ -1,28 +0,0 @@
-# Format: //devtools/kokoro/config/proto/build.proto
-
-env_vars: {
-  key: "STAGING_BUCKET"
-  value: "gcloud-python-test"
-}
-
-env_vars: {
-  key: "V2_STAGING_BUCKET"
-  value: "gcloud-python-test"
-}
-
-# We only upload the image in the main `docs` build.
-env_vars: {
-  key: "TRAMPOLINE_IMAGE_UPLOAD"
-  value: "false"
-}
-
-env_vars: {
-  key: "TRAMPOLINE_BUILD_FILE"
-  value: "github/python-api-core/.kokoro/build.sh"
-}
-
-# Only run this nox session.
-env_vars: {
-  key: "NOX_SESSION"
-  value: "docs docfx"
-}
diff --git a/.kokoro/docs/docs.cfg b/.kokoro/docs/docs.cfg
deleted file mode 100644
index 8f43917d9..000000000
--- a/.kokoro/docs/docs.cfg
+++ /dev/null
@@ -1 +0,0 @@
-# Format: //devtools/kokoro/config/proto/build.proto
\ No newline at end of file
diff --git a/.kokoro/populate-secrets.sh b/.kokoro/populate-secrets.sh
index 6f3972140..c435402f4 100755
--- a/.kokoro/populate-secrets.sh
+++ b/.kokoro/populate-secrets.sh
@@ -1,5 +1,5 @@
 #!/bin/bash
-# Copyright 2023 Google LLC.
+# Copyright 2024 Google LLC.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/.kokoro/publish-docs.sh b/.kokoro/publish-docs.sh
deleted file mode 100755
index 9eafe0be3..000000000
--- a/.kokoro/publish-docs.sh
+++ /dev/null
@@ -1,62 +0,0 @@
-#!/bin/bash
-# Copyright 2023 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     https://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-set -eo pipefail
-
-# Disable buffering, so that the logs stream through.
-export PYTHONUNBUFFERED=1
-
-export PATH="${HOME}/.local/bin:${PATH}"
-
-# Install nox
-python3 -m pip install --require-hashes -r .kokoro/requirements.txt
-python3 -m nox --version
-
-# build docs
-nox -s docs
-
-# create metadata
-python3 -m docuploader create-metadata \
-  --name=$(jq --raw-output '.name // empty' .repo-metadata.json) \
-  --version=$(python3 setup.py --version) \
-  --language=$(jq --raw-output '.language // empty' .repo-metadata.json) \
-  --distribution-name=$(python3 setup.py --name) \
-  --product-page=$(jq --raw-output '.product_documentation // empty' .repo-metadata.json) \
-  --github-repository=$(jq --raw-output '.repo // empty' .repo-metadata.json) \
-  --issue-tracker=$(jq --raw-output '.issue_tracker // empty' .repo-metadata.json)
-
-cat docs.metadata
-
-# upload docs
-python3 -m docuploader upload docs/_build/html --metadata-file docs.metadata --staging-bucket "${STAGING_BUCKET}"
-
-
-# docfx yaml files
-nox -s docfx
-
-# create metadata.
-python3 -m docuploader create-metadata \
-  --name=$(jq --raw-output '.name // empty' .repo-metadata.json) \
-  --version=$(python3 setup.py --version) \
-  --language=$(jq --raw-output '.language // empty' .repo-metadata.json) \
-  --distribution-name=$(python3 setup.py --name) \
-  --product-page=$(jq --raw-output '.product_documentation // empty' .repo-metadata.json) \
-  --github-repository=$(jq --raw-output '.repo // empty' .repo-metadata.json) \
-  --issue-tracker=$(jq --raw-output '.issue_tracker // empty' .repo-metadata.json)
-
-cat docs.metadata
-
-# upload docs
-python3 -m docuploader upload docs/_build/html/docfx_yaml --metadata-file docs.metadata --destination-prefix docfx --staging-bucket "${V2_STAGING_BUCKET}"
diff --git a/.kokoro/release.sh b/.kokoro/release.sh
deleted file mode 100755
index b07000dfb..000000000
--- a/.kokoro/release.sh
+++ /dev/null
@@ -1,29 +0,0 @@
-#!/bin/bash
-# Copyright 2023 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     https://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-set -eo pipefail
-
-# Start the releasetool reporter
-python3 -m pip install --require-hashes -r github/python-api-core/.kokoro/requirements.txt
-python3 -m releasetool publish-reporter-script > /tmp/publisher-script; source /tmp/publisher-script
-
-# Disable buffering, so that the logs stream through.
-export PYTHONUNBUFFERED=1
-
-# Move into the package, build the distribution and upload.
-TWINE_PASSWORD=$(cat "${KOKORO_KEYSTORE_DIR}/73713_google-cloud-pypi-token-keystore-1")
-cd github/python-api-core
-python3 setup.py sdist bdist_wheel
-twine upload --username __token__ --password "${TWINE_PASSWORD}" dist/*
diff --git a/.kokoro/release/common.cfg b/.kokoro/release/common.cfg
deleted file mode 100644
index 5890a7fe3..000000000
--- a/.kokoro/release/common.cfg
+++ /dev/null
@@ -1,49 +0,0 @@
-# Format: //devtools/kokoro/config/proto/build.proto
-
-# Build logs will be here
-action {
-  define_artifacts {
-    regex: "**/*sponge_log.xml"
-  }
-}
-
-# Download trampoline resources.
-gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
-
-# Use the trampoline script to run in docker.
-build_file: "python-api-core/.kokoro/trampoline.sh"
-
-# Configure the docker image for kokoro-trampoline.
-env_vars: {
-  key: "TRAMPOLINE_IMAGE"
-  value: "gcr.io/cloud-devrel-kokoro-resources/python-multi"
-}
-env_vars: {
-  key: "TRAMPOLINE_BUILD_FILE"
-  value: "github/python-api-core/.kokoro/release.sh"
-}
-
-# Fetch PyPI password
-before_action {
-  fetch_keystore {
-    keystore_resource {
-      keystore_config_id: 73713
-      keyname: "google-cloud-pypi-token-keystore-1"
-    }
-  }
-}
-
-# Tokens needed to report release status back to GitHub
-env_vars: {
-  key: "SECRET_MANAGER_KEYS"
-  value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem"
-}
-
-# Store the packages we uploaded to PyPI. That way, we have a record of exactly
-# what we published, which we can use to generate SBOMs and attestations.
-action { - define_artifacts { - regex: "github/python-api-core/**/*.tar.gz" - strip_prefix: "github/python-api-core" - } -} diff --git a/.kokoro/release/release.cfg b/.kokoro/release/release.cfg deleted file mode 100644 index 8f43917d9..000000000 --- a/.kokoro/release/release.cfg +++ /dev/null @@ -1 +0,0 @@ -# Format: //devtools/kokoro/config/proto/build.proto \ No newline at end of file diff --git a/.kokoro/requirements.in b/.kokoro/requirements.in deleted file mode 100644 index ec867d9fd..000000000 --- a/.kokoro/requirements.in +++ /dev/null @@ -1,10 +0,0 @@ -gcp-docuploader -gcp-releasetool>=1.10.5 # required for compatibility with cryptography>=39.x -importlib-metadata -typing-extensions -twine -wheel -setuptools -nox>=2022.11.21 # required to remove dependency on py -charset-normalizer<3 -click<8.1.0 diff --git a/.kokoro/requirements.txt b/.kokoro/requirements.txt deleted file mode 100644 index 8957e2110..000000000 --- a/.kokoro/requirements.txt +++ /dev/null @@ -1,515 +0,0 @@ -# -# This file is autogenerated by pip-compile with Python 3.9 -# by the following command: -# -# pip-compile --allow-unsafe --generate-hashes requirements.in -# -argcomplete==3.1.4 \ - --hash=sha256:72558ba729e4c468572609817226fb0a6e7e9a0a7d477b882be168c0b4a62b94 \ - --hash=sha256:fbe56f8cda08aa9a04b307d8482ea703e96a6a801611acb4be9bf3942017989f - # via nox -attrs==23.1.0 \ - --hash=sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04 \ - --hash=sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015 - # via gcp-releasetool -cachetools==5.3.2 \ - --hash=sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2 \ - --hash=sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1 - # via google-auth -certifi==2023.7.22 \ - --hash=sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082 \ - --hash=sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9 - # via requests -cffi==1.16.0 \ - --hash=sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc \ - --hash=sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a \ - --hash=sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417 \ - --hash=sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab \ - --hash=sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520 \ - --hash=sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36 \ - --hash=sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743 \ - --hash=sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8 \ - --hash=sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed \ - --hash=sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684 \ - --hash=sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56 \ - --hash=sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324 \ - --hash=sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d \ - --hash=sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235 \ - --hash=sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e \ - --hash=sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088 \ - --hash=sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000 \ - --hash=sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7 \ - 
--hash=sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e \ - --hash=sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673 \ - --hash=sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c \ - --hash=sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe \ - --hash=sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2 \ - --hash=sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098 \ - --hash=sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8 \ - --hash=sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a \ - --hash=sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0 \ - --hash=sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b \ - --hash=sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896 \ - --hash=sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e \ - --hash=sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9 \ - --hash=sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2 \ - --hash=sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b \ - --hash=sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6 \ - --hash=sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404 \ - --hash=sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f \ - --hash=sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0 \ - --hash=sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4 \ - --hash=sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc \ - --hash=sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936 \ - --hash=sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba \ - --hash=sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872 \ - --hash=sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb \ - --hash=sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614 \ - --hash=sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1 \ - --hash=sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d \ - --hash=sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969 \ - --hash=sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b \ - --hash=sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4 \ - --hash=sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627 \ - --hash=sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956 \ - --hash=sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357 - # via cryptography -charset-normalizer==2.1.1 \ - --hash=sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845 \ - --hash=sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f - # via - # -r requirements.in - # requests -click==8.0.4 \ - --hash=sha256:6a7a62563bbfabfda3a38f3023a1db4a35978c0abd76f6c9605ecd6554d6d9b1 \ - --hash=sha256:8458d7b1287c5fb128c90e23381cf99dcde74beaf6c7ff6384ce84d6fe090adb - # via - # -r requirements.in - # gcp-docuploader - # gcp-releasetool -colorlog==6.7.0 \ - --hash=sha256:0d33ca236784a1ba3ff9c532d4964126d8a2c44f1f0cb1d2b0728196f512f662 \ - --hash=sha256:bd94bd21c1e13fac7bd3153f4bc3a7dc0eb0974b8bc2fdf1a989e474f6e582e5 - # via - # gcp-docuploader - # nox 
-cryptography==41.0.5 \ - --hash=sha256:0c327cac00f082013c7c9fb6c46b7cc9fa3c288ca702c74773968173bda421bf \ - --hash=sha256:0d2a6a598847c46e3e321a7aef8af1436f11c27f1254933746304ff014664d84 \ - --hash=sha256:227ec057cd32a41c6651701abc0328135e472ed450f47c2766f23267b792a88e \ - --hash=sha256:22892cc830d8b2c89ea60148227631bb96a7da0c1b722f2aac8824b1b7c0b6b8 \ - --hash=sha256:392cb88b597247177172e02da6b7a63deeff1937fa6fec3bbf902ebd75d97ec7 \ - --hash=sha256:3be3ca726e1572517d2bef99a818378bbcf7d7799d5372a46c79c29eb8d166c1 \ - --hash=sha256:573eb7128cbca75f9157dcde974781209463ce56b5804983e11a1c462f0f4e88 \ - --hash=sha256:580afc7b7216deeb87a098ef0674d6ee34ab55993140838b14c9b83312b37b86 \ - --hash=sha256:5a70187954ba7292c7876734183e810b728b4f3965fbe571421cb2434d279179 \ - --hash=sha256:73801ac9736741f220e20435f84ecec75ed70eda90f781a148f1bad546963d81 \ - --hash=sha256:7d208c21e47940369accfc9e85f0de7693d9a5d843c2509b3846b2db170dfd20 \ - --hash=sha256:8254962e6ba1f4d2090c44daf50a547cd5f0bf446dc658a8e5f8156cae0d8548 \ - --hash=sha256:88417bff20162f635f24f849ab182b092697922088b477a7abd6664ddd82291d \ - --hash=sha256:a48e74dad1fb349f3dc1d449ed88e0017d792997a7ad2ec9587ed17405667e6d \ - --hash=sha256:b948e09fe5fb18517d99994184854ebd50b57248736fd4c720ad540560174ec5 \ - --hash=sha256:c707f7afd813478e2019ae32a7c49cd932dd60ab2d2a93e796f68236b7e1fbf1 \ - --hash=sha256:d38e6031e113b7421db1de0c1b1f7739564a88f1684c6b89234fbf6c11b75147 \ - --hash=sha256:d3977f0e276f6f5bf245c403156673db103283266601405376f075c849a0b936 \ - --hash=sha256:da6a0ff8f1016ccc7477e6339e1d50ce5f59b88905585f77193ebd5068f1e797 \ - --hash=sha256:e270c04f4d9b5671ebcc792b3ba5d4488bf7c42c3c241a3748e2599776f29696 \ - --hash=sha256:e886098619d3815e0ad5790c973afeee2c0e6e04b4da90b88e6bd06e2a0b1b72 \ - --hash=sha256:ec3b055ff8f1dce8e6ef28f626e0972981475173d7973d63f271b29c8a2897da \ - --hash=sha256:fba1e91467c65fe64a82c689dc6cf58151158993b13eb7a7f3f4b7f395636723 - # via - # gcp-releasetool - # secretstorage -distlib==0.3.7 \ - --hash=sha256:2e24928bc811348f0feb63014e97aaae3037f2cf48712d51ae61df7fd6075057 \ - --hash=sha256:9dafe54b34a028eafd95039d5e5d4851a13734540f1331060d31c9916e7147a8 - # via virtualenv -docutils==0.20.1 \ - --hash=sha256:96f387a2c5562db4476f09f13bbab2192e764cac08ebbf3a34a95d9b1e4a59d6 \ - --hash=sha256:f08a4e276c3a1583a86dce3e34aba3fe04d02bba2dd51ed16106244e8a923e3b - # via readme-renderer -filelock==3.13.1 \ - --hash=sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e \ - --hash=sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c - # via virtualenv -gcp-docuploader==0.6.5 \ - --hash=sha256:30221d4ac3e5a2b9c69aa52fdbef68cc3f27d0e6d0d90e220fc024584b8d2318 \ - --hash=sha256:b7458ef93f605b9d46a4bf3a8dc1755dad1f31d030c8679edf304e343b347eea - # via -r requirements.in -gcp-releasetool==1.16.0 \ - --hash=sha256:27bf19d2e87aaa884096ff941aa3c592c482be3d6a2bfe6f06afafa6af2353e3 \ - --hash=sha256:a316b197a543fd036209d0caba7a8eb4d236d8e65381c80cbc6d7efaa7606d63 - # via -r requirements.in -google-api-core==2.12.0 \ - --hash=sha256:c22e01b1e3c4dcd90998494879612c38d0a3411d1f7b679eb89e2abe3ce1f553 \ - --hash=sha256:ec6054f7d64ad13b41e43d96f735acbd763b0f3b695dabaa2d579673f6a6e160 - # via - # google-cloud-core - # google-cloud-storage -google-auth==2.23.4 \ - --hash=sha256:79905d6b1652187def79d491d6e23d0cbb3a21d3c7ba0dbaa9c8a01906b13ff3 \ - --hash=sha256:d4bbc92fe4b8bfd2f3e8d88e5ba7085935da208ee38a134fc280e7ce682a05f2 - # via - # gcp-releasetool - # google-api-core - # google-cloud-core - # google-cloud-storage 
-google-cloud-core==2.3.3 \ - --hash=sha256:37b80273c8d7eee1ae816b3a20ae43585ea50506cb0e60f3cf5be5f87f1373cb \ - --hash=sha256:fbd11cad3e98a7e5b0343dc07cb1039a5ffd7a5bb96e1f1e27cee4bda4a90863 - # via google-cloud-storage -google-cloud-storage==2.13.0 \ - --hash=sha256:ab0bf2e1780a1b74cf17fccb13788070b729f50c252f0c94ada2aae0ca95437d \ - --hash=sha256:f62dc4c7b6cd4360d072e3deb28035fbdad491ac3d9b0b1815a12daea10f37c7 - # via gcp-docuploader -google-crc32c==1.5.0 \ - --hash=sha256:024894d9d3cfbc5943f8f230e23950cd4906b2fe004c72e29b209420a1e6b05a \ - --hash=sha256:02c65b9817512edc6a4ae7c7e987fea799d2e0ee40c53ec573a692bee24de876 \ - --hash=sha256:02ebb8bf46c13e36998aeaad1de9b48f4caf545e91d14041270d9dca767b780c \ - --hash=sha256:07eb3c611ce363c51a933bf6bd7f8e3878a51d124acfc89452a75120bc436289 \ - --hash=sha256:1034d91442ead5a95b5aaef90dbfaca8633b0247d1e41621d1e9f9db88c36298 \ - --hash=sha256:116a7c3c616dd14a3de8c64a965828b197e5f2d121fedd2f8c5585c547e87b02 \ - --hash=sha256:19e0a019d2c4dcc5e598cd4a4bc7b008546b0358bd322537c74ad47a5386884f \ - --hash=sha256:1c7abdac90433b09bad6c43a43af253e688c9cfc1c86d332aed13f9a7c7f65e2 \ - --hash=sha256:1e986b206dae4476f41bcec1faa057851f3889503a70e1bdb2378d406223994a \ - --hash=sha256:272d3892a1e1a2dbc39cc5cde96834c236d5327e2122d3aaa19f6614531bb6eb \ - --hash=sha256:278d2ed7c16cfc075c91378c4f47924c0625f5fc84b2d50d921b18b7975bd210 \ - --hash=sha256:2ad40e31093a4af319dadf503b2467ccdc8f67c72e4bcba97f8c10cb078207b5 \ - --hash=sha256:2e920d506ec85eb4ba50cd4228c2bec05642894d4c73c59b3a2fe20346bd00ee \ - --hash=sha256:3359fc442a743e870f4588fcf5dcbc1bf929df1fad8fb9905cd94e5edb02e84c \ - --hash=sha256:37933ec6e693e51a5b07505bd05de57eee12f3e8c32b07da7e73669398e6630a \ - --hash=sha256:398af5e3ba9cf768787eef45c803ff9614cc3e22a5b2f7d7ae116df8b11e3314 \ - --hash=sha256:3b747a674c20a67343cb61d43fdd9207ce5da6a99f629c6e2541aa0e89215bcd \ - --hash=sha256:461665ff58895f508e2866824a47bdee72497b091c730071f2b7575d5762ab65 \ - --hash=sha256:4c6fdd4fccbec90cc8a01fc00773fcd5fa28db683c116ee3cb35cd5da9ef6c37 \ - --hash=sha256:5829b792bf5822fd0a6f6eb34c5f81dd074f01d570ed7f36aa101d6fc7a0a6e4 \ - --hash=sha256:596d1f98fc70232fcb6590c439f43b350cb762fb5d61ce7b0e9db4539654cc13 \ - --hash=sha256:5ae44e10a8e3407dbe138984f21e536583f2bba1be9491239f942c2464ac0894 \ - --hash=sha256:635f5d4dd18758a1fbd1049a8e8d2fee4ffed124462d837d1a02a0e009c3ab31 \ - --hash=sha256:64e52e2b3970bd891309c113b54cf0e4384762c934d5ae56e283f9a0afcd953e \ - --hash=sha256:66741ef4ee08ea0b2cc3c86916ab66b6aef03768525627fd6a1b34968b4e3709 \ - --hash=sha256:67b741654b851abafb7bc625b6d1cdd520a379074e64b6a128e3b688c3c04740 \ - --hash=sha256:6ac08d24c1f16bd2bf5eca8eaf8304812f44af5cfe5062006ec676e7e1d50afc \ - --hash=sha256:6f998db4e71b645350b9ac28a2167e6632c239963ca9da411523bb439c5c514d \ - --hash=sha256:72218785ce41b9cfd2fc1d6a017dc1ff7acfc4c17d01053265c41a2c0cc39b8c \ - --hash=sha256:74dea7751d98034887dbd821b7aae3e1d36eda111d6ca36c206c44478035709c \ - --hash=sha256:759ce4851a4bb15ecabae28f4d2e18983c244eddd767f560165563bf9aefbc8d \ - --hash=sha256:77e2fd3057c9d78e225fa0a2160f96b64a824de17840351b26825b0848022906 \ - --hash=sha256:7c074fece789b5034b9b1404a1f8208fc2d4c6ce9decdd16e8220c5a793e6f61 \ - --hash=sha256:7c42c70cd1d362284289c6273adda4c6af8039a8ae12dc451dcd61cdabb8ab57 \ - --hash=sha256:7f57f14606cd1dd0f0de396e1e53824c371e9544a822648cd76c034d209b559c \ - --hash=sha256:83c681c526a3439b5cf94f7420471705bbf96262f49a6fe546a6db5f687a3d4a \ - --hash=sha256:8485b340a6a9e76c62a7dce3c98e5f102c9219f4cfbf896a00cf48caf078d438 \ - 
--hash=sha256:84e6e8cd997930fc66d5bb4fde61e2b62ba19d62b7abd7a69920406f9ecca946 \ - --hash=sha256:89284716bc6a5a415d4eaa11b1726d2d60a0cd12aadf5439828353662ede9dd7 \ - --hash=sha256:8b87e1a59c38f275c0e3676fc2ab6d59eccecfd460be267ac360cc31f7bcde96 \ - --hash=sha256:8f24ed114432de109aa9fd317278518a5af2d31ac2ea6b952b2f7782b43da091 \ - --hash=sha256:98cb4d057f285bd80d8778ebc4fde6b4d509ac3f331758fb1528b733215443ae \ - --hash=sha256:998679bf62b7fb599d2878aa3ed06b9ce688b8974893e7223c60db155f26bd8d \ - --hash=sha256:9ba053c5f50430a3fcfd36f75aff9caeba0440b2d076afdb79a318d6ca245f88 \ - --hash=sha256:9c99616c853bb585301df6de07ca2cadad344fd1ada6d62bb30aec05219c45d2 \ - --hash=sha256:a1fd716e7a01f8e717490fbe2e431d2905ab8aa598b9b12f8d10abebb36b04dd \ - --hash=sha256:a2355cba1f4ad8b6988a4ca3feed5bff33f6af2d7f134852cf279c2aebfde541 \ - --hash=sha256:b1f8133c9a275df5613a451e73f36c2aea4fe13c5c8997e22cf355ebd7bd0728 \ - --hash=sha256:b8667b48e7a7ef66afba2c81e1094ef526388d35b873966d8a9a447974ed9178 \ - --hash=sha256:ba1eb1843304b1e5537e1fca632fa894d6f6deca8d6389636ee5b4797affb968 \ - --hash=sha256:be82c3c8cfb15b30f36768797a640e800513793d6ae1724aaaafe5bf86f8f346 \ - --hash=sha256:c02ec1c5856179f171e032a31d6f8bf84e5a75c45c33b2e20a3de353b266ebd8 \ - --hash=sha256:c672d99a345849301784604bfeaeba4db0c7aae50b95be04dd651fd2a7310b93 \ - --hash=sha256:c6c777a480337ac14f38564ac88ae82d4cd238bf293f0a22295b66eb89ffced7 \ - --hash=sha256:cae0274952c079886567f3f4f685bcaf5708f0a23a5f5216fdab71f81a6c0273 \ - --hash=sha256:cd67cf24a553339d5062eff51013780a00d6f97a39ca062781d06b3a73b15462 \ - --hash=sha256:d3515f198eaa2f0ed49f8819d5732d70698c3fa37384146079b3799b97667a94 \ - --hash=sha256:d5280312b9af0976231f9e317c20e4a61cd2f9629b7bfea6a693d1878a264ebd \ - --hash=sha256:de06adc872bcd8c2a4e0dc51250e9e65ef2ca91be023b9d13ebd67c2ba552e1e \ - --hash=sha256:e1674e4307fa3024fc897ca774e9c7562c957af85df55efe2988ed9056dc4e57 \ - --hash=sha256:e2096eddb4e7c7bdae4bd69ad364e55e07b8316653234a56552d9c988bd2d61b \ - --hash=sha256:e560628513ed34759456a416bf86b54b2476c59144a9138165c9a1575801d0d9 \ - --hash=sha256:edfedb64740750e1a3b16152620220f51d58ff1b4abceb339ca92e934775c27a \ - --hash=sha256:f13cae8cc389a440def0c8c52057f37359014ccbc9dc1f0827936bcd367c6100 \ - --hash=sha256:f314013e7dcd5cf45ab1945d92e713eec788166262ae8deb2cfacd53def27325 \ - --hash=sha256:f583edb943cf2e09c60441b910d6a20b4d9d626c75a36c8fcac01a6c96c01183 \ - --hash=sha256:fd8536e902db7e365f49e7d9029283403974ccf29b13fc7028b97e2295b33556 \ - --hash=sha256:fe70e325aa68fa4b5edf7d1a4b6f691eb04bbccac0ace68e34820d283b5f80d4 - # via - # google-cloud-storage - # google-resumable-media -google-resumable-media==2.6.0 \ - --hash=sha256:972852f6c65f933e15a4a210c2b96930763b47197cdf4aa5f5bea435efb626e7 \ - --hash=sha256:fc03d344381970f79eebb632a3c18bb1828593a2dc5572b5f90115ef7d11e81b - # via google-cloud-storage -googleapis-common-protos==1.61.0 \ - --hash=sha256:22f1915393bb3245343f6efe87f6fe868532efc12aa26b391b15132e1279f1c0 \ - --hash=sha256:8a64866a97f6304a7179873a465d6eee97b7a24ec6cfd78e0f575e96b821240b - # via google-api-core -idna==3.4 \ - --hash=sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4 \ - --hash=sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2 - # via requests -importlib-metadata==6.8.0 \ - --hash=sha256:3ebb78df84a805d7698245025b975d9d67053cd94c79245ba4b3eb694abe68bb \ - --hash=sha256:dbace7892d8c0c4ac1ad096662232f831d4e64f4c4545bd53016a3e9d4654743 - # via - # -r requirements.in - # keyring - # twine -jaraco-classes==3.3.0 \ - 
--hash=sha256:10afa92b6743f25c0cf5f37c6bb6e18e2c5bb84a16527ccfc0040ea377e7aaeb \ - --hash=sha256:c063dd08e89217cee02c8d5e5ec560f2c8ce6cdc2fcdc2e68f7b2e5547ed3621 - # via keyring -jeepney==0.8.0 \ - --hash=sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806 \ - --hash=sha256:c0a454ad016ca575060802ee4d590dd912e35c122fa04e70306de3d076cce755 - # via - # keyring - # secretstorage -jinja2==3.1.2 \ - --hash=sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852 \ - --hash=sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61 - # via gcp-releasetool -keyring==24.2.0 \ - --hash=sha256:4901caaf597bfd3bbd78c9a0c7c4c29fcd8310dab2cffefe749e916b6527acd6 \ - --hash=sha256:ca0746a19ec421219f4d713f848fa297a661a8a8c1504867e55bfb5e09091509 - # via - # gcp-releasetool - # twine -markdown-it-py==3.0.0 \ - --hash=sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1 \ - --hash=sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb - # via rich -markupsafe==2.1.3 \ - --hash=sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e \ - --hash=sha256:0a4e4a1aff6c7ac4cd55792abf96c915634c2b97e3cc1c7129578aa68ebd754e \ - --hash=sha256:10bbfe99883db80bdbaff2dcf681dfc6533a614f700da1287707e8a5d78a8431 \ - --hash=sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686 \ - --hash=sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c \ - --hash=sha256:1577735524cdad32f9f694208aa75e422adba74f1baee7551620e43a3141f559 \ - --hash=sha256:1b40069d487e7edb2676d3fbdb2b0829ffa2cd63a2ec26c4938b2d34391b4ecc \ - --hash=sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb \ - --hash=sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939 \ - --hash=sha256:282c2cb35b5b673bbcadb33a585408104df04f14b2d9b01d4c345a3b92861c2c \ - --hash=sha256:2c1b19b3aaacc6e57b7e25710ff571c24d6c3613a45e905b1fde04d691b98ee0 \ - --hash=sha256:2ef12179d3a291be237280175b542c07a36e7f60718296278d8593d21ca937d4 \ - --hash=sha256:338ae27d6b8745585f87218a3f23f1512dbf52c26c28e322dbe54bcede54ccb9 \ - --hash=sha256:3c0fae6c3be832a0a0473ac912810b2877c8cb9d76ca48de1ed31e1c68386575 \ - --hash=sha256:3fd4abcb888d15a94f32b75d8fd18ee162ca0c064f35b11134be77050296d6ba \ - --hash=sha256:42de32b22b6b804f42c5d98be4f7e5e977ecdd9ee9b660fda1a3edf03b11792d \ - --hash=sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd \ - --hash=sha256:504b320cd4b7eff6f968eddf81127112db685e81f7e36e75f9f84f0df46041c3 \ - --hash=sha256:525808b8019e36eb524b8c68acdd63a37e75714eac50e988180b169d64480a00 \ - --hash=sha256:56d9f2ecac662ca1611d183feb03a3fa4406469dafe241673d521dd5ae92a155 \ - --hash=sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac \ - --hash=sha256:65c1a9bcdadc6c28eecee2c119465aebff8f7a584dd719facdd9e825ec61ab52 \ - --hash=sha256:68e78619a61ecf91e76aa3e6e8e33fc4894a2bebe93410754bd28fce0a8a4f9f \ - --hash=sha256:69c0f17e9f5a7afdf2cc9fb2d1ce6aabdb3bafb7f38017c0b77862bcec2bbad8 \ - --hash=sha256:6b2b56950d93e41f33b4223ead100ea0fe11f8e6ee5f641eb753ce4b77a7042b \ - --hash=sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007 \ - --hash=sha256:787003c0ddb00500e49a10f2844fac87aa6ce977b90b0feaaf9de23c22508b24 \ - --hash=sha256:7ef3cb2ebbf91e330e3bb937efada0edd9003683db6b57bb108c4001f37a02ea \ - --hash=sha256:8023faf4e01efadfa183e863fefde0046de576c6f14659e8782065bcece22198 \ - --hash=sha256:8758846a7e80910096950b67071243da3e5a20ed2546e6392603c096778d48e0 \ - 
--hash=sha256:8afafd99945ead6e075b973fefa56379c5b5c53fd8937dad92c662da5d8fd5ee \ - --hash=sha256:8c41976a29d078bb235fea9b2ecd3da465df42a562910f9022f1a03107bd02be \ - --hash=sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2 \ - --hash=sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1 \ - --hash=sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707 \ - --hash=sha256:962f82a3086483f5e5f64dbad880d31038b698494799b097bc59c2edf392fce6 \ - --hash=sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c \ - --hash=sha256:9dcdfd0eaf283af041973bff14a2e143b8bd64e069f4c383416ecd79a81aab58 \ - --hash=sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823 \ - --hash=sha256:aa7bd130efab1c280bed0f45501b7c8795f9fdbeb02e965371bbef3523627779 \ - --hash=sha256:ab4a0df41e7c16a1392727727e7998a467472d0ad65f3ad5e6e765015df08636 \ - --hash=sha256:ad9e82fb8f09ade1c3e1b996a6337afac2b8b9e365f926f5a61aacc71adc5b3c \ - --hash=sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad \ - --hash=sha256:b076b6226fb84157e3f7c971a47ff3a679d837cf338547532ab866c57930dbee \ - --hash=sha256:b7ff0f54cb4ff66dd38bebd335a38e2c22c41a8ee45aa608efc890ac3e3931bc \ - --hash=sha256:bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2 \ - --hash=sha256:c011a4149cfbcf9f03994ec2edffcb8b1dc2d2aede7ca243746df97a5d41ce48 \ - --hash=sha256:c9c804664ebe8f83a211cace637506669e7890fec1b4195b505c214e50dd4eb7 \ - --hash=sha256:ca379055a47383d02a5400cb0d110cef0a776fc644cda797db0c5696cfd7e18e \ - --hash=sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b \ - --hash=sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa \ - --hash=sha256:ceb01949af7121f9fc39f7d27f91be8546f3fb112c608bc4029aef0bab86a2a5 \ - --hash=sha256:d080e0a5eb2529460b30190fcfcc4199bd7f827663f858a226a81bc27beaa97e \ - --hash=sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb \ - --hash=sha256:df0be2b576a7abbf737b1575f048c23fb1d769f267ec4358296f31c2479db8f9 \ - --hash=sha256:e09031c87a1e51556fdcb46e5bd4f59dfb743061cf93c4d6831bf894f125eb57 \ - --hash=sha256:e4dd52d80b8c83fdce44e12478ad2e85c64ea965e75d66dbeafb0a3e77308fcc \ - --hash=sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc \ - --hash=sha256:fec21693218efe39aa7f8599346e90c705afa52c5b31ae019b2e57e8f6542bb2 \ - --hash=sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11 - # via jinja2 -mdurl==0.1.2 \ - --hash=sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8 \ - --hash=sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba - # via markdown-it-py -more-itertools==10.1.0 \ - --hash=sha256:626c369fa0eb37bac0291bce8259b332fd59ac792fa5497b59837309cd5b114a \ - --hash=sha256:64e0735fcfdc6f3464ea133afe8ea4483b1c5fe3a3d69852e6503b43a0b222e6 - # via jaraco-classes -nh3==0.2.14 \ - --hash=sha256:116c9515937f94f0057ef50ebcbcc10600860065953ba56f14473ff706371873 \ - --hash=sha256:18415df36db9b001f71a42a3a5395db79cf23d556996090d293764436e98e8ad \ - --hash=sha256:203cac86e313cf6486704d0ec620a992c8bc164c86d3a4fd3d761dd552d839b5 \ - --hash=sha256:2b0be5c792bd43d0abef8ca39dd8acb3c0611052ce466d0401d51ea0d9aa7525 \ - --hash=sha256:377aaf6a9e7c63962f367158d808c6a1344e2b4f83d071c43fbd631b75c4f0b2 \ - --hash=sha256:525846c56c2bcd376f5eaee76063ebf33cf1e620c1498b2a40107f60cfc6054e \ - --hash=sha256:5529a3bf99402c34056576d80ae5547123f1078da76aa99e8ed79e44fa67282d \ - 
--hash=sha256:7771d43222b639a4cd9e341f870cee336b9d886de1ad9bec8dddab22fe1de450 \ - --hash=sha256:88c753efbcdfc2644a5012938c6b9753f1c64a5723a67f0301ca43e7b85dcf0e \ - --hash=sha256:93a943cfd3e33bd03f77b97baa11990148687877b74193bf777956b67054dcc6 \ - --hash=sha256:9be2f68fb9a40d8440cbf34cbf40758aa7f6093160bfc7fb018cce8e424f0c3a \ - --hash=sha256:a0c509894fd4dccdff557068e5074999ae3b75f4c5a2d6fb5415e782e25679c4 \ - --hash=sha256:ac8056e937f264995a82bf0053ca898a1cb1c9efc7cd68fa07fe0060734df7e4 \ - --hash=sha256:aed56a86daa43966dd790ba86d4b810b219f75b4bb737461b6886ce2bde38fd6 \ - --hash=sha256:e8986f1dd3221d1e741fda0a12eaa4a273f1d80a35e31a1ffe579e7c621d069e \ - --hash=sha256:f99212a81c62b5f22f9e7c3e347aa00491114a5647e1f13bbebd79c3e5f08d75 - # via readme-renderer -nox==2023.4.22 \ - --hash=sha256:0b1adc619c58ab4fa57d6ab2e7823fe47a32e70202f287d78474adcc7bda1891 \ - --hash=sha256:46c0560b0dc609d7d967dc99e22cb463d3c4caf54a5fda735d6c11b5177e3a9f - # via -r requirements.in -packaging==23.2 \ - --hash=sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5 \ - --hash=sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7 - # via - # gcp-releasetool - # nox -pkginfo==1.9.6 \ - --hash=sha256:4b7a555a6d5a22169fcc9cf7bfd78d296b0361adad412a346c1226849af5e546 \ - --hash=sha256:8fd5896e8718a4372f0ea9cc9d96f6417c9b986e23a4d116dda26b62cc29d046 - # via twine -platformdirs==3.11.0 \ - --hash=sha256:cf8ee52a3afdb965072dcc652433e0c7e3e40cf5ea1477cd4b3b1d2eb75495b3 \ - --hash=sha256:e9d171d00af68be50e9202731309c4e658fd8bc76f55c11c7dd760d023bda68e - # via virtualenv -protobuf==3.20.3 \ - --hash=sha256:03038ac1cfbc41aa21f6afcbcd357281d7521b4157926f30ebecc8d4ea59dcb7 \ - --hash=sha256:28545383d61f55b57cf4df63eebd9827754fd2dc25f80c5253f9184235db242c \ - --hash=sha256:2e3427429c9cffebf259491be0af70189607f365c2f41c7c3764af6f337105f2 \ - --hash=sha256:398a9e0c3eaceb34ec1aee71894ca3299605fa8e761544934378bbc6c97de23b \ - --hash=sha256:44246bab5dd4b7fbd3c0c80b6f16686808fab0e4aca819ade6e8d294a29c7050 \ - --hash=sha256:447d43819997825d4e71bf5769d869b968ce96848b6479397e29fc24c4a5dfe9 \ - --hash=sha256:67a3598f0a2dcbc58d02dd1928544e7d88f764b47d4a286202913f0b2801c2e7 \ - --hash=sha256:74480f79a023f90dc6e18febbf7b8bac7508420f2006fabd512013c0c238f454 \ - --hash=sha256:819559cafa1a373b7096a482b504ae8a857c89593cf3a25af743ac9ecbd23480 \ - --hash=sha256:899dc660cd599d7352d6f10d83c95df430a38b410c1b66b407a6b29265d66469 \ - --hash=sha256:8c0c984a1b8fef4086329ff8dd19ac77576b384079247c770f29cc8ce3afa06c \ - --hash=sha256:9aae4406ea63d825636cc11ffb34ad3379335803216ee3a856787bcf5ccc751e \ - --hash=sha256:a7ca6d488aa8ff7f329d4c545b2dbad8ac31464f1d8b1c87ad1346717731e4db \ - --hash=sha256:b6cc7ba72a8850621bfec987cb72623e703b7fe2b9127a161ce61e61558ad905 \ - --hash=sha256:bf01b5720be110540be4286e791db73f84a2b721072a3711efff6c324cdf074b \ - --hash=sha256:c02ce36ec760252242a33967d51c289fd0e1c0e6e5cc9397e2279177716add86 \ - --hash=sha256:d9e4432ff660d67d775c66ac42a67cf2453c27cb4d738fc22cb53b5d84c135d4 \ - --hash=sha256:daa564862dd0d39c00f8086f88700fdbe8bc717e993a21e90711acfed02f2402 \ - --hash=sha256:de78575669dddf6099a8a0f46a27e82a1783c557ccc38ee620ed8cc96d3be7d7 \ - --hash=sha256:e64857f395505ebf3d2569935506ae0dfc4a15cb80dc25261176c784662cdcc4 \ - --hash=sha256:f4bd856d702e5b0d96a00ec6b307b0f51c1982c2bf9c0052cf9019e9a544ba99 \ - --hash=sha256:f4c42102bc82a51108e449cbb32b19b180022941c727bac0cfd50170341f16ee - # via - # gcp-docuploader - # gcp-releasetool - # google-api-core - # googleapis-common-protos -pyasn1==0.5.0 
\ - --hash=sha256:87a2121042a1ac9358cabcaf1d07680ff97ee6404333bacca15f76aa8ad01a57 \ - --hash=sha256:97b7290ca68e62a832558ec3976f15cbf911bf5d7c7039d8b861c2a0ece69fde - # via - # pyasn1-modules - # rsa -pyasn1-modules==0.3.0 \ - --hash=sha256:5bd01446b736eb9d31512a30d46c1ac3395d676c6f3cafa4c03eb54b9925631c \ - --hash=sha256:d3ccd6ed470d9ffbc716be08bd90efbd44d0734bc9303818f7336070984a162d - # via google-auth -pycparser==2.21 \ - --hash=sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9 \ - --hash=sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206 - # via cffi -pygments==2.16.1 \ - --hash=sha256:13fc09fa63bc8d8671a6d247e1eb303c4b343eaee81d861f3404db2935653692 \ - --hash=sha256:1daff0494820c69bc8941e407aa20f577374ee88364ee10a98fdbe0aece96e29 - # via - # readme-renderer - # rich -pyjwt==2.8.0 \ - --hash=sha256:57e28d156e3d5c10088e0c68abb90bfac3df82b40a71bd0daa20c65ccd5c23de \ - --hash=sha256:59127c392cc44c2da5bb3192169a91f429924e17aff6534d70fdc02ab3e04320 - # via gcp-releasetool -pyperclip==1.8.2 \ - --hash=sha256:105254a8b04934f0bc84e9c24eb360a591aaf6535c9def5f29d92af107a9bf57 - # via gcp-releasetool -python-dateutil==2.8.2 \ - --hash=sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86 \ - --hash=sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9 - # via gcp-releasetool -readme-renderer==42.0 \ - --hash=sha256:13d039515c1f24de668e2c93f2e877b9dbe6c6c32328b90a40a49d8b2b85f36d \ - --hash=sha256:2d55489f83be4992fe4454939d1a051c33edbab778e82761d060c9fc6b308cd1 - # via twine -requests==2.31.0 \ - --hash=sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f \ - --hash=sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1 - # via - # gcp-releasetool - # google-api-core - # google-cloud-storage - # requests-toolbelt - # twine -requests-toolbelt==1.0.0 \ - --hash=sha256:7681a0a3d047012b5bdc0ee37d7f8f07ebe76ab08caeccfc3921ce23c88d5bc6 \ - --hash=sha256:cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06 - # via twine -rfc3986==2.0.0 \ - --hash=sha256:50b1502b60e289cb37883f3dfd34532b8873c7de9f49bb546641ce9cbd256ebd \ - --hash=sha256:97aacf9dbd4bfd829baad6e6309fa6573aaf1be3f6fa735c8ab05e46cecb261c - # via twine -rich==13.6.0 \ - --hash=sha256:2b38e2fe9ca72c9a00170a1a2d20c63c790d0e10ef1fe35eba76e1e7b1d7d245 \ - --hash=sha256:5c14d22737e6d5084ef4771b62d5d4363165b403455a30a1c8ca39dc7b644bef - # via twine -rsa==4.9 \ - --hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \ - --hash=sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21 - # via google-auth -secretstorage==3.3.3 \ - --hash=sha256:2403533ef369eca6d2ba81718576c5e0f564d5cca1b58f73a8b23e7d4eeebd77 \ - --hash=sha256:f356e6628222568e3af06f2eba8df495efa13b3b63081dafd4f7d9a7b7bc9f99 - # via keyring -six==1.16.0 \ - --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \ - --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254 - # via - # gcp-docuploader - # python-dateutil -twine==4.0.2 \ - --hash=sha256:929bc3c280033347a00f847236564d1c52a3e61b1ac2516c97c48f3ceab756d8 \ - --hash=sha256:9e102ef5fdd5a20661eb88fad46338806c3bd32cf1db729603fe3697b1bc83c8 - # via -r requirements.in -typing-extensions==4.8.0 \ - --hash=sha256:8f92fc8806f9a6b641eaa5318da32b44d401efaac0f6678c9bc448ba3605faa0 \ - --hash=sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef - # via -r requirements.in -urllib3==2.0.7 \ - 
-    --hash=sha256:c97dfde1f7bd43a71c8d2a58e369e9b2bf692d1334ea9f9cae55add7d0dd0f84 \
-    --hash=sha256:fdb6d215c776278489906c2f8916e6e7d4f5a9b602ccbcfdf7f016fc8da0596e
-    # via
-    #   requests
-    #   twine
-virtualenv==20.24.6 \
-    --hash=sha256:02ece4f56fbf939dbbc33c0715159951d6bf14aaf5457b092e4548e1382455af \
-    --hash=sha256:520d056652454c5098a00c0f073611ccbea4c79089331f60bf9d7ba247bb7381
-    # via nox
-wheel==0.41.3 \
-    --hash=sha256:488609bc63a29322326e05560731bf7bfea8e48ad646e1f5e40d366607de0942 \
-    --hash=sha256:4d4987ce51a49370ea65c0bfd2234e8ce80a12780820d9dc462597a6e60d0841
-    # via -r requirements.in
-zipp==3.17.0 \
-    --hash=sha256:0e923e726174922dce09c53c59ad483ff7bbb8e572e00c7f7c46b88556409f31 \
-    --hash=sha256:84e64a1c28cf7e91ed2078bb8cc8c259cb19b76942096c8d7b84947690cabaf0
-    # via importlib-metadata
-
-# The following packages are considered to be unsafe in a requirements file:
-setuptools==68.2.2 \
-    --hash=sha256:4ac1475276d2f1c48684874089fefcd83bd7162ddaafb81fac866ba0db282a87 \
-    --hash=sha256:b454a35605876da60632df1a60f736524eb73cc47bbc9f3f1ef1b644de74fd2a
-    # via -r requirements.in
diff --git a/.kokoro/samples/python3.12/common.cfg b/.kokoro/samples/python3.12/common.cfg
new file mode 100644
index 000000000..8a5840a72
--- /dev/null
+++ b/.kokoro/samples/python3.12/common.cfg
@@ -0,0 +1,40 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+  define_artifacts {
+    regex: "**/*sponge_log.xml"
+  }
+}
+
+# Specify which tests to run
+env_vars: {
+  key: "RUN_TESTS_SESSION"
+  value: "py-3.12"
+}
+
+# Declare build specific Cloud project.
+env_vars: {
+  key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
+  value: "python-docs-samples-tests-312"
+}
+
+env_vars: {
+  key: "TRAMPOLINE_BUILD_FILE"
+  value: "github/python-api-core/.kokoro/test-samples.sh"
+}
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+  key: "TRAMPOLINE_IMAGE"
+  value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker"
+}
+
+# Download secrets for samples
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Use the trampoline script to run in docker.
+build_file: "python-api-core/.kokoro/trampoline_v2.sh"
\ No newline at end of file
diff --git a/.kokoro/samples/python3.12/continuous.cfg b/.kokoro/samples/python3.12/continuous.cfg
new file mode 100644
index 000000000..a1c8d9759
--- /dev/null
+++ b/.kokoro/samples/python3.12/continuous.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+  key: "INSTALL_LIBRARY_FROM_SOURCE"
+  value: "True"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.12/periodic-head.cfg b/.kokoro/samples/python3.12/periodic-head.cfg
new file mode 100644
index 000000000..a18c0cfc6
--- /dev/null
+++ b/.kokoro/samples/python3.12/periodic-head.cfg
@@ -0,0 +1,11 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+  key: "INSTALL_LIBRARY_FROM_SOURCE"
+  value: "True"
+}
+
+env_vars: {
+  key: "TRAMPOLINE_BUILD_FILE"
+  value: "github/python-api-core/.kokoro/test-samples-against-head.sh"
+}
diff --git a/.kokoro/samples/python3.12/periodic.cfg b/.kokoro/samples/python3.12/periodic.cfg
new file mode 100644
index 000000000..71cd1e597
--- /dev/null
+++ b/.kokoro/samples/python3.12/periodic.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+  key: "INSTALL_LIBRARY_FROM_SOURCE"
+  value: "False"
+}
diff --git a/.kokoro/samples/python3.12/presubmit.cfg b/.kokoro/samples/python3.12/presubmit.cfg
new file mode 100644
index 000000000..a1c8d9759
--- /dev/null
+++ b/.kokoro/samples/python3.12/presubmit.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+  key: "INSTALL_LIBRARY_FROM_SOURCE"
+  value: "True"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.13/common.cfg b/.kokoro/samples/python3.13/common.cfg
new file mode 100644
index 000000000..2a4199f40
--- /dev/null
+++ b/.kokoro/samples/python3.13/common.cfg
@@ -0,0 +1,40 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+  define_artifacts {
+    regex: "**/*sponge_log.xml"
+  }
+}
+
+# Specify which tests to run
+env_vars: {
+  key: "RUN_TESTS_SESSION"
+  value: "py-3.13"
+}
+
+# Declare build specific Cloud project.
+env_vars: {
+  key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
+  value: "python-docs-samples-tests-313"
+}
+
+env_vars: {
+  key: "TRAMPOLINE_BUILD_FILE"
+  value: "github/python-api-core/.kokoro/test-samples.sh"
+}
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+  key: "TRAMPOLINE_IMAGE"
+  value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker"
+}
+
+# Download secrets for samples
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Use the trampoline script to run in docker.
+build_file: "python-api-core/.kokoro/trampoline_v2.sh"
diff --git a/.kokoro/samples/python3.13/continuous.cfg b/.kokoro/samples/python3.13/continuous.cfg
new file mode 100644
index 000000000..a1c8d9759
--- /dev/null
+++ b/.kokoro/samples/python3.13/continuous.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+  key: "INSTALL_LIBRARY_FROM_SOURCE"
+  value: "True"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.13/periodic-head.cfg b/.kokoro/samples/python3.13/periodic-head.cfg
new file mode 100644
index 000000000..a18c0cfc6
--- /dev/null
+++ b/.kokoro/samples/python3.13/periodic-head.cfg
@@ -0,0 +1,11 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+  key: "INSTALL_LIBRARY_FROM_SOURCE"
+  value: "True"
+}
+
+env_vars: {
+  key: "TRAMPOLINE_BUILD_FILE"
+  value: "github/python-api-core/.kokoro/test-samples-against-head.sh"
+}
diff --git a/.kokoro/samples/python3.13/periodic.cfg b/.kokoro/samples/python3.13/periodic.cfg
new file mode 100644
index 000000000..71cd1e597
--- /dev/null
+++ b/.kokoro/samples/python3.13/periodic.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+  key: "INSTALL_LIBRARY_FROM_SOURCE"
+  value: "False"
+}
diff --git a/.kokoro/samples/python3.13/presubmit.cfg b/.kokoro/samples/python3.13/presubmit.cfg
new file mode 100644
index 000000000..a1c8d9759
--- /dev/null
+++ b/.kokoro/samples/python3.13/presubmit.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+  key: "INSTALL_LIBRARY_FROM_SOURCE"
+  value: "True"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.14/common.cfg b/.kokoro/samples/python3.14/common.cfg
new file mode 100644
index 000000000..a083385d7
--- /dev/null
+++ b/.kokoro/samples/python3.14/common.cfg
@@ -0,0 +1,40 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+  define_artifacts {
+    regex: "**/*sponge_log.xml"
+  }
+}
+
+# Specify which tests to run
+env_vars: {
+  key: "RUN_TESTS_SESSION"
+  value: "py-3.14"
+}
+
+# Declare build specific Cloud project.
+env_vars: {
+  key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
+  value: "python-docs-samples-tests-314"
+}
+
+env_vars: {
+  key: "TRAMPOLINE_BUILD_FILE"
+  value: "github/python-api-core/.kokoro/test-samples.sh"
+}
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+  key: "TRAMPOLINE_IMAGE"
+  value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker"
+}
+
+# Download secrets for samples
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Use the trampoline script to run in docker.
+build_file: "python-api-core/.kokoro/trampoline_v2.sh"
diff --git a/.kokoro/samples/python3.14/continuous.cfg b/.kokoro/samples/python3.14/continuous.cfg
new file mode 100644
index 000000000..a1c8d9759
--- /dev/null
+++ b/.kokoro/samples/python3.14/continuous.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+  key: "INSTALL_LIBRARY_FROM_SOURCE"
+  value: "True"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.14/periodic-head.cfg b/.kokoro/samples/python3.14/periodic-head.cfg
new file mode 100644
index 000000000..a18c0cfc6
--- /dev/null
+++ b/.kokoro/samples/python3.14/periodic-head.cfg
@@ -0,0 +1,11 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+  key: "INSTALL_LIBRARY_FROM_SOURCE"
+  value: "True"
+}
+
+env_vars: {
+  key: "TRAMPOLINE_BUILD_FILE"
+  value: "github/python-api-core/.kokoro/test-samples-against-head.sh"
+}
diff --git a/.kokoro/samples/python3.14/periodic.cfg b/.kokoro/samples/python3.14/periodic.cfg
new file mode 100644
index 000000000..71cd1e597
--- /dev/null
+++ b/.kokoro/samples/python3.14/periodic.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+  key: "INSTALL_LIBRARY_FROM_SOURCE"
+  value: "False"
+}
diff --git a/.kokoro/samples/python3.14/presubmit.cfg b/.kokoro/samples/python3.14/presubmit.cfg
new file mode 100644
index 000000000..a1c8d9759
--- /dev/null
+++ b/.kokoro/samples/python3.14/presubmit.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+  key: "INSTALL_LIBRARY_FROM_SOURCE"
+  value: "True"
+}
\ No newline at end of file
diff --git a/.kokoro/test-samples-against-head.sh b/.kokoro/test-samples-against-head.sh
index 63ac41dfa..e9d8bd79a 100755
--- a/.kokoro/test-samples-against-head.sh
+++ b/.kokoro/test-samples-against-head.sh
@@ -1,5 +1,5 @@
 #!/bin/bash
-# Copyright 2023 Google LLC
+# Copyright 2024 Google LLC
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/.kokoro/test-samples-impl.sh b/.kokoro/test-samples-impl.sh
index 5a0f5fab6..53e365bc4 100755
--- a/.kokoro/test-samples-impl.sh
+++ b/.kokoro/test-samples-impl.sh
@@ -1,5 +1,5 @@
 #!/bin/bash
-# Copyright 2023 Google LLC
+# Copyright 2024 Google LLC
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -33,7 +33,8 @@ export PYTHONUNBUFFERED=1
 env | grep KOKORO
 
 # Install nox
-python3.9 -m pip install --upgrade --quiet nox
+# `virtualenv==20.26.6` is added for Python 3.7 compatibility
+python3.9 -m pip install --upgrade --quiet nox virtualenv==20.26.6
 
 # Use secrets acessor service account to get secrets
 if [[ -f "${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" ]]; then
diff --git a/.kokoro/test-samples.sh b/.kokoro/test-samples.sh
index 50b35a48c..7933d8201 100755
--- a/.kokoro/test-samples.sh
+++ b/.kokoro/test-samples.sh
@@ -1,5 +1,5 @@
 #!/bin/bash
-# Copyright 2023 Google LLC
+# Copyright 2024 Google LLC
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/.kokoro/trampoline.sh b/.kokoro/trampoline.sh index d85b1f267..48f796997 100755 --- a/.kokoro/trampoline.sh +++ b/.kokoro/trampoline.sh @@ -1,5 +1,5 @@ #!/bin/bash -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/.kokoro/trampoline_v2.sh b/.kokoro/trampoline_v2.sh index 59a7cf3a9..35fa52923 100755 --- a/.kokoro/trampoline_v2.sh +++ b/.kokoro/trampoline_v2.sh @@ -1,5 +1,5 @@ #!/usr/bin/env bash -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/.librarian/state.yaml b/.librarian/state.yaml new file mode 100644 index 000000000..1a97264b7 --- /dev/null +++ b/.librarian/state.yaml @@ -0,0 +1,10 @@ +image: us-central1-docker.pkg.dev/cloud-sdk-librarian-prod/images-prod/python-librarian-generator@sha256:c8612d3fffb3f6a32353b2d1abd16b61e87811866f7ec9d65b59b02eb452a620 +libraries: + - id: google-api-core + version: 2.28.1 + apis: [] + source_roots: + - . + preserve_regex: [] + remove_regex: [] + tag_format: v{version} diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 6a8e16950..1d74695f7 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,4 +1,4 @@ -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/.trampolinerc b/.trampolinerc index a7dfeb42c..008015237 100644 --- a/.trampolinerc +++ b/.trampolinerc @@ -1,4 +1,4 @@ -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
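The per-version sample configs above coordinate with the shared Kokoro scripts purely through environment variables (RUN_TESTS_SESSION, INSTALL_LIBRARY_FROM_SOURCE, BUILD_SPECIFIC_GCLOUD_PROJECT). A minimal sketch of how a harness could consume them follows; this is illustrative only and is not the contents of test-samples.sh or the repo's actual noxfile:

```python
# Illustrative only: consuming the env vars declared in the configs above.
import os
import subprocess

# e.g. "py-3.12", "py-3.13", "py-3.14", per RUN_TESTS_SESSION above.
session = os.environ["RUN_TESTS_SESSION"]

# Continuous/presubmit builds set this to "True" to test the library at HEAD.
if os.environ.get("INSTALL_LIBRARY_FROM_SOURCE") == "True":
    subprocess.run(["python", "-m", "pip", "install", "-e", "."], check=True)

# Hand off to the nox session selected by the config.
subprocess.run(["nox", "-s", session], check=True)
```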
diff --git a/CHANGELOG.md b/CHANGELOG.md index cf7e3d202..7b2a256bb 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,224 @@ [1]: https://pypi.org/project/google-api-core/#history +## [2.28.1](https://github.com/googleapis/python-api-core/compare/v2.28.0...v2.28.1) (2025-10-28) + + +### Bug Fixes + +* Remove dependency on packaging and pkg_resources ([#852](https://github.com/googleapis/python-api-core/issues/852)) ([ca59a86](https://github.com/googleapis/python-api-core/commit/ca59a863b08a79c2bf0607f9085de1417422820b)) + +## [2.28.0](https://github.com/googleapis/python-api-core/compare/v2.27.0...v2.28.0) (2025-10-24) + + +### Features + +* Provide and use Python version support check ([#832](https://github.com/googleapis/python-api-core/issues/832)) ([d36e896](https://github.com/googleapis/python-api-core/commit/d36e896f98a2371c4d58ce1a7a3bc1a77a081836)) + +## [2.27.0](https://github.com/googleapis/python-api-core/compare/v2.26.0...v2.27.0) (2025-10-22) + + +### Features + +* Support for async bidi streaming apis ([#836](https://github.com/googleapis/python-api-core/issues/836)) ([9530548](https://github.com/googleapis/python-api-core/commit/95305480d234b6dd0903960db020e55125a997e0)) + +## [2.26.0](https://github.com/googleapis/python-api-core/compare/v2.25.2...v2.26.0) (2025-10-08) + + +### Features + +* Add trove classifier for Python 3.14 ([#842](https://github.com/googleapis/python-api-core/issues/842)) ([43690de](https://github.com/googleapis/python-api-core/commit/43690de33a23321d52ab856e2bf253590e1a9357)) + +## [2.25.2](https://github.com/googleapis/python-api-core/compare/v2.25.1...v2.25.2) (2025-10-01) + + +### Bug Fixes + +* Deprecate credentials_file argument ([#841](https://github.com/googleapis/python-api-core/issues/841)) ([324eb74](https://github.com/googleapis/python-api-core/commit/324eb7464d6ade9a8c2e413d4695bc7d7adfcb3d)) +* Fix async tests and round-off error in test expectations ([#837](https://github.com/googleapis/python-api-core/issues/837)) ([14a5978](https://github.com/googleapis/python-api-core/commit/14a59789e144905bd6c82180ad07a52bf1f84f02)) + +## [2.25.1](https://github.com/googleapis/python-api-core/compare/v2.25.0...v2.25.1) (2025-06-02) + + +### Bug Fixes + +* Allow BackgroundConsumer To Inform Caller of Fatal Exceptions with Optional Callback ([3206c01](https://github.com/googleapis/python-api-core/commit/3206c0170dda80a613bf257ebcf3b78c1a20465f)) + +## [2.25.0](https://github.com/googleapis/python-api-core/compare/v2.24.2...v2.25.0) (2025-05-06) + + +### Features + +* Add protobuf runtime version to `x-goog-api-client` header ([#812](https://github.com/googleapis/python-api-core/issues/812)) ([118bd96](https://github.com/googleapis/python-api-core/commit/118bd96f3907234351972409834ab5309cdfcee4)) +* Support dynamic retry backoff values ([#793](https://github.com/googleapis/python-api-core/issues/793)) ([70697a3](https://github.com/googleapis/python-api-core/commit/70697a3e39c389768e724fddacb3c9b97d609384)) + + +### Bug Fixes + +* Resolve issue where pre-release versions of dependencies are installed ([#808](https://github.com/googleapis/python-api-core/issues/808)) ([1ca7973](https://github.com/googleapis/python-api-core/commit/1ca7973a395099403be1a99c7c4583a8f22d5d8e)) + +## [2.24.2](https://github.com/googleapis/python-api-core/compare/v2.24.1...v2.24.2) (2025-03-06) + + +### Bug Fixes + +* **deps:** Allow protobuf 6.x ([#804](https://github.com/googleapis/python-api-core/issues/804)) 
([687be7c](https://github.com/googleapis/python-api-core/commit/687be7cbf629a61feb43ef37d3d920fa32b2d636)) + +## [2.24.1](https://github.com/googleapis/python-api-core/compare/v2.24.0...v2.24.1) (2025-01-24) + + +### Bug Fixes + +* Memory leak in bidi classes ([#770](https://github.com/googleapis/python-api-core/issues/770)) ([c1b8afa](https://github.com/googleapis/python-api-core/commit/c1b8afa4e2abe256e70651defccdc285f104ed19)) +* Resolve the issue where rpc timeout of 0 is used when timeout expires ([#776](https://github.com/googleapis/python-api-core/issues/776)) ([a5604a5](https://github.com/googleapis/python-api-core/commit/a5604a55070c6d92618d078191bf99f4c168d5f6)) + + +### Documentation + +* Add warnings regarding consuming externally sourced credentials ([#783](https://github.com/googleapis/python-api-core/issues/783)) ([0ec1825](https://github.com/googleapis/python-api-core/commit/0ec18254b90721684679a98bcacef4615467a227)) + +## [2.24.0](https://github.com/googleapis/python-api-core/compare/v2.23.0...v2.24.0) (2024-12-06) + + +### Features + +* Add automatic logging config to support debug logging ([#754](https://github.com/googleapis/python-api-core/issues/754)) ([d18d9b5](https://github.com/googleapis/python-api-core/commit/d18d9b5131162b44eebcc0859a7aca1198a2ac06)) +* Update recognized logging fields ([#766](https://github.com/googleapis/python-api-core/issues/766)) ([5f80f77](https://github.com/googleapis/python-api-core/commit/5f80f778bc25d878b3187c6138077ad8c6bcd35f)) + +## [2.23.0](https://github.com/googleapis/python-api-core/compare/v2.22.0...v2.23.0) (2024-11-11) + + +### Features + +* Migrate to pyproject.toml ([#736](https://github.com/googleapis/python-api-core/issues/736)) ([159e9a4](https://github.com/googleapis/python-api-core/commit/159e9a49525937f18a55c38136aae32575424d55)) + +## [2.22.0](https://github.com/googleapis/python-api-core/compare/v2.21.0...v2.22.0) (2024-10-25) + + +### Features + +* Add support for python 3.13 ([#696](https://github.com/googleapis/python-api-core/issues/696)) ([46b3d3a](https://github.com/googleapis/python-api-core/commit/46b3d3abaa1bae28e9d788d7c3006224cd6f74d5)) + + +### Bug Fixes + +* Add type hints to ClientOptions ([#735](https://github.com/googleapis/python-api-core/issues/735)) ([b91ed19](https://github.com/googleapis/python-api-core/commit/b91ed19210148dfa49ec790c4dd5f4a7bff80954)) +* Improve `Any` decode error ([#712](https://github.com/googleapis/python-api-core/issues/712)) ([0d5ed37](https://github.com/googleapis/python-api-core/commit/0d5ed37c96f9b40bccae98e228163a88abeb1763)) +* Require proto-plus >= 1.25.0 for Python 3.13 ([#740](https://github.com/googleapis/python-api-core/issues/740)) ([a26313e](https://github.com/googleapis/python-api-core/commit/a26313e1cb12e44aa498f12622edccc0c83ba0c3)) +* Switch to unittest.mock from mock ([#713](https://github.com/googleapis/python-api-core/issues/713)) ([8c53381](https://github.com/googleapis/python-api-core/commit/8c533819b7e212aa2f1d695a7ce08629f4fb2daf)) + +## [2.21.0](https://github.com/googleapis/python-api-core/compare/v2.20.0...v2.21.0) (2024-10-07) + + +### Features + +* Add support for asynchronous long running operations ([#724](https://github.com/googleapis/python-api-core/issues/724)) ([aaed69b](https://github.com/googleapis/python-api-core/commit/aaed69b6f1d694cd7e561e2aa03fdd8d6cfb369a)) + + +### Bug Fixes + +* Set chunk size for async stream content ([#702](https://github.com/googleapis/python-api-core/issues/702)) 
([45b8a6d](https://github.com/googleapis/python-api-core/commit/45b8a6db5a5c75acdd8be896d0152f11608c7e51))
+
+## [2.20.0](https://github.com/googleapis/python-api-core/compare/v2.19.2...v2.20.0) (2024-09-18)
+
+
+### Features
+
+* Add async unsupported parameter exception ([#694](https://github.com/googleapis/python-api-core/issues/694)) ([8c137fe](https://github.com/googleapis/python-api-core/commit/8c137feb6e880fdd93d1248d9b6c10002dc3c096))
+* Add support for asynchronous rest streaming ([#686](https://github.com/googleapis/python-api-core/issues/686)) ([1b7bb6d](https://github.com/googleapis/python-api-core/commit/1b7bb6d1b721e4ee1561e8e4a347846d7fdd7c27))
+* Add support for creating exceptions from an asynchronous response ([#688](https://github.com/googleapis/python-api-core/issues/688)) ([1c4b0d0](https://github.com/googleapis/python-api-core/commit/1c4b0d079f2103a7b5562371a7bd1ada92528de3))
+
+## [2.19.2](https://github.com/googleapis/python-api-core/compare/v2.19.1...v2.19.2) (2024-08-16)
+
+
+### Bug Fixes
+
+* Fail gracefully if the `rpc_status` module could not be imported ([#680](https://github.com/googleapis/python-api-core/issues/680)) ([7ccbf57](https://github.com/googleapis/python-api-core/commit/7ccbf5738fa236649f9a155055c71789362b5c4c))
+
+## [2.19.1](https://github.com/googleapis/python-api-core/compare/v2.19.0...v2.19.1) (2024-06-19)
+
+
+### Bug Fixes
+
+* Add support for protobuf 5.x ([#644](https://github.com/googleapis/python-api-core/issues/644)) ([fda0ca6](https://github.com/googleapis/python-api-core/commit/fda0ca6f0664ac5044671591ed62618175a7393f))
+* Ignore unknown fields in rest streaming ([#651](https://github.com/googleapis/python-api-core/issues/651)) ([1203fb9](https://github.com/googleapis/python-api-core/commit/1203fb97d2685535f89113e944c4764c1deb595e))
+
+## [2.19.0](https://github.com/googleapis/python-api-core/compare/v2.18.0...v2.19.0) (2024-04-29)
+
+
+### Features
+
+* Add google.api_core.version_header ([#638](https://github.com/googleapis/python-api-core/issues/638)) ([a7b53e9](https://github.com/googleapis/python-api-core/commit/a7b53e9e9a7deb88baf92a2827958429e3677069))
+
+## [2.18.0](https://github.com/googleapis/python-api-core/compare/v2.17.1...v2.18.0) (2024-03-20)
+
+
+### Features
+
+* Add common logic for supporting universe domain ([#621](https://github.com/googleapis/python-api-core/issues/621)) ([94f2ca3](https://github.com/googleapis/python-api-core/commit/94f2ca3b4d094e6e10154634d3463d07ebea2035))
+
+
+### Bug Fixes
+
+* Add _registered_method to grpc ChannelStub ([#614](https://github.com/googleapis/python-api-core/issues/614)) ([5eaaea8](https://github.com/googleapis/python-api-core/commit/5eaaea8a989f8bdbdb5fbc95a155a20837c87f42))
+* **deps:** Require proto-plus >= 1.22.3 ([#626](https://github.com/googleapis/python-api-core/issues/626)) ([4fed37c](https://github.com/googleapis/python-api-core/commit/4fed37cbc32122f156e38250b5fa8b2b08a787a1))
+
+## [2.17.1](https://github.com/googleapis/python-api-core/compare/v2.17.0...v2.17.1) (2024-02-13)
+
+
+### Bug Fixes
+
+* Resolve issue handling protobuf responses in rest streaming ([#604](https://github.com/googleapis/python-api-core/issues/604)) ([bcebc92](https://github.com/googleapis/python-api-core/commit/bcebc92eca69dae81c5e546d526c92b164a6b3b4))
+
+## [2.17.0](https://github.com/googleapis/python-api-core/compare/v2.16.2...v2.17.0) (2024-02-06)
+
+
+### Features
+
+* Add attempt_direct_path argument to create_channel ([#583](https://github.com/googleapis/python-api-core/issues/583))
([94726e7](https://github.com/googleapis/python-api-core/commit/94726e739698035b00667983f854c600252abd28)) + + +### Bug Fixes + +* Retry constructors methods support None ([#592](https://github.com/googleapis/python-api-core/issues/592)) ([416203c](https://github.com/googleapis/python-api-core/commit/416203c1888934670bfeccafe5f5469f87314512)) + +## [2.16.2](https://github.com/googleapis/python-api-core/compare/v2.16.1...v2.16.2) (2024-02-02) + + +### Bug Fixes + +* Spelling error `a,out` -> `amount` ([#596](https://github.com/googleapis/python-api-core/issues/596)) ([88688b1](https://github.com/googleapis/python-api-core/commit/88688b1625c4dab0df6124a0560f550eb322500f)) + +## [2.16.1](https://github.com/googleapis/python-api-core/compare/v2.16.0...v2.16.1) (2024-01-30) + + +### Bug Fixes + +* Fix broken import for google.api_core.retry_async.AsyncRetry ([#587](https://github.com/googleapis/python-api-core/issues/587)) ([ac012c0](https://github.com/googleapis/python-api-core/commit/ac012c04c69b8bbe72962f0d0d9e9536c0b4a524)) + +## [2.16.0](https://github.com/googleapis/python-api-core/compare/v2.15.0...v2.16.0) (2024-01-29) + + +### Features + +* Retry and retry_async support streaming rpcs ([#495](https://github.com/googleapis/python-api-core/issues/495)) ([17ff5f1](https://github.com/googleapis/python-api-core/commit/17ff5f1d83a9a6f50a0226fb0e794634bd584f17)) + +## [2.15.0](https://github.com/googleapis/python-api-core/compare/v2.14.0...v2.15.0) (2023-12-07) + + +### Features + +* Add support for Python 3.12 ([#557](https://github.com/googleapis/python-api-core/issues/557)) ([091b4f1](https://github.com/googleapis/python-api-core/commit/091b4f1c7fcc59c3f2a02ee44fd3c30b78423f12)) +* Add type annotations to wrapped grpc calls ([#554](https://github.com/googleapis/python-api-core/issues/554)) ([fc12b40](https://github.com/googleapis/python-api-core/commit/fc12b40bfc6e0c4bb313196e2e3a9c9374ce1c45)) +* Add universe_domain argument to ClientOptions ([3069ef4](https://github.com/googleapis/python-api-core/commit/3069ef4b9123ddb64841cbb7bbb183b53d502e0a)) +* Introduce compatibility with native namespace packages ([#561](https://github.com/googleapis/python-api-core/issues/561)) ([bd82827](https://github.com/googleapis/python-api-core/commit/bd82827108f1eeb6c05cfacf6c044b2afacc18a2)) + + +### Bug Fixes + +* Fix regression in `bidi` causing `Thread-ConsumeBidirectionalStream caught unexpected exception and will exit` ([#562](https://github.com/googleapis/python-api-core/issues/562)) ([40c8ae0](https://github.com/googleapis/python-api-core/commit/40c8ae0cf1f797e31e106461164e22db4fb2d3d9)) +* Replace deprecated `datetime.datetime.utcnow()` ([#552](https://github.com/googleapis/python-api-core/issues/552)) ([448923a](https://github.com/googleapis/python-api-core/commit/448923acf277a70e8704c949311bf4feaef8cab6)), closes [#540](https://github.com/googleapis/python-api-core/issues/540) + ## [2.14.0](https://github.com/googleapis/python-api-core/compare/v2.13.1...v2.14.0) (2023-11-09) diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index 13d7a5168..0ac24bc08 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -21,7 +21,7 @@ In order to add a feature: documentation. - The feature must work fully on the following CPython versions: - 3.7, 3.8, 3.9, 3.10, and 3.11 on both UNIX and Windows. + 3.7, 3.8, 3.9, 3.10, 3.11, 3.12, 3.13 and 3.14 on both UNIX and Windows. 
- The feature must not add unnecessary dependencies (where "unnecessary" is of course subjective, but new dependencies should @@ -71,7 +71,7 @@ We use `nox `__ to instrument our tests. - To run a single unit test:: - $ nox -s unit-3.11 -- -k + $ nox -s unit-3.13 -- -k .. note:: @@ -202,12 +202,18 @@ We support: - `Python 3.9`_ - `Python 3.10`_ - `Python 3.11`_ +- `Python 3.12`_ +- `Python 3.13`_ +- `Python 3.14`_ .. _Python 3.7: https://docs.python.org/3.7/ .. _Python 3.8: https://docs.python.org/3.8/ .. _Python 3.9: https://docs.python.org/3.9/ .. _Python 3.10: https://docs.python.org/3.10/ .. _Python 3.11: https://docs.python.org/3.11/ +.. _Python 3.12: https://docs.python.org/3.12/ +.. _Python 3.13: https://docs.python.org/3.13/ +.. _Python 3.14: https://docs.python.org/3.14/ Supported versions can be found in our ``noxfile.py`` `config`_. diff --git a/MANIFEST.in b/MANIFEST.in index e0a667053..d6814cd60 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- # -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/docs/conf.py b/docs/conf.py index 588e983f9..ad4723c00 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/docs/retry.rst b/docs/retry.rst index 97a7f2ca0..6e165f568 100644 --- a/docs/retry.rst +++ b/docs/retry.rst @@ -10,4 +10,5 @@ Retry in AsyncIO .. automodule:: google.api_core.retry_async :members: + :noindex: :show-inheritance: diff --git a/google/__init__.py b/google/__init__.py deleted file mode 100644 index 9f1d54910..000000000 --- a/google/__init__.py +++ /dev/null @@ -1,25 +0,0 @@ -# Copyright 2016 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Google namespace package.""" - -try: - import pkg_resources - - pkg_resources.declare_namespace(__name__) -except ImportError: - import pkgutil - - # See: https://github.com/python/mypy/issues/1422 - __path__ = pkgutil.extend_path(__path__, __name__) # type: ignore diff --git a/google/api_core/__init__.py b/google/api_core/__init__.py index b80ea3726..a52ffe874 100644 --- a/google/api_core/__init__.py +++ b/google/api_core/__init__.py @@ -17,6 +17,25 @@ This package contains common code and utilities used by Google client libraries. """ +from google.api_core import _python_package_support +from google.api_core import _python_version_support from google.api_core import version as api_core_version __version__ = api_core_version.__version__ + +# NOTE: Until dependent artifacts require this version of +# google.api_core, the functionality below must be made available +# manually in those artifacts. 
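+#
+# For example, a downstream library would run the same checks for itself
+# (hypothetical package name shown):
+#
+#   from google.api_core import check_python_version, check_dependency_versions
+#   check_python_version(package="google.cloud.example")
+#   check_dependency_versions("google.cloud.example")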
+
+# expose dependency checks for external callers
+check_python_version = _python_version_support.check_python_version
+check_dependency_versions = _python_package_support.check_dependency_versions
+parse_version_to_tuple = _python_package_support.parse_version_to_tuple
+warn_deprecation_for_versions_less_than = (
+    _python_package_support.warn_deprecation_for_versions_less_than
+)
+DependencyConstraint = _python_package_support.DependencyConstraint
+
+# perform version checks against api_core, and emit warnings if needed
+check_python_version(package="google.api_core")
+check_dependency_versions("google.api_core")
diff --git a/google/api_core/_python_package_support.py b/google/api_core/_python_package_support.py
new file mode 100644
index 000000000..06da2bb00
--- /dev/null
+++ b/google/api_core/_python_package_support.py
@@ -0,0 +1,234 @@
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Code to check versions of dependencies used by Google Cloud Client Libraries."""
+
+import warnings
+import sys
+from typing import Optional, Tuple
+
+from collections import namedtuple
+
+from ._python_version_support import (
+    _flatten_message,
+    _get_distribution_and_import_packages,
+)
+
+if sys.version_info >= (3, 8):
+    from importlib import metadata
+else:
+    # TODO(https://github.com/googleapis/python-api-core/issues/835): Remove
+    # this code path once we drop support for Python 3.7
+    import importlib_metadata as metadata
+
+ParsedVersion = Tuple[int, ...]
+
+# Here we list all the packages for which we want to issue warnings
+# about deprecated and unsupported versions.
+DependencyConstraint = namedtuple(
+    "DependencyConstraint",
+    ["package_name", "minimum_fully_supported_version", "recommended_version"],
+)
+_PACKAGE_DEPENDENCY_WARNINGS = [
+    DependencyConstraint(
+        "google.protobuf",
+        minimum_fully_supported_version="4.25.8",
+        recommended_version="6.x",
+    )
+]
+
+
+DependencyVersion = namedtuple("DependencyVersion", ["version", "version_string"])
+# Version string we provide in a DependencyVersion when we can't determine
+# the version of a package.
+UNKNOWN_VERSION_STRING = "--"
+
+
+def parse_version_to_tuple(version_string: str) -> ParsedVersion:
+    """Safely convert a semantic version string to a comparable tuple of integers.
+
+    Example: "4.25.8" -> (4, 25, 8)
+    Ignores non-numeric parts and handles common version formats.
+
+    Args:
+        version_string: Version string in the format "x.y.z".
+
+    Returns:
+        Tuple of integers for the parsed version string.
+    """
+    parts = []
+    for part in version_string.split("."):
+        try:
+            parts.append(int(part))
+        except ValueError:
+            # If it's a non-numeric part (e.g., '1.0.0b1' -> 'b1'), stop here.
+            # This is a simplification compared to 'packaging.version.parse',
+            # but sufficient for comparing strictly numeric semantic versions.
+            break
+    return tuple(parts)
+
+
+def get_dependency_version(
+    dependency_name: str,
+) -> DependencyVersion:
+    """Get the parsed version of an installed package dependency.
+
+    This function checks for an installed package and returns its version
+    as a comparable tuple of integers for safe comparison. It handles
+    both modern (Python 3.8+) and legacy (Python 3.7) environments.
+
+    Args:
+        dependency_name: The distribution name of the package (e.g., 'requests').
+
+    Returns:
+        A DependencyVersion namedtuple with `version` (a tuple of integers) and
+        `version_string` attributes, or `DependencyVersion(None,
+        UNKNOWN_VERSION_STRING)` if the package is not found or
+        another error occurs during version discovery.
+
+    """
+    try:
+        version_string: str = metadata.version(dependency_name)
+        parsed_version = parse_version_to_tuple(version_string)
+        return DependencyVersion(parsed_version, version_string)
+    except Exception:
+        # Catch exceptions from metadata.version() (e.g., PackageNotFoundError)
+        # or errors during parse_version_to_tuple
+        return DependencyVersion(None, UNKNOWN_VERSION_STRING)
+
+
+def warn_deprecation_for_versions_less_than(
+    consumer_import_package: str,
+    dependency_import_package: str,
+    minimum_fully_supported_version: str,
+    recommended_version: Optional[str] = None,
+    message_template: Optional[str] = None,
+):
+    """Issue any needed deprecation warnings for `dependency_import_package`.
+
+    If `dependency_import_package` is installed at a version less than
+    `minimum_fully_supported_version`, this issues a warning using either a
+    default `message_template` or one provided by the user. The default
+    `message_template` informs the user that they will not receive future
+    updates for `consumer_import_package` if `dependency_import_package` is
+    somehow pinned to a version lower than `minimum_fully_supported_version`.
+
+    Args:
+        consumer_import_package: The import name of the package that
+            needs `dependency_import_package`.
+        dependency_import_package: The import name of the dependency to check.
+        minimum_fully_supported_version: The `dependency_import_package` version
+            number below which a deprecation warning will be logged.
+        recommended_version: If provided, the recommended next version, which
+            could be higher than `minimum_fully_supported_version`.
+        message_template: A custom message template to replace the default
+            one. The `message_template` is expanded with `str.format()`, so
+            it may reference the following placeholders:
+            `dependency_import_package` and `consumer_import_package` (the
+            import package names); `dependency_distribution_package` and
+            `consumer_distribution_package` (the distribution package names);
+            `dependency_package` and `consumer_package` (pretty strings
+            combining the distribution and import packages for the dependency
+            and the consumer, respectively); and
+            `minimum_fully_supported_version`, `version_used`, and
+            `version_used_string`, which refer to the supported and
+            currently-used versions of the dependency.
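+
+    Example (the constraint values mirror `_PACKAGE_DEPENDENCY_WARNINGS`
+    above; the consumer package name is hypothetical)::
+
+        warn_deprecation_for_versions_less_than(
+            "google.cloud.example",
+            "google.protobuf",
+            minimum_fully_supported_version="4.25.8",
+            recommended_version="6.x",
+        )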
+
+    """
+    if (
+        not consumer_import_package
+        or not dependency_import_package
+        or not minimum_fully_supported_version
+    ):  # pragma: NO COVER
+        return
+
+    dependency_version = get_dependency_version(dependency_import_package)
+    if not dependency_version.version:
+        return
+
+    if dependency_version.version < parse_version_to_tuple(
+        minimum_fully_supported_version
+    ):
+        (
+            dependency_package,
+            dependency_distribution_package,
+        ) = _get_distribution_and_import_packages(dependency_import_package)
+        (
+            consumer_package,
+            consumer_distribution_package,
+        ) = _get_distribution_and_import_packages(consumer_import_package)
+
+        # Resolve the recommendation text up front. Note the f-string: the
+        # main template is expanded with str.format(), which does not recurse
+        # into substituted values.
+        recommendation = (
+            f" (we recommend {recommended_version})" if recommended_version else ""
+        )
+        message_template = message_template or _flatten_message(
+            """
+            DEPRECATION: Package {consumer_package} depends on
+            {dependency_package}, currently installed at version
+            {version_used_string}. Future updates to
+            {consumer_package} will require {dependency_package} at
+            version {minimum_fully_supported_version} or
+            higher{recommendation}. Please ensure that either (a) your
+            Python environment doesn't pin the version of
+            {dependency_package}, so that updates to
+            {consumer_package} can require the higher version, or (b)
+            you manually update your Python environment to use at
+            least version {minimum_fully_supported_version} of
+            {dependency_package}.
+            """
+        )
+        warnings.warn(
+            message_template.format(
+                consumer_import_package=consumer_import_package,
+                dependency_import_package=dependency_import_package,
+                consumer_distribution_package=consumer_distribution_package,
+                dependency_distribution_package=dependency_distribution_package,
+                dependency_package=dependency_package,
+                consumer_package=consumer_package,
+                minimum_fully_supported_version=minimum_fully_supported_version,
+                recommendation=recommendation,
+                version_used=dependency_version.version,
+                version_used_string=dependency_version.version_string,
+            ),
+            FutureWarning,
+        )
+
+
+def check_dependency_versions(
+    consumer_import_package: str, *package_dependency_warnings: DependencyConstraint
+):
+    """Bundle checks for all package dependencies.
+
+    This function can be called by all consumers of google.api_core to emit
+    needed deprecation warnings for any of their dependencies. The
+    dependencies to check can be passed as arguments; if none are provided,
+    the list defaults to `_PACKAGE_DEPENDENCY_WARNINGS`.
+
+    Args:
+        consumer_import_package: The import name of the calling package, whose
+            dependencies we're checking.
+        *package_dependency_warnings: A variable number of DependencyConstraint
+            objects, each specifying a dependency to check.
+    """
+    if not package_dependency_warnings:
+        package_dependency_warnings = tuple(_PACKAGE_DEPENDENCY_WARNINGS)
+    for package_info in package_dependency_warnings:
+        warn_deprecation_for_versions_less_than(
+            consumer_import_package,
+            package_info.package_name,
+            package_info.minimum_fully_supported_version,
+            recommended_version=package_info.recommended_version,
+        )
diff --git a/google/api_core/_python_version_support.py b/google/api_core/_python_version_support.py
new file mode 100644
index 000000000..d0c0dfe1b
--- /dev/null
+++ b/google/api_core/_python_version_support.py
@@ -0,0 +1,278 @@
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Code to check Python versions supported by Google Cloud Client Libraries.""" + +import datetime +import enum +import logging +import warnings +import sys +import textwrap +from typing import Any, List, NamedTuple, Optional, Dict, Tuple + + +_LOGGER = logging.getLogger(__name__) + + +class PythonVersionStatus(enum.Enum): + """Support status of a Python version in this client library artifact release. + + "Support", in this context, means that this release of a client library + artifact is configured to run on the currently configured version of + Python. + """ + + PYTHON_VERSION_STATUS_UNSPECIFIED = "PYTHON_VERSION_STATUS_UNSPECIFIED" + + PYTHON_VERSION_SUPPORTED = "PYTHON_VERSION_SUPPORTED" + """This Python version is fully supported, so the artifact running on this + version will have all features and bug fixes.""" + + PYTHON_VERSION_DEPRECATED = "PYTHON_VERSION_DEPRECATED" + """This Python version is still supported, but support will end within a + year. At that time, there will be no more releases for this artifact + running under this Python version.""" + + PYTHON_VERSION_EOL = "PYTHON_VERSION_EOL" + """This Python version has reached its end of life in the Python community + (see https://devguide.python.org/versions/), and this artifact will cease + supporting this Python version within the next few releases.""" + + PYTHON_VERSION_UNSUPPORTED = "PYTHON_VERSION_UNSUPPORTED" + """This release of the client library artifact may not be the latest, since + current releases no longer support this Python version.""" + + +class VersionInfo(NamedTuple): + """Hold release and support date information for a Python version.""" + + version: str + python_beta: Optional[datetime.date] + python_start: datetime.date + python_eol: datetime.date + gapic_start: Optional[datetime.date] = None # unused + gapic_deprecation: Optional[datetime.date] = None + gapic_end: Optional[datetime.date] = None + dep_unpatchable_cve: Optional[datetime.date] = None # unused + + +PYTHON_VERSIONS: List[VersionInfo] = [ + # Refer to https://devguide.python.org/versions/ and the PEPs linked therefrom. 
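+    # Each entry below drives check_python_version(): by default, deprecation
+    # warnings begin DEPRECATION_WARNING_PERIOD before python_eol and support
+    # ends EOL_GRACE_PERIOD after it, unless explicit gapic_* dates override.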
+ VersionInfo( + version="3.7", + python_beta=None, + python_start=datetime.date(2018, 6, 27), + python_eol=datetime.date(2023, 6, 27), + ), + VersionInfo( + version="3.8", + python_beta=None, + python_start=datetime.date(2019, 10, 14), + python_eol=datetime.date(2024, 10, 7), + ), + VersionInfo( + version="3.9", + python_beta=datetime.date(2020, 5, 18), + python_start=datetime.date(2020, 10, 5), + python_eol=datetime.date(2025, 10, 5), + gapic_end=datetime.date(2025, 10, 5) + datetime.timedelta(days=90), + ), + VersionInfo( + version="3.10", + python_beta=datetime.date(2021, 5, 3), + python_start=datetime.date(2021, 10, 4), + python_eol=datetime.date(2026, 10, 4), # TODO: specify day when announced + ), + VersionInfo( + version="3.11", + python_beta=datetime.date(2022, 5, 8), + python_start=datetime.date(2022, 10, 24), + python_eol=datetime.date(2027, 10, 24), # TODO: specify day when announced + ), + VersionInfo( + version="3.12", + python_beta=datetime.date(2023, 5, 22), + python_start=datetime.date(2023, 10, 2), + python_eol=datetime.date(2028, 10, 2), # TODO: specify day when announced + ), + VersionInfo( + version="3.13", + python_beta=datetime.date(2024, 5, 8), + python_start=datetime.date(2024, 10, 7), + python_eol=datetime.date(2029, 10, 7), # TODO: specify day when announced + ), + VersionInfo( + version="3.14", + python_beta=datetime.date(2025, 5, 7), + python_start=datetime.date(2025, 10, 7), + python_eol=datetime.date(2030, 10, 7), # TODO: specify day when announced + ), +] + +PYTHON_VERSION_INFO: Dict[Tuple[int, int], VersionInfo] = {} +for info in PYTHON_VERSIONS: + major, minor = map(int, info.version.split(".")) + PYTHON_VERSION_INFO[(major, minor)] = info + + +LOWEST_TRACKED_VERSION = min(PYTHON_VERSION_INFO.keys()) +_FAKE_PAST_DATE = datetime.date.min + datetime.timedelta(days=900) +_FAKE_PAST_VERSION = VersionInfo( + version="0.0", + python_beta=_FAKE_PAST_DATE, + python_start=_FAKE_PAST_DATE, + python_eol=_FAKE_PAST_DATE, +) +_FAKE_FUTURE_DATE = datetime.date.max - datetime.timedelta(days=900) +_FAKE_FUTURE_VERSION = VersionInfo( + version="999.0", + python_beta=_FAKE_FUTURE_DATE, + python_start=_FAKE_FUTURE_DATE, + python_eol=_FAKE_FUTURE_DATE, +) +DEPRECATION_WARNING_PERIOD = datetime.timedelta(days=365) +EOL_GRACE_PERIOD = datetime.timedelta(weeks=1) + + +def _flatten_message(text: str) -> str: + """Dedent a multi-line string and flatten it into a single line.""" + return " ".join(textwrap.dedent(text).strip().split()) + + +# TODO(https://github.com/googleapis/python-api-core/issues/835): +# Remove once we no longer support Python 3.9. 
+# `importlib.metadata.packages_distributions()` is only supported in Python 3.10 and newer +# https://docs.python.org/3/library/importlib.metadata.html#importlib.metadata.packages_distributions +if sys.version_info < (3, 10): + + def _get_pypi_package_name(module_name): # pragma: NO COVER + """Determine the PyPI package name for a given module name.""" + return None + +else: + from importlib import metadata + + def _get_pypi_package_name(module_name): + """Determine the PyPI package name for a given module name.""" + try: + # Get the mapping of modules to distributions + module_to_distributions = metadata.packages_distributions() + + # Check if the module is found in the mapping + if module_name in module_to_distributions: # pragma: NO COVER + # The value is a list of distribution names, take the first one + return module_to_distributions[module_name][0] + except Exception as e: # pragma: NO COVER + _LOGGER.info( + "An error occurred while determining PyPI package name for %s: %s", + module_name, + e, + ) + + return None + + +def _get_distribution_and_import_packages(import_package: str) -> Tuple[str, Any]: + """Return a pretty string with distribution & import package names.""" + distribution_package = _get_pypi_package_name(import_package) + dependency_distribution_and_import_packages = ( + f"package {distribution_package} ({import_package})" + if distribution_package + else import_package + ) + return dependency_distribution_and_import_packages, distribution_package + + +def check_python_version( + package: str = "this package", today: Optional[datetime.date] = None +) -> PythonVersionStatus: + """Check the running Python version and issue a support warning if needed. + + Args: + today: The date to check against. Defaults to the current date. + + Returns: + The support status of the current Python version. + """ + today = today or datetime.date.today() + package_label, _ = _get_distribution_and_import_packages(package) + + python_version = sys.version_info + version_tuple = (python_version.major, python_version.minor) + py_version_str = sys.version.split()[0] + + version_info = PYTHON_VERSION_INFO.get(version_tuple) + + if not version_info: + if version_tuple < LOWEST_TRACKED_VERSION: + version_info = _FAKE_PAST_VERSION + else: + version_info = _FAKE_FUTURE_VERSION + + gapic_deprecation = version_info.gapic_deprecation or ( + version_info.python_eol - DEPRECATION_WARNING_PERIOD + ) + gapic_end = version_info.gapic_end or (version_info.python_eol + EOL_GRACE_PERIOD) + + def min_python(date: datetime.date) -> str: + """Find the minimum supported Python version for a given date.""" + for version, info in sorted(PYTHON_VERSION_INFO.items()): + if info.python_start <= date < info.python_eol: + return f"{version[0]}.{version[1]}" + return "at a currently supported version [https://devguide.python.org/versions]" + + if gapic_end < today: + message = _flatten_message( + f""" + You are using a non-supported Python version ({py_version_str}). + Google will not post any further updates to {package_label} + supporting this Python version. Please upgrade to the latest Python + version, or at least Python {min_python(today)}, and then update + {package_label}. + """ + ) + warnings.warn(message, FutureWarning) + return PythonVersionStatus.PYTHON_VERSION_UNSUPPORTED + + eol_date = version_info.python_eol + EOL_GRACE_PERIOD + if eol_date <= today <= gapic_end: + message = _flatten_message( + f""" + You are using a Python version ({py_version_str}) + past its end of life. 
Google will update {package_label} + with critical bug fixes on a best-effort basis, but not + with any other fixes or features. Please upgrade + to the latest Python version, or at least Python + {min_python(today)}, and then update {package_label}. + """ + ) + warnings.warn(message, FutureWarning) + return PythonVersionStatus.PYTHON_VERSION_EOL + + if gapic_deprecation <= today <= gapic_end: + message = _flatten_message( + f""" + You are using a Python version ({py_version_str}) which Google will + stop supporting in new releases of {package_label} once it reaches + its end of life ({version_info.python_eol}). Please upgrade to the + latest Python version, or at least Python + {min_python(version_info.python_eol)}, to continue receiving updates + for {package_label} past that date. + """ + ) + warnings.warn(message, FutureWarning) + return PythonVersionStatus.PYTHON_VERSION_DEPRECATED + + return PythonVersionStatus.PYTHON_VERSION_SUPPORTED diff --git a/google/api_core/_rest_streaming_base.py b/google/api_core/_rest_streaming_base.py new file mode 100644 index 000000000..3bc87a963 --- /dev/null +++ b/google/api_core/_rest_streaming_base.py @@ -0,0 +1,118 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Helpers for server-side streaming in REST.""" + +from collections import deque +import string +from typing import Deque, Union +import types + +import proto +import google.protobuf.message +from google.protobuf.json_format import Parse + + +class BaseResponseIterator: + """Base Iterator over REST API responses. This class should not be used directly. + + Args: + response_message_cls (Union[proto.Message, google.protobuf.message.Message]): A response + class expected to be returned from an API. + + Raises: + ValueError: If `response_message_cls` is not a subclass of `proto.Message` or `google.protobuf.message.Message`. + """ + + def __init__( + self, + response_message_cls: Union[proto.Message, google.protobuf.message.Message], + ): + self._response_message_cls = response_message_cls + # Contains a list of JSON responses ready to be sent to user. + self._ready_objs: Deque[str] = deque() + # Current JSON response being built. + self._obj = "" + # Keeps track of the nesting level within a JSON object. + self._level = 0 + # Keeps track whether HTTP response is currently sending values + # inside of a string value. + self._in_string = False + # Whether an escape symbol "\" was encountered. + self._escape_next = False + + self._grab = types.MethodType(self._create_grab(), self) + + def _process_chunk(self, chunk: str): + if self._level == 0: + if chunk[0] != "[": + raise ValueError( + "Can only parse array of JSON objects, instead got %s" % chunk + ) + for char in chunk: + if char == "{": + if self._level == 1: + # Level 1 corresponds to the outermost JSON object + # (i.e. the one we care about). 
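+                    # Starting a new top-level object, so reset the buffer.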
+ self._obj = "" + if not self._in_string: + self._level += 1 + self._obj += char + elif char == "}": + self._obj += char + if not self._in_string: + self._level -= 1 + if not self._in_string and self._level == 1: + self._ready_objs.append(self._obj) + elif char == '"': + # Helps to deal with an escaped quotes inside of a string. + if not self._escape_next: + self._in_string = not self._in_string + self._obj += char + elif char in string.whitespace: + if self._in_string: + self._obj += char + elif char == "[": + if self._level == 0: + self._level += 1 + else: + self._obj += char + elif char == "]": + if self._level == 1: + self._level -= 1 + else: + self._obj += char + else: + self._obj += char + self._escape_next = not self._escape_next if char == "\\" else False + + def _create_grab(self): + if issubclass(self._response_message_cls, proto.Message): + + def grab(this): + return this._response_message_cls.from_json( + this._ready_objs.popleft(), ignore_unknown_fields=True + ) + + return grab + elif issubclass(self._response_message_cls, google.protobuf.message.Message): + + def grab(this): + return Parse(this._ready_objs.popleft(), this._response_message_cls()) + + return grab + else: + raise ValueError( + "Response message class must be a subclass of proto.Message or google.protobuf.message.Message." + ) diff --git a/google/api_core/bidi.py b/google/api_core/bidi.py index 74abc4956..7f45c2af1 100644 --- a/google/api_core/bidi.py +++ b/google/api_core/bidi.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -"""Bi-directional streaming RPC helpers.""" +"""Helpers for synchronous bidirectional streaming RPCs.""" import collections import datetime @@ -22,6 +22,7 @@ import time from google.api_core import exceptions +from google.api_core.bidi_base import BidiRpcBase _LOGGER = logging.getLogger(__name__) _BIDIRECTIONAL_CONSUMER_NAME = "Thread-ConsumeBidirectionalStream" @@ -36,21 +37,6 @@ class _RequestQueueGenerator(object): otherwise open-ended set of requests to send through a request-streaming (or bidirectional) RPC. - The reason this is necessary is because gRPC takes an iterator as the - request for request-streaming RPCs. gRPC consumes this iterator in another - thread to allow it to block while generating requests for the stream. - However, if the generator blocks indefinitely gRPC will not be able to - clean up the thread as it'll be blocked on `next(iterator)` and not be able - to check the channel status to stop iterating. This helper mitigates that - by waiting on the queue with a timeout and checking the RPC state before - yielding. - - Finally, it allows for retrying without swapping queues because if it does - pull an item off the queue when the RPC is inactive, it'll immediately put - it back and then exit. This is necessary because yielding the item in this - case will cause gRPC to discard it. In practice, this means that the order - of messages is not guaranteed. If such a thing is necessary it would be - easy to use a priority queue. Example:: @@ -62,12 +48,6 @@ class _RequestQueueGenerator(object): print(response) q.put(...) - Note that it is possible to accomplish this behavior without "spinning" - (using a queue timeout). One possible way would be to use more threads to - multiplex the grpc end event with the queue, another possible way is to - use selectors and a custom event/queue object. 
Both of these approaches - are significant from an engineering perspective for small benefit - the - CPU consumed by spinning is pretty minuscule. Args: queue (queue_module.Queue): The request queue. @@ -91,10 +71,36 @@ def __init__(self, queue, period=1, initial_request=None): def _is_active(self): # Note: there is a possibility that this starts *before* the call # property is set. So we have to check if self.call is set before - # seeing if it's active. - return self.call is not None and self.call.is_active() + # seeing if it's active. We need to return True if self.call is None. + # See https://github.com/googleapis/python-api-core/issues/560. + return self.call is None or self.call.is_active() def __iter__(self): + # The reason this is necessary is because gRPC takes an iterator as the + # request for request-streaming RPCs. gRPC consumes this iterator in + # another thread to allow it to block while generating requests for + # the stream. However, if the generator blocks indefinitely gRPC will + # not be able to clean up the thread as it'll be blocked on + # `next(iterator)` and not be able to check the channel status to stop + # iterating. This helper mitigates that by waiting on the queue with + # a timeout and checking the RPC state before yielding. + # + # Finally, it allows for retrying without swapping queues because if + # it does pull an item off the queue when the RPC is inactive, it'll + # immediately put it back and then exit. This is necessary because + # yielding the item in this case will cause gRPC to discard it. In + # practice, this means that the order of messages is not guaranteed. + # If such a thing is necessary it would be easy to use a priority + # queue. + # + # Note that it is possible to accomplish this behavior without + # "spinning" (using a queue timeout). One possible way would be to use + # more threads to multiplex the grpc end event with the queue, another + # possible way is to use selectors and a custom event/queue object. + # Both of these approaches are significant from an engineering + # perspective for small benefit - the CPU consumed by spinning is + # pretty minuscule. + if self._initial_request is not None: if callable(self._initial_request): yield self._initial_request() @@ -200,7 +206,7 @@ def __repr__(self): ) -class BidiRpc(object): +class BidiRpc(BidiRpcBase): """A helper for consuming a bi-directional streaming RPC. This maps gRPC's built-in interface which uses a request iterator and a @@ -226,6 +232,8 @@ class BidiRpc(object): rpc.send(example_pb2.StreamingRpcRequest( data='example')) + rpc.close() + This does *not* retry the stream on errors. See :class:`ResumableBidiRpc`. Args: @@ -239,40 +247,14 @@ class BidiRpc(object): the request. """ - def __init__(self, start_rpc, initial_request=None, metadata=None): - self._start_rpc = start_rpc - self._initial_request = initial_request - self._rpc_metadata = metadata - self._request_queue = queue_module.Queue() - self._request_generator = None - self._is_active = False - self._callbacks = [] - self.call = None - - def add_done_callback(self, callback): - """Adds a callback that will be called when the RPC terminates. - - This occurs when the RPC errors or is successfully terminated. - - Args: - callback (Callable[[grpc.Future], None]): The callback to execute. - It will be provided with the same gRPC future as the underlying - stream which will also be a :class:`grpc.Call`. 
- """ - self._callbacks.append(callback) - - def _on_call_done(self, future): - # This occurs when the RPC errors or is successfully terminated. - # Note that grpc's "future" here can also be a grpc.RpcError. - # See note in https://github.com/grpc/grpc/issues/10885#issuecomment-302651331 - # that `grpc.RpcError` is also `grpc.call`. - for callback in self._callbacks: - callback(future) + def _create_queue(self): + """Create a queue for requests.""" + return queue_module.Queue() def open(self): """Opens the stream.""" if self.is_active: - raise ValueError("Can not open an already open stream.") + raise ValueError("Cannot open an already open stream.") request_generator = _RequestQueueGenerator( self._request_queue, initial_request=self._initial_request @@ -299,12 +281,14 @@ def open(self): def close(self): """Closes the stream.""" - if self.call is None: - return + if self.call is not None: + self.call.cancel() + # Put None in request queue to signal termination. self._request_queue.put(None) - self.call.cancel() self._request_generator = None + self._initial_request = None + self._callbacks = [] # Don't set self.call to None. Keep it around so that send/recv can # raise the error. @@ -319,7 +303,7 @@ def send(self, request): request (protobuf.Message): The request to send. """ if self.call is None: - raise ValueError("Can not send() on an RPC that has never been open()ed.") + raise ValueError("Cannot send on an RPC stream that has never been opened.") # Don't use self.is_active(), as ResumableBidiRpc will overload it # to mean something semantically different. @@ -340,20 +324,15 @@ def recv(self): protobuf.Message: The received message. """ if self.call is None: - raise ValueError("Can not recv() on an RPC that has never been open()ed.") + raise ValueError("Cannot recv on an RPC stream that has never been opened.") return next(self.call) @property def is_active(self): - """bool: True if this stream is currently open and active.""" + """True if this stream is currently open and active.""" return self.call is not None and self.call.is_active() - @property - def pending_requests(self): - """int: Returns an estimate of the number of queued requests.""" - return self._request_queue.qsize() - def _never_terminate(future_or_error): """By default, no errors cause BiDi termination.""" @@ -541,7 +520,7 @@ def _send(self, request): call = self.call if call is None: - raise ValueError("Can not send() on an RPC that has never been open()ed.") + raise ValueError("Cannot send on an RPC that has never been opened.") # Don't use self.is_active(), as ResumableBidiRpc will overload it # to mean something semantically different. @@ -560,7 +539,7 @@ def _recv(self): call = self.call if call is None: - raise ValueError("Can not recv() on an RPC that has never been open()ed.") + raise ValueError("Cannot recv on an RPC that has never been opened.") return next(call) @@ -621,12 +600,15 @@ def on_response(response): ``open()``ed yet. on_response (Callable[[protobuf.Message], None]): The callback to be called for every response on the stream. + on_fatal_exception (Callable[[Exception], None]): The callback to + be called on fatal errors during consumption. Default None. 
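+            For example, a caller might pass (hypothetical callback)::
+
+                def on_fatal_exception(exc):
+                    logging.error("Consumer terminated: %s", exc)
+
+                consumer = BackgroundConsumer(rpc, on_response, on_fatal_exception)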
""" - def __init__(self, bidi_rpc, on_response): + def __init__(self, bidi_rpc, on_response, on_fatal_exception=None): self._bidi_rpc = bidi_rpc self._on_response = on_response self._paused = False + self._on_fatal_exception = on_fatal_exception self._wake = threading.Condition() self._thread = None self._operational_lock = threading.Lock() @@ -661,7 +643,8 @@ def _thread_main(self, ready): _LOGGER.debug("waiting for recv.") response = self._bidi_rpc.recv() _LOGGER.debug("recved response.") - self._on_response(response) + if self._on_response is not None: + self._on_response(response) except exceptions.GoogleAPICallError as exc: _LOGGER.debug( @@ -672,6 +655,8 @@ def _thread_main(self, ready): exc, exc_info=True, ) + if self._on_fatal_exception is not None: + self._on_fatal_exception(exc) except Exception as exc: _LOGGER.exception( @@ -679,6 +664,8 @@ def _thread_main(self, ready): _BIDIRECTIONAL_CONSUMER_NAME, exc, ) + if self._on_fatal_exception is not None: + self._on_fatal_exception(exc) _LOGGER.info("%s exiting", _BIDIRECTIONAL_CONSUMER_NAME) @@ -690,8 +677,8 @@ def start(self): name=_BIDIRECTIONAL_CONSUMER_NAME, target=self._thread_main, args=(ready,), + daemon=True, ) - thread.daemon = True thread.start() # Other parts of the code rely on `thread.is_alive` which # isn't sufficient to know if a thread is active, just that it may @@ -702,7 +689,11 @@ def start(self): _LOGGER.debug("Started helper thread %s", thread.name) def stop(self): - """Stop consuming the stream and shutdown the background thread.""" + """Stop consuming the stream and shutdown the background thread. + + NOTE: Cannot be called within `_thread_main`, since it is not + possible to join a thread to itself. + """ with self._operational_lock: self._bidi_rpc.close() @@ -716,6 +707,8 @@ def stop(self): _LOGGER.warning("Background thread did not exit.") self._thread = None + self._on_response = None + self._on_fatal_exception = None @property def is_active(self): diff --git a/google/api_core/bidi_async.py b/google/api_core/bidi_async.py new file mode 100644 index 000000000..3770f69dd --- /dev/null +++ b/google/api_core/bidi_async.py @@ -0,0 +1,244 @@ +# Copyright 2025, Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Asynchronous bi-directional streaming RPC helpers.""" + +import asyncio +import logging +from typing import Callable, Optional, Union + +from grpc import aio + +from google.api_core import exceptions +from google.api_core.bidi_base import BidiRpcBase + +from google.protobuf.message import Message as ProtobufMessage + + +_LOGGER = logging.getLogger(__name__) + + +class _AsyncRequestQueueGenerator: + """_AsyncRequestQueueGenerator is a helper class for sending asynchronous + requests to a gRPC stream from a Queue. + + This generator takes asynchronous requests off a given `asyncio.Queue` and + yields them to gRPC. + + It's useful when you have an indeterminate, indefinite, or otherwise + open-ended set of requests to send through a request-streaming (or + bidirectional) RPC. 
+
+    Example::
+
+        requests = _AsyncRequestQueueGenerator(q)
+        call = await stub.StreamingRequest(requests)
+        requests.call = call
+
+        async for response in call:
+            print(response)
+            await q.put(...)
+
+    Args:
+        queue (asyncio.Queue): The request queue.
+        initial_request (Union[ProtobufMessage,
+                Callable[[], ProtobufMessage]]): The initial request to
+            yield. This is done independently of the request queue to allow for
+            easily restarting streams that require some initial configuration
+            request.
+    """
+
+    def __init__(
+        self,
+        queue: asyncio.Queue,
+        initial_request: Optional[
+            Union[ProtobufMessage, Callable[[], ProtobufMessage]]
+        ] = None,
+    ) -> None:
+        self._queue = queue
+        self._initial_request = initial_request
+        self.call: Optional[aio.Call] = None
+
+    def _is_active(self) -> bool:
+        """Returns True if the call is not set or not completed."""
+        # Note: there is a possibility that this starts *before* the call
+        # property is set. So we have to check if self.call is set before
+        # seeing if it's active. We need to return True if self.call is None.
+        # See https://github.com/googleapis/python-api-core/issues/560.
+        return self.call is None or not self.call.done()
+
+    async def __aiter__(self):
+        # This gives the user control over when to send request proto
+        # messages, instead of requiring all of them up front.
+        #
+        # It is achieved via an asynchronous queue (asyncio.Queue):
+        # gRPC awaits until there's a message in the queue.
+        #
+        # Finally, it allows for retrying without swapping queues because if
+        # it does pull an item off the queue when the RPC is inactive, it'll
+        # immediately put it back and then exit. This is necessary because
+        # yielding the item in this case will cause gRPC to discard it. In
+        # practice, this means that the order of messages is not guaranteed.
+        # If preserving order is necessary it would be easy to use a priority
+        # queue.
+        if self._initial_request is not None:
+            if callable(self._initial_request):
+                yield self._initial_request()
+            else:
+                yield self._initial_request
+
+        while True:
+            item = await self._queue.get()
+
+            # The consumer explicitly sent "None", indicating that the request
+            # should end.
+            if item is None:
+                _LOGGER.debug("Cleanly exiting request generator.")
+                return
+
+            if not self._is_active():
+                # We have an item, but the call is closed. We should put the
+                # item back on the queue so that the next call can consume it.
+                await self._queue.put(item)
+                _LOGGER.debug(
+                    "Inactive call, replacing item on queue and exiting "
+                    "request generator."
+                )
+                return
+
+            yield item
+
+
+class AsyncBidiRpc(BidiRpcBase):
+    """A helper for consuming an async bi-directional streaming RPC.
+
+    This maps gRPC's built-in interface which uses a request iterator and a
+    response iterator into a socket-like :func:`send` and :func:`recv`. This
+    is a more useful pattern for long-running or asymmetric streams (streams
+    where there is not a direct correlation between the requests and
+    responses).
+
+    Example::
+
+        initial_request = example_pb2.StreamingRpcRequest(
+            setting='example')
+        rpc = AsyncBidiRpc(
+            stub.StreamingRpc,
+            initial_request=initial_request,
+            metadata=[('name', 'value')]
+        )
+
+        await rpc.open()
+
+        while rpc.is_active:
+            print(await rpc.recv())
+            await rpc.send(example_pb2.StreamingRpcRequest(
+                data='example'))
+
+        await rpc.close()
+
+    This does *not* retry the stream on errors.
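Since the asyncio variant awaits the queue directly, no timeout-based spinning is needed. A self-contained toy version of the loop, runnable as-is:

```
import asyncio

async def request_generator(q: asyncio.Queue):
    while True:
        item = await q.get()  # suspends instead of polling
        if item is None:      # close sentinel, as above
            return
        yield item

async def main():
    q = asyncio.Queue()
    await q.put("request-1")
    await q.put(None)
    async for item in request_generator(q):
        print(item)

asyncio.run(main())
```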
+ + Args: + start_rpc (grpc.aio.StreamStreamMultiCallable): The gRPC method used to + start the RPC. + initial_request (Union[ProtobufMessage, + Callable[[], ProtobufMessage]]): The initial request to + yield. This is useful if an initial request is needed to start the + stream. + metadata (Sequence[Tuple(str, str)]): RPC metadata to include in + the request. + """ + + def _create_queue(self) -> asyncio.Queue: + """Create a queue for requests.""" + return asyncio.Queue() + + async def open(self) -> None: + """Opens the stream.""" + if self.is_active: + raise ValueError("Cannot open an already open stream.") + + request_generator = _AsyncRequestQueueGenerator( + self._request_queue, initial_request=self._initial_request + ) + try: + call = await self._start_rpc(request_generator, metadata=self._rpc_metadata) + except exceptions.GoogleAPICallError as exc: + # The original `grpc.aio.AioRpcError` (which is usually also a + # `grpc.aio.Call`) is available from the ``response`` property on + # the mapped exception. + self._on_call_done(exc.response) + raise + + request_generator.call = call + + # TODO: api_core should expose the future interface for wrapped + # callables as well. + if hasattr(call, "_wrapped"): # pragma: NO COVER + call._wrapped.add_done_callback(self._on_call_done) + else: + call.add_done_callback(self._on_call_done) + + self._request_generator = request_generator + self.call = call + + async def close(self) -> None: + """Closes the stream.""" + if self.call is not None: + self.call.cancel() + + # Put None in request queue to signal termination. + await self._request_queue.put(None) + self._request_generator = None + self._initial_request = None + self._callbacks = [] + # Don't set self.call to None. Keep it around so that send/recv can + # raise the error. + + async def send(self, request: ProtobufMessage) -> None: + """Queue a message to be sent on the stream. + + If the underlying RPC has been closed, this will raise. + + Args: + request (ProtobufMessage): The request to send. + """ + if self.call is None: + raise ValueError("Cannot send on an RPC stream that has never been opened.") + + if not self.call.done(): + await self._request_queue.put(request) + else: + # calling read should cause the call to raise. + await self.call.read() + + async def recv(self) -> ProtobufMessage: + """Wait for a message to be returned from the stream. + + If the underlying RPC has been closed, this will raise. + + Returns: + ProtobufMessage: The received message. + """ + if self.call is None: + raise ValueError("Cannot recv on an RPC stream that has never been opened.") + + return await self.call.read() + + @property + def is_active(self) -> bool: + """Whether the stream is currently open and active.""" + return self.call is not None and not self.call.done() diff --git a/google/api_core/bidi_base.py b/google/api_core/bidi_base.py new file mode 100644 index 000000000..9288fda41 --- /dev/null +++ b/google/api_core/bidi_base.py @@ -0,0 +1,88 @@ +# Copyright 2025, Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# You may obtain a copy of the License at +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
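The `bidi_base` module that follows exists so the sync and async helpers share one implementation and differ mainly in queue type. A toy reduction of the template-method split (illustrative names, not the library classes):

```
import asyncio
import queue

class _Base:
    def __init__(self):
        # The base class owns the plumbing; subclasses pick the queue type.
        self._request_queue = self._create_queue()

    def _create_queue(self):
        raise NotImplementedError

class _SyncImpl(_Base):
    def _create_queue(self):
        return queue.Queue()

class _AsyncImpl(_Base):
    def _create_queue(self):
        return asyncio.Queue()

print(type(_SyncImpl()._request_queue), type(_AsyncImpl()._request_queue))
```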
+ +"""Base class for bi-directional streaming RPC helpers.""" + + +class BidiRpcBase: + """A base class for consuming a bi-directional streaming RPC. + + This maps gRPC's built-in interface which uses a request iterator and a + response iterator into a socket-like :func:`send` and :func:`recv`. This + is a more useful pattern for long-running or asymmetric streams (streams + where there is not a direct correlation between the requests and + responses). + + This does *not* retry the stream on errors. + + Args: + start_rpc (Union[grpc.StreamStreamMultiCallable, + grpc.aio.StreamStreamMultiCallable]): The gRPC method used + to start the RPC. + initial_request (Union[protobuf.Message, + Callable[[], protobuf.Message]]): The initial request to + yield. This is useful if an initial request is needed to start the + stream. + metadata (Sequence[Tuple(str, str)]): RPC metadata to include in + the request. + """ + + def __init__(self, start_rpc, initial_request=None, metadata=None): + self._start_rpc = start_rpc + self._initial_request = initial_request + self._rpc_metadata = metadata + self._request_queue = self._create_queue() + self._request_generator = None + self._callbacks = [] + self.call = None + + def _create_queue(self): + """Create a queue for requests.""" + raise NotImplementedError("`_create_queue` is not implemented.") + + def add_done_callback(self, callback): + """Adds a callback that will be called when the RPC terminates. + + This occurs when the RPC errors or is successfully terminated. + + Args: + callback (Union[Callable[[grpc.Future], None], Callable[[Any], None]]): + The callback to execute after gRPC call completed (success or + failure). + + For sync streaming gRPC: Callable[[grpc.Future], None] + + For async streaming gRPC: Callable[[Any], None] + """ + self._callbacks.append(callback) + + def _on_call_done(self, future): + # This occurs when the RPC errors or is successfully terminated. + # Note that grpc's "future" here can also be a grpc.RpcError. + # See note in https://github.com/grpc/grpc/issues/10885#issuecomment-302651331 + # that `grpc.RpcError` is also `grpc.Call`. + # for asynchronous gRPC call it would be `grpc.aio.AioRpcError` + + # Note: sync callbacks can be limiting for async code, because you can't + # await anything in a sync callback. + for callback in self._callbacks: + callback(future) + + @property + def is_active(self): + """True if the gRPC call is not done yet.""" + raise NotImplementedError("`is_active` is not implemented.") + + @property + def pending_requests(self): + """Estimate of the number of queued requests.""" + return self._request_queue.qsize() diff --git a/google/api_core/client_info.py b/google/api_core/client_info.py index 483267997..f0678d24b 100644 --- a/google/api_core/client_info.py +++ b/google/api_core/client_info.py @@ -57,7 +57,9 @@ class ClientInfo(object): user_agent (Optional[str]): Prefix to the user agent header. This is used to supply information such as application name or partner tool. Recommended format: ``application-or-tool-ID/major.minor.version``. - rest_version (Optional[str]): The requests library version. + rest_version (Optional[str]): A string with labeled versions of the + dependencies used for REST transport. + protobuf_runtime_version (Optional[str]): The protobuf runtime version. 
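The effect of the new field is an extra `pb/` token in the user agent. A small sketch with illustrative version strings (the exact output depends on the environment):

```
from google.api_core.client_info import ClientInfo

info = ClientInfo(
    client_library_version="1.2.3",
    protobuf_runtime_version="5.29.1",
)
print(info.to_user_agent())
# e.g. "gl-python/3.12.0 grpc/1.66.0 gax/2.24.0 gccl/1.2.3 pb/5.29.1"
```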
""" def __init__( @@ -69,6 +71,7 @@ def __init__( client_library_version=None, user_agent=None, rest_version=None, + protobuf_runtime_version=None, ): self.python_version = python_version self.grpc_version = grpc_version @@ -77,6 +80,7 @@ def __init__( self.client_library_version = client_library_version self.user_agent = user_agent self.rest_version = rest_version + self.protobuf_runtime_version = protobuf_runtime_version def to_user_agent(self): """Returns the user-agent string for this client info.""" @@ -104,4 +108,7 @@ def to_user_agent(self): if self.client_library_version is not None: ua += "gccl/{client_library_version} " + if self.protobuf_runtime_version is not None: + ua += "pb/{protobuf_runtime_version} " + return ua.format(**self.__dict__).strip() diff --git a/google/api_core/client_logging.py b/google/api_core/client_logging.py new file mode 100644 index 000000000..837e3e0c4 --- /dev/null +++ b/google/api_core/client_logging.py @@ -0,0 +1,144 @@ +import logging +import json +import os + +from typing import List, Optional + +_LOGGING_INITIALIZED = False +_BASE_LOGGER_NAME = "google" + +# Fields to be included in the StructuredLogFormatter. +# +# TODO(https://github.com/googleapis/python-api-core/issues/761): Update this list to support additional logging fields. +_recognized_logging_fields = [ + "httpRequest", + "rpcName", + "serviceName", + "credentialsType", + "credentialsInfo", + "universeDomain", + "request", + "response", + "metadata", + "retryAttempt", + "httpResponse", +] # Additional fields to be Logged. + + +def logger_configured(logger) -> bool: + """Determines whether `logger` has non-default configuration + + Args: + logger: The logger to check. + + Returns: + bool: Whether the logger has any non-default configuration. + """ + return ( + logger.handlers != [] or logger.level != logging.NOTSET or not logger.propagate + ) + + +def initialize_logging(): + """Initializes "google" loggers, partly based on the environment variable + + Initializes the "google" logger and any loggers (at the "google" + level or lower) specified by the environment variable + GOOGLE_SDK_PYTHON_LOGGING_SCOPE, as long as none of these loggers + were previously configured. If any such loggers (including the + "google" logger) are initialized, they are set to NOT propagate + log events up to their parent loggers. + + This initialization is executed only once, and hence the + environment variable is only processed the first time this + function is called. + """ + global _LOGGING_INITIALIZED + if _LOGGING_INITIALIZED: + return + scopes = os.getenv("GOOGLE_SDK_PYTHON_LOGGING_SCOPE", "") + setup_logging(scopes) + _LOGGING_INITIALIZED = True + + +def parse_logging_scopes(scopes: Optional[str] = None) -> List[str]: + """Returns a list of logger names. + + Splits the single string of comma-separated logger names into a list of individual logger name strings. + + Args: + scopes: The name of a single logger. (In the future, this will be a comma-separated list of multiple loggers.) + + Returns: + A list of all the logger names in scopes. + """ + if not scopes: + return [] + # TODO(https://github.com/googleapis/python-api-core/issues/759): check if the namespace is a valid namespace. + # TODO(b/380481951): Support logging multiple scopes. + # TODO(b/380483756): Raise or log a warning for an invalid scope. 
+ namespaces = [scopes] + return namespaces + + +def configure_defaults(logger): + """Configures `logger` to emit structured info to stdout.""" + if not logger_configured(logger): + console_handler = logging.StreamHandler() + logger.setLevel("DEBUG") + logger.propagate = False + formatter = StructuredLogFormatter() + console_handler.setFormatter(formatter) + logger.addHandler(console_handler) + + +def setup_logging(scopes: str = ""): + """Sets up logging for the specified `scopes`. + + If the loggers specified in `scopes` have not been previously + configured, this will configure them to emit structured log + entries to stdout, and to not propagate their log events to their + parent loggers. Additionally, if the "google" logger (whether it + was specified in `scopes` or not) was not previously configured, + it will also configure it to not propagate log events to the root + logger. + + Args: + scopes: The name of a single logger. (In the future, this will be a comma-separated list of multiple loggers.) + + """ + + # only returns valid logger scopes (namespaces) + # this list has at most one element. + logger_names = parse_logging_scopes(scopes) + + for namespace in logger_names: + # This will either create a module level logger or get the reference of the base logger instantiated above. + logger = logging.getLogger(namespace) + + # Configure default settings. + configure_defaults(logger) + + # disable log propagation at base logger level to the root logger only if a base logger is not already configured via code changes. + base_logger = logging.getLogger(_BASE_LOGGER_NAME) + if not logger_configured(base_logger): + base_logger.propagate = False + + +# TODO(https://github.com/googleapis/python-api-core/issues/763): Expand documentation. +class StructuredLogFormatter(logging.Formatter): + # TODO(https://github.com/googleapis/python-api-core/issues/761): ensure that additional fields such as + # function name, file name, and line no. appear in a log output. + def format(self, record: logging.LogRecord): + log_obj = { + "timestamp": self.formatTime(record), + "severity": record.levelname, + "name": record.name, + "message": record.getMessage(), + } + + for field_name in _recognized_logging_fields: + value = getattr(record, field_name, None) + if value is not None: + log_obj[field_name] = value + return json.dumps(log_obj) diff --git a/google/api_core/client_options.py b/google/api_core/client_options.py index ee9f28a99..30bff4824 100644 --- a/google/api_core/client_options.py +++ b/google/api_core/client_options.py @@ -48,6 +48,11 @@ def get_client_cert(): """ +from typing import Callable, Mapping, Optional, Sequence, Tuple +import warnings + +from google.api_core import general_helpers + class ClientOptions(object): """Client Options used to set options on clients. @@ -55,18 +60,31 @@ class ClientOptions(object): Args: api_endpoint (Optional[str]): The desired API endpoint, e.g., compute.googleapis.com - client_cert_source (Optional[Callable[[], (bytes, bytes)]]): A callback + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): A callback which returns client certificate bytes and private key bytes both in PEM format. ``client_cert_source`` and ``client_encrypted_cert_source`` are mutually exclusive. 
- client_encrypted_cert_source (Optional[Callable[[], (str, str, bytes)]]): + client_encrypted_cert_source (Optional[Callable[[], Tuple[str, str, bytes]]]): A callback which returns client certificate file path, encrypted private key file path, and the passphrase bytes.``client_cert_source`` and ``client_encrypted_cert_source`` are mutually exclusive. quota_project_id (Optional[str]): A project name that a client's quota belongs to. - credentials_file (Optional[str]): A path to a file storing credentials. - ``credentials_file` and ``api_key`` are mutually exclusive. + credentials_file (Optional[str]): Deprecated. A path to a file storing credentials. + ``credentials_file` and ``api_key`` are mutually exclusive. This argument will be + removed in the next major version of `google-api-core`. + + .. warning:: + Important: If you accept a credential configuration (credential JSON/File/Stream) + from an external source for authentication to Google Cloud Platform, you must + validate it before providing it to any Google API or client library. Providing an + unvalidated credential configuration to Google APIs or libraries can compromise + the security of your systems and data. For more information, refer to + `Validate credential configurations from external sources`_. + + .. _Validate credential configurations from external sources: + + https://cloud.google.com/docs/authentication/external/externally-sourced-credentials scopes (Optional[Sequence[str]]): OAuth access token override scopes. api_key (Optional[str]): Google API key. ``credentials_file`` and ``api_key`` are mutually exclusive. @@ -75,6 +93,11 @@ class ClientOptions(object): authentication flows. Audience is typically a resource identifier. If not set, the service endpoint value will be used as a default. An example of a valid ``api_audience`` is: "https://language.googleapis.com". + universe_domain (Optional[str]): The desired universe domain. This must match + the one in credentials. If not set, the default universe domain is + `googleapis.com`. If both `api_endpoint` and `universe_domain` are set, + then `api_endpoint` is used as the service endpoint. If `api_endpoint` is + not specified, the format will be `{service}.{universe_domain}`. 
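For illustration, a `ClientOptions` exercising the new `universe_domain` field (all values hypothetical):

```
from google.api_core.client_options import ClientOptions

options = ClientOptions(
    quota_project_id="my-quota-project",
    universe_domain="googleapis.com",
)
# With no explicit api_endpoint, clients derive
# "{service}.{universe_domain}" as the service endpoint.
print(options)
```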
Raises: ValueError: If both ``client_cert_source`` and ``client_encrypted_cert_source`` @@ -83,15 +106,21 @@ class ClientOptions(object): def __init__( self, - api_endpoint=None, - client_cert_source=None, - client_encrypted_cert_source=None, - quota_project_id=None, - credentials_file=None, - scopes=None, - api_key=None, - api_audience=None, + api_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + client_encrypted_cert_source: Optional[ + Callable[[], Tuple[str, str, bytes]] + ] = None, + quota_project_id: Optional[str] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + api_key: Optional[str] = None, + api_audience: Optional[str] = None, + universe_domain: Optional[str] = None, ): + if credentials_file is not None: + warnings.warn(general_helpers._CREDENTIALS_FILE_WARNING, DeprecationWarning) + if client_cert_source and client_encrypted_cert_source: raise ValueError( "client_cert_source and client_encrypted_cert_source are mutually exclusive" @@ -106,12 +135,13 @@ def __init__( self.scopes = scopes self.api_key = api_key self.api_audience = api_audience + self.universe_domain = universe_domain - def __repr__(self): + def __repr__(self) -> str: return "ClientOptions: " + repr(self.__dict__) -def from_dict(options): +def from_dict(options: Mapping[str, object]) -> ClientOptions: """Construct a client options object from a mapping object. Args: diff --git a/google/api_core/datetime_helpers.py b/google/api_core/datetime_helpers.py index c584c8cd7..c37923000 100644 --- a/google/api_core/datetime_helpers.py +++ b/google/api_core/datetime_helpers.py @@ -42,7 +42,7 @@ def utcnow(): """A :meth:`datetime.datetime.utcnow()` alias to allow mocking in tests.""" - return datetime.datetime.utcnow() + return datetime.datetime.now(tz=datetime.timezone.utc).replace(tzinfo=None) def to_milliseconds(value): diff --git a/google/api_core/exceptions.py b/google/api_core/exceptions.py index d4cb99733..e3eb696c7 100644 --- a/google/api_core/exceptions.py +++ b/google/api_core/exceptions.py @@ -22,22 +22,27 @@ from __future__ import unicode_literals import http.client -from typing import Dict +from typing import Optional, Dict from typing import Union import warnings from google.rpc import error_details_pb2 + +def _warn_could_not_import_grpcio_status(): + warnings.warn( + "Please install grpcio-status to obtain helpful grpc error messages.", + ImportWarning, + ) # pragma: NO COVER + + try: import grpc try: from grpc_status import rpc_status except ImportError: # pragma: NO COVER - warnings.warn( - "Please install grpcio-status to obtain helpful grpc error messages.", - ImportWarning, - ) + _warn_could_not_import_grpcio_status() rpc_status = None except ImportError: # pragma: NO COVER grpc = None @@ -437,6 +442,12 @@ class DeadlineExceeded(GatewayTimeout): grpc_status_code = grpc.StatusCode.DEADLINE_EXCEEDED if grpc is not None else None +class AsyncRestUnsupportedParameterError(NotImplementedError): + """Raised when an unsupported parameter is configured against async rest transport.""" + + pass + + def exception_class_for_http_status(status_code): """Return the exception class for a specific HTTP status code. @@ -471,40 +482,50 @@ def from_http_status(status_code, message, **kwargs): return error -def from_http_response(response): - """Create a :class:`GoogleAPICallError` from a :class:`requests.Response`. 
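The `utcnow` change above sidesteps the deprecation of `datetime.datetime.utcnow()` in Python 3.12 while keeping the same naive-UTC result:

```
import datetime

naive_utc = datetime.datetime.now(tz=datetime.timezone.utc).replace(tzinfo=None)
assert naive_utc.tzinfo is None  # same shape as the old utcnow() result
```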
+def _format_rest_error_message(error, method, url): + method = method.upper() if method else None + message = "{method} {url}: {error}".format( + method=method, + url=url, + error=error, + ) + return message + + +# NOTE: We're moving away from `from_http_status` because it expects an aiohttp response compared +# to `format_http_response_error` which expects a more abstract response from google.auth and is +# compatible with both sync and async response types. +# TODO(https://github.com/googleapis/python-api-core/issues/691): Add type hint for response. +def format_http_response_error( + response, method: str, url: str, payload: Optional[Dict] = None +): + """Create a :class:`GoogleAPICallError` from a google auth rest response. Args: - response (requests.Response): The HTTP response. + response Union[google.auth.transport.Response, google.auth.aio.transport.Response]: The HTTP response. + method Optional(str): The HTTP request method. + url Optional(str): The HTTP request url. + payload Optional(dict): The HTTP response payload. If not passed in, it is read from response for a response type of google.auth.transport.Response. Returns: GoogleAPICallError: An instance of the appropriate subclass of :class:`GoogleAPICallError`, with the message and errors populated from the response. """ - try: - payload = response.json() - except ValueError: - payload = {"error": {"message": response.text or "unknown error"}} - + payload = {} if not payload else payload error_message = payload.get("error", {}).get("message", "unknown error") errors = payload.get("error", {}).get("errors", ()) # In JSON, details are already formatted in developer-friendly way. details = payload.get("error", {}).get("details", ()) - error_info = list( + error_info_list = list( filter( lambda detail: detail.get("@type", "") == "type.googleapis.com/google.rpc.ErrorInfo", details, ) ) - error_info = error_info[0] if error_info else None - - message = "{method} {url}: {error}".format( - method=response.request.method, - url=response.request.url, - error=error_message, - ) + error_info = error_info_list[0] if error_info_list else None + message = _format_rest_error_message(error_message, method, url) exception = from_http_status( response.status_code, @@ -517,6 +538,26 @@ def from_http_response(response): return exception +def from_http_response(response): + """Create a :class:`GoogleAPICallError` from a :class:`requests.Response`. + + Args: + response (requests.Response): The HTTP response. + + Returns: + GoogleAPICallError: An instance of the appropriate subclass of + :class:`GoogleAPICallError`, with the message and errors populated + from the response. + """ + try: + payload = response.json() + except ValueError: + payload = {"error": {"message": response.text or "unknown error"}} + return format_http_response_error( + response, response.request.method, response.request.url, payload + ) + + def exception_class_for_grpc_status(status_code): """Return the exception class for a specific :class:`grpc.StatusCode`. 
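A runnable sketch of the new formatting path; `FakeResponse` is a stand-in for a `google.auth` transport response, of which only `status_code` is consulted when a payload is supplied:

```
from google.api_core import exceptions

class FakeResponse:
    status_code = 404

payload = {"error": {"message": "Resource not found"}}
exc = exceptions.format_http_response_error(
    FakeResponse(), "GET", "https://example.googleapis.com/v1/things/1", payload
)
print(type(exc).__name__)  # NotFound
print(exc)  # 404 GET https://example.googleapis.com/v1/things/1: Resource not found
```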
@@ -560,6 +601,9 @@ def _is_informative_grpc_error(rpc_exc): def _parse_grpc_error_details(rpc_exc): + if not rpc_status: # pragma: NO COVER + _warn_could_not_import_grpcio_status() + return [], None try: status = rpc_status.from_call(rpc_exc) except NotImplementedError: # workaround diff --git a/google/api_core/gapic_v1/client_info.py b/google/api_core/gapic_v1/client_info.py index 2de1be7f2..4b3b56493 100644 --- a/google/api_core/gapic_v1/client_info.py +++ b/google/api_core/gapic_v1/client_info.py @@ -45,6 +45,9 @@ class ClientInfo(client_info.ClientInfo): user_agent (Optional[str]): Prefix to the user agent header. This is used to supply information such as application name or partner tool. Recommended format: ``application-or-tool-ID/major.minor.version``. + rest_version (Optional[str]): A string with labeled versions of the + dependencies used for REST transport. + protobuf_runtime_version (Optional[str]): The protobuf runtime version. """ def to_grpc_metadata(self): diff --git a/google/api_core/gapic_v1/method_async.py b/google/api_core/gapic_v1/method_async.py index 24880756d..c0f38c0e8 100644 --- a/google/api_core/gapic_v1/method_async.py +++ b/google/api_core/gapic_v1/method_async.py @@ -25,6 +25,8 @@ from google.api_core.gapic_v1.method import DEFAULT # noqa: F401 from google.api_core.gapic_v1.method import USE_DEFAULT_METADATA # noqa: F401 +_DEFAULT_ASYNC_TRANSPORT_KIND = "grpc_asyncio" + def wrap_method( func, @@ -32,6 +34,7 @@ def wrap_method( default_timeout=None, default_compression=None, client_info=client_info.DEFAULT_CLIENT_INFO, + kind=_DEFAULT_ASYNC_TRANSPORT_KIND, ): """Wrap an async RPC method with common behavior. @@ -40,7 +43,8 @@ def wrap_method( and ``compression`` arguments and applies the common error mapping, retry, timeout, metadata, and compression behavior to the low-level RPC method. """ - func = grpc_helpers_async.wrap_errors(func) + if kind == _DEFAULT_ASYNC_TRANSPORT_KIND: + func = grpc_helpers_async.wrap_errors(func) metadata = [client_info.to_grpc_metadata()] if client_info is not None else None diff --git a/google/api_core/general_helpers.py b/google/api_core/general_helpers.py index a6af45b7a..06282299d 100644 --- a/google/api_core/general_helpers.py +++ b/google/api_core/general_helpers.py @@ -14,3 +14,39 @@ # This import for backward compatibility only. from functools import wraps # noqa: F401 pragma: NO COVER + +_CREDENTIALS_FILE_WARNING = """\ +The `credentials_file` argument is deprecated because of a potential security risk. + +The `google.auth.load_credentials_from_file` method does not validate the credential +configuration. The security risk occurs when a credential configuration is accepted +from a source that is not under your control and used without validation on your side. + +If you know that you will be loading credential configurations of a +specific type, it is recommended to use a credential-type-specific +load method. + +This will ensure that an unexpected credential type with potential for +malicious intent is not loaded unintentionally. You might still have to do +validation for certain credential types. Please follow the recommendations +for that method. 
For example, if you want to load only service accounts, +you can create the service account credentials explicitly: + +``` +from google.cloud.vision_v1 import ImageAnnotatorClient +from google.oauth2 import service_account + +credentials = service_account.Credentials.from_service_account_file(filename) +client = ImageAnnotatorClient(credentials=credentials) +``` + +If you are loading your credential configuration from an untrusted source and have +not mitigated the risks (e.g. by validating the configuration yourself), make +these changes as soon as possible to prevent security risks to your environment. + +Regardless of the method used, it is always your responsibility to validate +configurations received from external sources. + +Refer to https://cloud.google.com/docs/authentication/external/externally-sourced-credentials +for more details. +""" diff --git a/google/api_core/grpc_helpers.py b/google/api_core/grpc_helpers.py index f52e180af..430b8ce48 100644 --- a/google/api_core/grpc_helpers.py +++ b/google/api_core/grpc_helpers.py @@ -13,20 +13,19 @@ # limitations under the License. """Helpers for :mod:`grpc`.""" - import collections import functools -import logging +from typing import Generic, Iterator, Optional, TypeVar import warnings -import grpc - -from google.api_core import exceptions import google.auth import google.auth.credentials import google.auth.transport.grpc import google.auth.transport.requests import google.protobuf +import grpc + +from google.api_core import exceptions, general_helpers PROTOBUF_VERSION = google.protobuf.__version__ @@ -52,7 +51,8 @@ # The list of gRPC Callable interfaces that return iterators. _STREAM_WRAP_CLASSES = (grpc.UnaryStreamMultiCallable, grpc.StreamStreamMultiCallable) -_LOGGER = logging.getLogger(__name__) +# denotes the proto response type for grpc calls +P = TypeVar("P") def _patch_callable_name(callable_): @@ -79,7 +79,7 @@ def error_remapped_callable(*args, **kwargs): return error_remapped_callable -class _StreamingResponseIterator(grpc.Call): +class _StreamingResponseIterator(Generic[P], grpc.Call): def __init__(self, wrapped, prefetch_first_result=True): self._wrapped = wrapped @@ -97,11 +97,11 @@ def __init__(self, wrapped, prefetch_first_result=True): # ignore stop iteration at this time. This should be handled outside of retry. pass - def __iter__(self): + def __iter__(self) -> Iterator[P]: """This iterator is also an iterable that returns itself.""" return self - def __next__(self): + def __next__(self) -> P: """Get the next response from the stream. Returns: @@ -144,6 +144,10 @@ def trailing_metadata(self): return self._wrapped.trailing_metadata() +# public type alias denoting the return type of streaming gapic calls +GrpcStream = _StreamingResponseIterator[P] + + def _wrap_stream_errors(callable_): """Wrap errors for Unary-Stream and Stream-Stream gRPC callables. @@ -208,9 +212,22 @@ def _create_composite_credentials( credentials (google.auth.credentials.Credentials): The credentials. If not specified, then this function will attempt to ascertain the credentials from the environment using :func:`google.auth.default`. - credentials_file (str): A file with credentials that can be loaded with + credentials_file (str): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. This argument is - mutually exclusive with credentials. + mutually exclusive with credentials. This argument will be + removed in the next major version of `google-api-core`. + + .. 
warning:: + Important: If you accept a credential configuration (credential JSON/File/Stream) + from an external source for authentication to Google Cloud Platform, you must + validate it before providing it to any Google API or client library. Providing an + unvalidated credential configuration to Google APIs or libraries can compromise + the security of your systems and data. For more information, refer to + `Validate credential configurations from external sources`_. + + .. _Validate credential configurations from external sources: + + https://cloud.google.com/docs/authentication/external/externally-sourced-credentials default_scopes (Sequence[str]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. @@ -228,6 +245,9 @@ def _create_composite_credentials( Raises: google.api_core.DuplicateCredentialArgs: If both a credentials object and credentials_file are passed. """ + if credentials_file is not None: + warnings.warn(general_helpers._CREDENTIALS_FILE_WARNING, DeprecationWarning) + if credentials and credentials_file: raise exceptions.DuplicateCredentialArgs( "'credentials' and 'credentials_file' are mutually exclusive." @@ -263,11 +283,24 @@ def _create_composite_credentials( # Create a set of grpc.CallCredentials using the metadata plugin. google_auth_credentials = grpc.metadata_call_credentials(metadata_plugin) - if ssl_credentials is None: - ssl_credentials = grpc.ssl_channel_credentials() - - # Combine the ssl credentials and the authorization credentials. - return grpc.composite_channel_credentials(ssl_credentials, google_auth_credentials) + # if `ssl_credentials` is set, use `grpc.composite_channel_credentials` instead of + # `grpc.compute_engine_channel_credentials` as the former supports passing + # `ssl_credentials` via `channel_credentials` which is needed for mTLS. + if ssl_credentials: + # Combine the ssl credentials and the authorization credentials. + # See https://grpc.github.io/grpc/python/grpc.html#grpc.composite_channel_credentials + return grpc.composite_channel_credentials( + ssl_credentials, google_auth_credentials + ) + else: + # Use grpc.compute_engine_channel_credentials in order to support Direct Path. + # See https://grpc.github.io/grpc/python/grpc.html#grpc.compute_engine_channel_credentials + # TODO(https://github.com/googleapis/python-api-core/issues/598): + # Although `grpc.compute_engine_channel_credentials` returns channel credentials + # outside of a Google Compute Engine environment (GCE), we should determine if + # there is a way to reliably detect a GCE environment so that + # `grpc.compute_engine_channel_credentials` is not called outside of GCE. + return grpc.compute_engine_channel_credentials(google_auth_credentials) def create_channel( @@ -280,6 +313,7 @@ def create_channel( default_scopes=None, default_host=None, compression=None, + attempt_direct_path: Optional[bool] = False, **kwargs, ): """Create a secure channel with credentials. @@ -297,12 +331,40 @@ def create_channel( credentials_file (str): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. This argument is mutually exclusive with credentials. + + .. warning:: + Important: If you accept a credential configuration (credential JSON/File/Stream) + from an external source for authentication to Google Cloud Platform, you must + validate it before providing it to any Google API or client library. 
Providing an + unvalidated credential configuration to Google APIs or libraries can compromise + the security of your systems and data. For more information, refer to + `Validate credential configurations from external sources`_. + + .. _Validate credential configurations from external sources: + + https://cloud.google.com/docs/authentication/external/externally-sourced-credentials quota_project_id (str): An optional project to use for billing and quota. default_scopes (Sequence[str]): Default scopes passed by a Google client library. Use 'scopes' for user-defined scopes. default_host (str): The default endpoint. e.g., "pubsub.googleapis.com". compression (grpc.Compression): An optional value indicating the compression method to be used over the lifetime of the channel. + attempt_direct_path (Optional[bool]): If set, Direct Path will be attempted + when the request is made. Direct Path is only available within a Google + Compute Engine (GCE) environment and provides a proxyless connection + which increases the available throughput, reduces latency, and increases + reliability. Note: + + - This argument should only be set in a GCE environment and for Services + that are known to support Direct Path. + - If this argument is set outside of GCE, then this request will fail + unless the back-end service happens to have configured fall-back to DNS. + - If the request causes a `ServiceUnavailable` response, it is recommended + that the client repeat the request with `attempt_direct_path` set to + `False` as the Service may not support Direct Path. + - Using `ssl_credentials` with `attempt_direct_path` set to `True` will + result in `ValueError` as this combination is not yet supported. + kwargs: Additional key-word args passed to :func:`grpc_gcp.secure_channel` or :func:`grpc.secure_channel`. Note: `grpc_gcp` is only supported in environments with protobuf < 4.0.0. @@ -312,8 +374,15 @@ def create_channel( Raises: google.api_core.DuplicateCredentialArgs: If both a credentials object and credentials_file are passed. + ValueError: If `ssl_credentials` is set and `attempt_direct_path` is set to `True`. """ + # If `ssl_credentials` is set and `attempt_direct_path` is set to `True`, + # raise ValueError as this is not yet supported. + # See https://github.com/googleapis/python-api-core/issues/590 + if ssl_credentials and attempt_direct_path: + raise ValueError("Using ssl_credentials with Direct Path is not supported") + composite_credentials = _create_composite_credentials( credentials=credentials, credentials_file=credentials_file, @@ -324,17 +393,58 @@ def create_channel( default_host=default_host, ) + # Note that grpcio-gcp is deprecated if HAS_GRPC_GCP: # pragma: NO COVER if compression is not None and compression != grpc.Compression.NoCompression: - _LOGGER.debug( - "Compression argument is being ignored for grpc_gcp.secure_channel creation." 
+ warnings.warn( + "The `compression` argument is ignored for grpc_gcp.secure_channel creation.", + DeprecationWarning, + ) + if attempt_direct_path: + warnings.warn( + """The `attempt_direct_path` argument is ignored for grpc_gcp.secure_channel creation.""", + DeprecationWarning, ) return grpc_gcp.secure_channel(target, composite_credentials, **kwargs) + + if attempt_direct_path: + target = _modify_target_for_direct_path(target) + return grpc.secure_channel( target, composite_credentials, compression=compression, **kwargs ) +def _modify_target_for_direct_path(target: str) -> str: + """ + Given a target, return a modified version which is compatible with Direct Path. + + Args: + target (str): The target service address in the format 'hostname[:port]' or + 'dns://hostname[:port]'. + + Returns: + target (str): The target service address which is converted into a format compatible with Direct Path. + If the target contains `dns:///` or does not contain `:///`, the target will be converted in + a format compatible with Direct Path; otherwise the original target will be returned as the + original target may already denote Direct Path. + """ + + # A DNS prefix may be included with the target to indicate the endpoint is living in the Internet, + # outside of Google Cloud Platform. + dns_prefix = "dns:///" + # Remove "dns:///" if `attempt_direct_path` is set to True as + # the Direct Path prefix `google-c2p:///` will be used instead. + target = target.replace(dns_prefix, "") + + direct_path_separator = ":///" + if direct_path_separator not in target: + target_without_port = target.split(":")[0] + # Modify the target to use Direct Path by adding the `google-c2p:///` prefix + target = f"google-c2p{direct_path_separator}{target_without_port}" + return target + + _MethodCall = collections.namedtuple( "_MethodCall", ("request", "timeout", "metadata", "credentials", "compression") ) @@ -486,20 +596,42 @@ def __getattr__(self, key): except KeyError: raise AttributeError - def unary_unary(self, method, request_serializer=None, response_deserializer=None): + def unary_unary( + self, + method, + request_serializer=None, + response_deserializer=None, + _registered_method=False, + ): """grpc.Channel.unary_unary implementation.""" return self._stub_for_method(method) - def unary_stream(self, method, request_serializer=None, response_deserializer=None): + def unary_stream( + self, + method, + request_serializer=None, + response_deserializer=None, + _registered_method=False, + ): """grpc.Channel.unary_stream implementation.""" return self._stub_for_method(method) - def stream_unary(self, method, request_serializer=None, response_deserializer=None): + def stream_unary( + self, + method, + request_serializer=None, + response_deserializer=None, + _registered_method=False, + ): """grpc.Channel.stream_unary implementation.""" return self._stub_for_method(method) def stream_stream( - self, method, request_serializer=None, response_deserializer=None + self, + method, + request_serializer=None, + response_deserializer=None, + _registered_method=False, ): """grpc.Channel.stream_stream implementation.""" return self._stub_for_method(method) diff --git a/google/api_core/grpc_helpers_async.py b/google/api_core/grpc_helpers_async.py index d1f69d98f..9e1ad1105 100644 --- a/google/api_core/grpc_helpers_async.py +++ b/google/api_core/grpc_helpers_async.py @@ -20,12 +20,17 @@ import asyncio import functools +import warnings + +from typing import AsyncGenerator, Generic, Iterator, Optional, TypeVar import grpc from grpc import 
aio -from google.api_core import exceptions, grpc_helpers +from google.api_core import exceptions, general_helpers, grpc_helpers +# denotes the proto response type for grpc calls +P = TypeVar("P") # NOTE(lidiz) Alternatively, we can hack "__getattribute__" to perform # automatic patching for us. But that means the overhead of creating an @@ -75,8 +80,8 @@ async def wait_for_connection(self): raise exceptions.from_grpc_error(rpc_error) from rpc_error -class _WrappedUnaryResponseMixin(_WrappedCall): - def __await__(self): +class _WrappedUnaryResponseMixin(Generic[P], _WrappedCall): + def __await__(self) -> Iterator[P]: try: response = yield from self._call.__await__() return response @@ -84,17 +89,17 @@ def __await__(self): raise exceptions.from_grpc_error(rpc_error) from rpc_error -class _WrappedStreamResponseMixin(_WrappedCall): +class _WrappedStreamResponseMixin(Generic[P], _WrappedCall): def __init__(self): self._wrapped_async_generator = None - async def read(self): + async def read(self) -> P: try: return await self._call.read() except grpc.RpcError as rpc_error: raise exceptions.from_grpc_error(rpc_error) from rpc_error - async def _wrapped_aiter(self): + async def _wrapped_aiter(self) -> AsyncGenerator[P, None]: try: # NOTE(lidiz) coverage doesn't understand the exception raised from # __anext__ method. It is covered by test case: @@ -104,7 +109,7 @@ async def _wrapped_aiter(self): except grpc.RpcError as rpc_error: raise exceptions.from_grpc_error(rpc_error) from rpc_error - def __aiter__(self): + def __aiter__(self) -> AsyncGenerator[P, None]: if not self._wrapped_async_generator: self._wrapped_async_generator = self._wrapped_aiter() return self._wrapped_async_generator @@ -127,29 +132,34 @@ async def done_writing(self): # NOTE(lidiz) Implementing each individual class separately, so we don't # expose any API that should not be seen. E.g., __aiter__ in unary-unary # RPC, or __await__ in stream-stream RPC. 
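Returning briefly to the Direct Path helper added in `grpc_helpers` above: the target rewrite is easiest to see by example (`_modify_target_for_direct_path` is a private helper, shown here for illustration only; the service name is arbitrary):

```
from google.api_core.grpc_helpers import _modify_target_for_direct_path

print(_modify_target_for_direct_path("pubsub.googleapis.com:443"))
# google-c2p:///pubsub.googleapis.com
print(_modify_target_for_direct_path("dns:///pubsub.googleapis.com"))
# google-c2p:///pubsub.googleapis.com
print(_modify_target_for_direct_path("google-c2p:///pubsub.googleapis.com"))
# unchanged: google-c2p:///pubsub.googleapis.com
```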
-class _WrappedUnaryUnaryCall(_WrappedUnaryResponseMixin, aio.UnaryUnaryCall): +class _WrappedUnaryUnaryCall(_WrappedUnaryResponseMixin[P], aio.UnaryUnaryCall): """Wrapped UnaryUnaryCall to map exceptions.""" -class _WrappedUnaryStreamCall(_WrappedStreamResponseMixin, aio.UnaryStreamCall): +class _WrappedUnaryStreamCall(_WrappedStreamResponseMixin[P], aio.UnaryStreamCall): """Wrapped UnaryStreamCall to map exceptions.""" class _WrappedStreamUnaryCall( - _WrappedUnaryResponseMixin, _WrappedStreamRequestMixin, aio.StreamUnaryCall + _WrappedUnaryResponseMixin[P], _WrappedStreamRequestMixin, aio.StreamUnaryCall ): """Wrapped StreamUnaryCall to map exceptions.""" class _WrappedStreamStreamCall( - _WrappedStreamRequestMixin, _WrappedStreamResponseMixin, aio.StreamStreamCall + _WrappedStreamRequestMixin, _WrappedStreamResponseMixin[P], aio.StreamStreamCall ): """Wrapped StreamStreamCall to map exceptions.""" +# public type alias denoting the return type of async streaming gapic calls +GrpcAsyncStream = _WrappedStreamResponseMixin +# public type alias denoting the return type of unary gapic calls +AwaitableGrpcCall = _WrappedUnaryResponseMixin + + def _wrap_unary_errors(callable_): """Map errors for Unary-Unary async callables.""" - grpc_helpers._patch_callable_name(callable_) @functools.wraps(callable_) def error_remapped_callable(*args, **kwargs): @@ -159,23 +169,13 @@ def error_remapped_callable(*args, **kwargs): return error_remapped_callable -def _wrap_stream_errors(callable_): +def _wrap_stream_errors(callable_, wrapper_type): """Map errors for streaming RPC async callables.""" - grpc_helpers._patch_callable_name(callable_) @functools.wraps(callable_) async def error_remapped_callable(*args, **kwargs): call = callable_(*args, **kwargs) - - if isinstance(call, aio.UnaryStreamCall): - call = _WrappedUnaryStreamCall().with_call(call) - elif isinstance(call, aio.StreamUnaryCall): - call = _WrappedStreamUnaryCall().with_call(call) - elif isinstance(call, aio.StreamStreamCall): - call = _WrappedStreamStreamCall().with_call(call) - else: - raise TypeError("Unexpected type of call %s" % type(call)) - + call = wrapper_type().with_call(call) await call.wait_for_connection() return call @@ -197,10 +197,16 @@ def wrap_errors(callable_): Returns: Callable: The wrapped gRPC callable. """ - if isinstance(callable_, aio.UnaryUnaryMultiCallable): - return _wrap_unary_errors(callable_) + grpc_helpers._patch_callable_name(callable_) + + if isinstance(callable_, aio.UnaryStreamMultiCallable): + return _wrap_stream_errors(callable_, _WrappedUnaryStreamCall) + elif isinstance(callable_, aio.StreamUnaryMultiCallable): + return _wrap_stream_errors(callable_, _WrappedStreamUnaryCall) + elif isinstance(callable_, aio.StreamStreamMultiCallable): + return _wrap_stream_errors(callable_, _WrappedStreamStreamCall) else: - return _wrap_stream_errors(callable_) + return _wrap_unary_errors(callable_) def create_channel( @@ -213,7 +219,8 @@ def create_channel( default_scopes=None, default_host=None, compression=None, - **kwargs + attempt_direct_path: Optional[bool] = False, + **kwargs, ): """Create an AsyncIO secure channel with credentials. @@ -227,15 +234,44 @@ def create_channel( are passed to :func:`google.auth.default`. ssl_credentials (grpc.ChannelCredentials): Optional SSL channel credentials. This can be used to specify different certificates. - credentials_file (str): A file with credentials that can be loaded with + credentials_file (str): Deprecated. 
A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. This argument is - mutually exclusive with credentials. + mutually exclusive with credentials. This argument will be + removed in the next major version of `google-api-core`. + + .. warning:: + Important: If you accept a credential configuration (credential JSON/File/Stream) + from an external source for authentication to Google Cloud Platform, you must + validate it before providing it to any Google API or client library. Providing an + unvalidated credential configuration to Google APIs or libraries can compromise + the security of your systems and data. For more information, refer to + `Validate credential configurations from external sources`_. + + .. _Validate credential configurations from external sources: + + https://cloud.google.com/docs/authentication/external/externally-sourced-credentials quota_project_id (str): An optional project to use for billing and quota. default_scopes (Sequence[str]): Default scopes passed by a Google client library. Use 'scopes' for user-defined scopes. default_host (str): The default endpoint. e.g., "pubsub.googleapis.com". compression (grpc.Compression): An optional value indicating the compression method to be used over the lifetime of the channel. + attempt_direct_path (Optional[bool]): If set, Direct Path will be attempted + when the request is made. Direct Path is only available within a Google + Compute Engine (GCE) environment and provides a proxyless connection + which increases the available throughput, reduces latency, and increases + reliability. Note: + + - This argument should only be set in a GCE environment and for Services + that are known to support Direct Path. + - If this argument is set outside of GCE, then this request will fail + unless the back-end service happens to have configured fall-back to DNS. + - If the request causes a `ServiceUnavailable` response, it is recommended + that the client repeat the request with `attempt_direct_path` set to + `False` as the Service may not support Direct Path. + - Using `ssl_credentials` with `attempt_direct_path` set to `True` will + result in `ValueError` as this combination is not yet supported. + kwargs: Additional key-word args passed to :func:`aio.secure_channel`. Returns: @@ -243,8 +279,18 @@ def create_channel( Raises: google.api_core.DuplicateCredentialArgs: If both a credentials object and credentials_file are passed. + ValueError: If `ssl_credentials` is set and `attempt_direct_path` is set to `True`. """ + if credentials_file is not None: + warnings.warn(general_helpers._CREDENTIALS_FILE_WARNING, DeprecationWarning) + + # If `ssl_credentials` is set and `attempt_direct_path` is set to `True`, + # raise ValueError as this is not yet supported. 
+ # See https://github.com/googleapis/python-api-core/issues/590 + if ssl_credentials and attempt_direct_path: + raise ValueError("Using ssl_credentials with Direct Path is not supported") + composite_credentials = grpc_helpers._create_composite_credentials( credentials=credentials, credentials_file=credentials_file, @@ -255,6 +301,9 @@ def create_channel( default_host=default_host, ) + if attempt_direct_path: + target = grpc_helpers._modify_target_for_direct_path(target) + return aio.secure_channel( target, composite_credentials, compression=compression, **kwargs ) diff --git a/google/api_core/operation.py b/google/api_core/operation.py index 4b9c9a58b..5206243a7 100644 --- a/google/api_core/operation.py +++ b/google/api_core/operation.py @@ -78,7 +78,7 @@ def __init__( result_type, metadata_type=None, polling=polling.DEFAULT_POLLING, - **kwargs + **kwargs, ): super(Operation, self).__init__(polling=polling, **kwargs) self._operation = operation diff --git a/google/api_core/operations_v1/__init__.py b/google/api_core/operations_v1/__init__.py index 61186451f..4db32a4cb 100644 --- a/google/api_core/operations_v1/__init__.py +++ b/google/api_core/operations_v1/__init__.py @@ -25,3 +25,16 @@ "OperationsClient", "OperationsRestTransport" ] + +try: + from google.api_core.operations_v1.transports.rest_asyncio import ( + AsyncOperationsRestTransport, + ) + from google.api_core.operations_v1.operations_rest_client_async import AsyncOperationsRestClient + + __all__ += ["AsyncOperationsRestClient", "AsyncOperationsRestTransport"] +except ImportError: + # This import requires the `async_rest` extra. + # Don't raise an exception if `AsyncOperationsRestTransport` cannot be imported + # as other transports are still available. + pass diff --git a/google/api_core/operations_v1/abstract_operations_base_client.py b/google/api_core/operations_v1/abstract_operations_base_client.py new file mode 100644 index 000000000..f62f60b04 --- /dev/null +++ b/google/api_core/operations_v1/abstract_operations_base_client.py @@ -0,0 +1,376 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
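Because the async REST transport in the `operations_v1` change above is only importable when the optional dependencies are present, callers can feature-detect it much as `__init__.py` itself does. A hedged sketch:

```
try:
    from google.api_core.operations_v1 import AsyncOperationsRestClient
except ImportError:
    AsyncOperationsRestClient = None  # the `async_rest` extra is not installed

if AsyncOperationsRestClient is None:
    print("Install google-api-core[async_rest] to use the async REST transport.")
```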
+
+from collections import OrderedDict
+import os
+import re
+from typing import Dict, Optional, Type, Union
+
+from google.api_core import client_options as client_options_lib  # type: ignore
+from google.api_core import gapic_v1  # type: ignore
+from google.api_core.operations_v1.transports.base import (
+    DEFAULT_CLIENT_INFO,
+    OperationsTransport,
+)
+from google.api_core.operations_v1.transports.rest import OperationsRestTransport
+
+try:
+    from google.api_core.operations_v1.transports.rest_asyncio import (
+        AsyncOperationsRestTransport,
+    )
+
+    HAS_ASYNC_REST_DEPENDENCIES = True
+except ImportError as e:
+    HAS_ASYNC_REST_DEPENDENCIES = False
+    ASYNC_REST_EXCEPTION = e
+
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.exceptions import MutualTLSChannelError  # type: ignore
+from google.auth.transport import mtls  # type: ignore
+
+
+class AbstractOperationsBaseClientMeta(type):
+    """Metaclass for the Operations Base client.
+
+    This provides base class-level methods for building and retrieving
+    support objects (e.g. transport) without polluting the client instance
+    objects.
+    """
+
+    _transport_registry = OrderedDict()  # type: Dict[str, Type[OperationsTransport]]
+    _transport_registry["rest"] = OperationsRestTransport
+    if HAS_ASYNC_REST_DEPENDENCIES:
+        _transport_registry["rest_asyncio"] = AsyncOperationsRestTransport
+
+    def get_transport_class(
+        cls,
+        label: Optional[str] = None,
+    ) -> Type[OperationsTransport]:
+        """Returns an appropriate transport class.
+
+        Args:
+            label: The name of the desired transport. If none is
+                provided, then the first transport in the registry is used.
+
+        Returns:
+            The transport class to use.
+        """
+        # If a specific transport is requested, return that one.
+        if (
+            label == "rest_asyncio" and not HAS_ASYNC_REST_DEPENDENCIES
+        ):  # pragma: NO COVER
+            raise ASYNC_REST_EXCEPTION
+
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class AbstractOperationsBaseClient(metaclass=AbstractOperationsBaseClientMeta):
+    """Manages long-running operations with an API service.
+
+    When an API method normally takes long time to complete, it can be
+    designed to return [Operation][google.api_core.operations_v1.Operation] to the
+    client, and the client can use this interface to receive the real
+    response asynchronously by polling the operation resource, or pass
+    the operation resource to another API (such as Google Cloud Pub/Sub
+    API) to receive the response. Any API service that returns
+    long-running operations should implement the ``Operations``
+    interface so developers can have a consistent client experience.
+    """
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "longrunning.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """ + This class method should be overridden by the subclasses. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Raises: + NotImplementedError: If the method is called on the base class. + """ + raise NotImplementedError("`from_service_account_info` is not implemented.") + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """ + This class method should be overridden by the subclasses. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Raises: + NotImplementedError: If the method is called on the base class. + """ + raise NotImplementedError("`from_service_account_file` is not implemented.") + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> OperationsTransport: + """Returns the transport used by the client instance. + + Returns: + OperationsTransport: The transport used by the client + instance. + """ + return self._transport + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P<folder>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse an organization path into its component segments.""" + m = re.match(r"^organizations/(?P<organization>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P<project>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(
+ project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path) + return m.groupdict() if m else {} + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, OperationsTransport, None] = None, + client_options: Optional[client_options_lib.ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the operations client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, OperationsTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + + # Create SSL credentials for mutual TLS if needed.
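The ``client_options`` docstring above spells out a small decision table for endpoint selection. A condensed, standalone sketch of that table (the function name and its ValueError are illustrative; the real constructor raises MutualTLSChannelError for unsupported values):

import os

DEFAULT_ENDPOINT = "longrunning.googleapis.com"
DEFAULT_MTLS_ENDPOINT = "longrunning.mtls.googleapis.com"

def resolve_endpoint(api_endpoint=None, client_cert_present=False):
    # An explicit api_endpoint always takes precedence.
    if api_endpoint is not None:
        return api_endpoint
    use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
    if use_mtls_env == "never":
        return DEFAULT_ENDPOINT
    if use_mtls_env == "always":
        return DEFAULT_MTLS_ENDPOINT
    if use_mtls_env == "auto":
        # "auto" switches to mTLS only when a client certificate is available.
        return DEFAULT_MTLS_ENDPOINT if client_cert_present else DEFAULT_ENDPOINT
    raise ValueError("GOOGLE_API_USE_MTLS_ENDPOINT must be never, auto, or always")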
+ if hasattr(mtls, "should_use_client_cert"): + use_client_cert = mtls.should_use_client_cert() + else: + # if unsupported, fallback to reading from env var + use_client_cert_str = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + if use_client_cert_str not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be" + " either `true` or `false`" + ) + use_client_cert = use_client_cert_str == "true" + client_cert_source_func = None + is_mtls = False + if use_client_cert: + if client_options.client_cert_source: + is_mtls = True + client_cert_source_func = client_options.client_cert_source + else: + is_mtls = mtls.has_default_client_cert_source() + if is_mtls: + client_cert_source_func = mtls.default_client_cert_source() + else: + client_cert_source_func = None + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + else: + use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_mtls_env == "never": + api_endpoint = self.DEFAULT_ENDPOINT + elif use_mtls_env == "always": + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + elif use_mtls_env == "auto": + if is_mtls: + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = self.DEFAULT_ENDPOINT + else: + raise MutualTLSChannelError( + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " + "values: never, auto, always" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, OperationsTransport): + # transport is a OperationsTransport instance. + if credentials or client_options.credentials_file: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + ) diff --git a/google/api_core/operations_v1/abstract_operations_client.py b/google/api_core/operations_v1/abstract_operations_client.py index 38f532af5..fc4453625 100644 --- a/google/api_core/operations_v1/abstract_operations_client.py +++ b/google/api_core/operations_v1/abstract_operations_client.py @@ -13,10 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
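The tail of the base constructor above enforces an either/or rule: hand the client constructor parameters and let it build its own transport, or hand it a finished transport instance, never both. A usage sketch of that rule (anonymous credentials are for illustration only):

from google.api_core.operations_v1 import AbstractOperationsClient
from google.api_core.operations_v1.transports.rest import OperationsRestTransport
from google.auth import credentials as ga_credentials

creds = ga_credentials.AnonymousCredentials()

# Either let the client build the transport from constructor parameters...
client = AbstractOperationsClient(credentials=creds)

# ...or build the transport yourself and pass the finished instance.
transport = OperationsRestTransport(credentials=creds)
client = AbstractOperationsClient(transport=transport)

# Mixing the two styles is rejected, because the transport already owns
# its credentials and scopes:
# AbstractOperationsClient(credentials=creds, transport=transport)  # ValueError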
# -from collections import OrderedDict -import os -import re -from typing import Dict, Optional, Sequence, Tuple, Type, Union +from typing import Optional, Sequence, Tuple, Union from google.api_core import client_options as client_options_lib # type: ignore from google.api_core import gapic_v1 # type: ignore @@ -26,10 +23,10 @@ DEFAULT_CLIENT_INFO, OperationsTransport, ) -from google.api_core.operations_v1.transports.rest import OperationsRestTransport +from google.api_core.operations_v1.abstract_operations_base_client import ( + AbstractOperationsBaseClient, +) from google.auth import credentials as ga_credentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.auth.transport import mtls # type: ignore from google.longrunning import operations_pb2 from google.oauth2 import service_account # type: ignore import grpc @@ -37,40 +34,7 @@ OptionalRetry = Union[retries.Retry, object] -class AbstractOperationsClientMeta(type): - """Metaclass for the Operations client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. - """ - - _transport_registry = OrderedDict() # type: Dict[str, Type[OperationsTransport]] - _transport_registry["rest"] = OperationsRestTransport - - def get_transport_class( - cls, - label: Optional[str] = None, - ) -> Type[OperationsTransport]: - """Returns an appropriate transport class. - - Args: - label: The name of the desired transport. If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. - """ - # If a specific transport is requested, return that one. - if label: - return cls._transport_registry[label] - - # No transport is requested; return the default (that is, the first one - # in the dictionary). - return next(iter(cls._transport_registry.values())) - - -class AbstractOperationsClient(metaclass=AbstractOperationsClientMeta): +class AbstractOperationsClient(AbstractOperationsBaseClient): """Manages long-running operations with an API service. When an API method normally takes long time to complete, it can be @@ -83,165 +47,6 @@ class AbstractOperationsClient(metaclass=AbstractOperationsClientMeta): interface so developers can have a consistent client experience. """ - @staticmethod - def _get_default_mtls_endpoint(api_endpoint): - """Converts api endpoint to mTLS endpoint. - - Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to - "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. - Args: - api_endpoint (Optional[str]): the api endpoint to convert. - Returns: - str: converted mTLS api endpoint. - """ - if not api_endpoint: - return api_endpoint - - mtls_endpoint_re = re.compile( - r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?" - ) - - m = mtls_endpoint_re.match(api_endpoint) - name, mtls, sandbox, googledomain = m.groups() - if mtls or not googledomain: - return api_endpoint - - if sandbox: - return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" - ) - - return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") - - DEFAULT_ENDPOINT = "longrunning.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info.
- - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - AbstractOperationsClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_info(info) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - AbstractOperationsClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file(filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> OperationsTransport: - """Returns the transport used by the client instance. - - Returns: - OperationsTransport: The transport used by the client - instance. - """ - return self._transport - - @staticmethod - def common_billing_account_path( - billing_account: str, - ) -> str: - """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format( - billing_account=billing_account, - ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str, str]: - """Parse a billing_account path into its component segments.""" - m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_folder_path( - folder: str, - ) -> str: - """Returns a fully-qualified folder string.""" - return "folders/{folder}".format( - folder=folder, - ) - - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str, str]: - """Parse a folder path into its component segments.""" - m = re.match(r"^folders/(?P<folder>.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_organization_path( - organization: str, - ) -> str: - """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format( - organization=organization, - ) - - @staticmethod - def parse_common_organization_path(path: str) -> Dict[str, str]: - """Parse a organization path into its component segments.""" - m = re.match(r"^organizations/(?P<organization>.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_project_path( - project: str, - ) -> str: - """Returns a fully-qualified project string.""" - return "projects/{project}".format( - project=project, - ) - - @staticmethod - def parse_common_project_path(path: str) -> Dict[str, str]: - """Parse a project path into its component segments.""" - m = re.match(r"^projects/(?P<project>.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_location_path( - project: str, - location: str, - ) -> str: - """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format( - project=project, - location=location, - ) - - @staticmethod - def parse_common_location_path(path: str) -> Dict[str, str]: - """Parse a location path into its component segments.""" - m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path) - return m.groupdict() if m else {} - def __init__( self, *, @@ -287,80 +92,49 @@ def __init__(
google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport creation failed for any reason. """ - if isinstance(client_options, dict): - client_options = client_options_lib.from_dict(client_options) - if client_options is None: - client_options = client_options_lib.ClientOptions() - - # Create SSL credentials for mutual TLS if needed. - use_client_cert = os.getenv( - "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" - ).lower() - if use_client_cert not in ("true", "false"): - raise ValueError( - "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - ) - client_cert_source_func = None - is_mtls = False - if use_client_cert == "true": - if client_options.client_cert_source: - is_mtls = True - client_cert_source_func = client_options.client_cert_source - else: - is_mtls = mtls.has_default_client_cert_source() - if is_mtls: - client_cert_source_func = mtls.default_client_cert_source() - else: - client_cert_source_func = None - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - else: - use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_mtls_env == "never": - api_endpoint = self.DEFAULT_ENDPOINT - elif use_mtls_env == "always": - api_endpoint = self.DEFAULT_MTLS_ENDPOINT - elif use_mtls_env == "auto": - if is_mtls: - api_endpoint = self.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = self.DEFAULT_ENDPOINT - else: - raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " - "values: never, auto, always" - ) - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - if isinstance(transport, OperationsTransport): - # transport is a OperationsTransport instance. - if credentials or client_options.credentials_file: - raise ValueError( - "When providing a transport instance, " - "provide its credentials directly." - ) - if client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." - ) - self._transport = transport - else: - Transport = type(self).get_transport_class(transport) - self._transport = Transport( - credentials=credentials, - credentials_file=client_options.credentials_file, - host=api_endpoint, - scopes=client_options.scopes, - client_cert_source_for_mtls=client_cert_source_func, - quota_project_id=client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - ) + super().__init__( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + AbstractOperationsClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. 
+ args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + AbstractOperationsClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file def list_operations( self, diff --git a/google/api_core/operations_v1/operations_rest_client_async.py b/google/api_core/operations_v1/operations_rest_client_async.py new file mode 100644 index 000000000..7ab0cd36f --- /dev/null +++ b/google/api_core/operations_v1/operations_rest_client_async.py @@ -0,0 +1,345 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Optional, Sequence, Tuple, Union + +from google.api_core import client_options as client_options_lib # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core.operations_v1 import pagers_async as pagers +from google.api_core.operations_v1.transports.base import ( + DEFAULT_CLIENT_INFO, + OperationsTransport, +) +from google.api_core.operations_v1.abstract_operations_base_client import ( + AbstractOperationsBaseClient, +) +from google.longrunning import operations_pb2 + +try: + from google.auth.aio import credentials as ga_credentials # type: ignore +except ImportError as e: # pragma: NO COVER + raise ImportError( + "The `async_rest` extra of `google-api-core` is required to use long-running operations. Install it by running " + "`pip install google-api-core[async_rest]`." + ) from e + + +class AsyncOperationsRestClient(AbstractOperationsBaseClient): + """Manages long-running operations with a REST API service for the asynchronous client. + + When an API method normally takes long time to complete, it can be + designed to return [Operation][google.api_core.operations_v1.Operation] to the + client, and the client can use this interface to receive the real + response asynchronously by polling the operation resource, or pass + the operation resource to another API (such as Google Cloud Pub/Sub + API) to receive the response. Any API service that returns + long-running operations should implement the ``Operations`` + interface so developers can have a consistent client experience. + """ + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, OperationsTransport, None] = None, + client_options: Optional[client_options_lib.ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the operations client. + + Args: + credentials (Optional[google.auth.aio.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
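The ``from_service_account_*`` factories restored above only wrap the key material in ``service_account.Credentials`` and forward everything else to the constructor. Typical usage of the synchronous client looks like this (the file name is illustrative):

import json

from google.api_core.operations_v1 import AbstractOperationsClient

# From a key file on disk:
client = AbstractOperationsClient.from_service_account_file("sa-key.json")

# From already-parsed key material, e.g. fetched from a secret manager:
with open("sa-key.json") as f:
    info = json.load(f)
client = AbstractOperationsClient.from_service_account_info(info)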
+ transport (Union[str, OperationsTransport]): The + transport to use. If set to None, this defaults to 'rest_asyncio'. + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + super().__init__( + credentials=credentials, # type: ignore + # NOTE: If a transport is not provided, we force the client to use the async + # REST transport. + transport=transport or "rest_asyncio", + client_options=client_options, + client_info=client_info, + ) + + async def get_operation( + self, + name: str, + *, + # TODO(https://github.com/googleapis/python-api-core/issues/722): Leverage `retry` + # to allow configuring retryable error codes. + retry=gapic_v1.method_async.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + Clients can use this method to poll the operation result + at intervals as recommended by the API service. + + Args: + name (str): + The name of the operation resource. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.longrunning.operations_pb2.Operation: + This resource represents a long- + running operation that is the result of a + network API call. + + """ + + request = operations_pb2.GetOperationRequest(name=name) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata or ()) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def list_operations( + self, + name: str, + filter_: Optional[str] = None, + *, + page_size: Optional[int] = None, + page_token: Optional[str] = None, + # TODO(https://github.com/googleapis/python-api-core/issues/722): Leverage `retry` + # to allow configuring retryable error codes. + retry=gapic_v1.method_async.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListOperationsAsyncPager: + r"""Lists operations that match the specified filter in the request. + If the server doesn't support this method, it returns + ``UNIMPLEMENTED``. + + NOTE: the ``name`` binding allows API services to override the + binding to use different resource name schemes, such as + ``users/*/operations``. To override the binding, API services + can add a binding such as ``"/v1/{name=users/*}/operations"`` to + their service configuration. For backwards compatibility, the + default name includes the operations collection id, however + overriding users must ensure the name binding is the parent + resource, without the operations collection id. + + Args: + name (str): + The name of the operation's parent + resource. + filter_ (str): + The standard list filter. + This corresponds to the ``filter`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.api_core.operations_v1.pagers.ListOperationsPager: + The response message for + [Operations.ListOperations][google.api_core.operations_v1.Operations.ListOperations]. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create a protobuf request object. + request = operations_pb2.ListOperationsRequest(name=name, filter=filter_) + if page_size is not None: + request.page_size = page_size + if page_token is not None: + request.page_token = page_token + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata or ()) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListOperationsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_operation( + self, + name: str, + *, + # TODO(https://github.com/googleapis/python-api-core/issues/722): Leverage `retry` + # to allow configuring retryable error codes. + retry=gapic_v1.method_async.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running operation. This method indicates that the + client is no longer interested in the operation result. It does + not cancel the operation. If the server doesn't support this + method, it returns ``google.rpc.Code.UNIMPLEMENTED``. + + Args: + name (str): + The name of the operation resource to + be deleted. 
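The ``list_operations`` coroutine above returns a ``ListOperationsAsyncPager`` rather than a bare response. A consumption sketch, where ``creds`` stands in for a ``google.auth.aio.credentials.Credentials`` instance and the ``name`` value depends on the target service:

import asyncio

from google.api_core.operations_v1 import AsyncOperationsRestClient

async def list_all(creds):
    client = AsyncOperationsRestClient(credentials=creds)
    pager = await client.list_operations(name="operations", page_size=10)
    async for operation in pager:  # further pages are fetched on demand
        print(operation.name)

# asyncio.run(list_all(creds)) once async credentials are available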
+ + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create the request object. + request = operations_pb2.DeleteOperationRequest(name=name) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata or ()) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def cancel_operation( + self, + name: Optional[str] = None, + *, + # TODO(https://github.com/googleapis/python-api-core/issues/722): Leverage `retry` + # to allow configuring retryable error codes. + retry=gapic_v1.method_async.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + The server makes a best effort to cancel the operation, but + success is not guaranteed. If the server doesn't support this + method, it returns ``google.rpc.Code.UNIMPLEMENTED``. Clients + can use + [Operations.GetOperation][google.api_core.operations_v1.Operations.GetOperation] + or other methods to check whether the cancellation succeeded or + whether the operation completed despite cancellation. On + successful cancellation, the operation is not deleted; instead, + it becomes an operation with an + [Operation.error][google.api_core.operations_v1.Operation.error] value with + a [google.rpc.Status.code][google.rpc.Status.code] of 1, + corresponding to ``Code.CANCELLED``. + + Args: + name (str): + The name of the operation resource to + be cancelled. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create the request object. + request = operations_pb2.CancelOperationRequest(name=name) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.cancel_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata or ()) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) diff --git a/google/api_core/operations_v1/pagers.py b/google/api_core/operations_v1/pagers.py index b8a47757b..76efd5946 100644 --- a/google/api_core/operations_v1/pagers.py +++ b/google/api_core/operations_v1/pagers.py @@ -14,7 +14,6 @@ # limitations under the License. # from typing import ( - Any, Callable, Iterator, Sequence, @@ -22,9 +21,10 @@ ) from google.longrunning import operations_pb2 +from google.api_core.operations_v1.pagers_base import ListOperationsPagerBase -class ListOperationsPager: +class ListOperationsPager(ListOperationsPagerBase): """A pager for iterating through ``list_operations`` requests. 
This class thinly wraps an initial @@ -48,27 +48,11 @@ def __init__( request: operations_pb2.ListOperationsRequest, response: operations_pb2.ListOperationsResponse, *, - metadata: Sequence[Tuple[str, str]] = () + metadata: Sequence[Tuple[str, str]] = (), ): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.longrunning.operations_pb2.ListOperationsRequest): - The initial request object. - response (google.longrunning.operations_pb2.ListOperationsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = request - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) + super().__init__( + method=method, request=request, response=response, metadata=metadata + ) @property def pages(self) -> Iterator[operations_pb2.ListOperationsResponse]: @@ -81,6 +65,3 @@ def pages(self) -> Iterator[operations_pb2.ListOperationsResponse]: def __iter__(self) -> Iterator[operations_pb2.Operation]: for page in self.pages: yield from page.operations - - def __repr__(self) -> str: - return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/google/api_core/operations_v1/pagers_async.py b/google/api_core/operations_v1/pagers_async.py new file mode 100644 index 000000000..4bb7f8c7d --- /dev/null +++ b/google/api_core/operations_v1/pagers_async.py @@ -0,0 +1,71 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Callable, + AsyncIterator, + Sequence, + Tuple, +) + +from google.longrunning import operations_pb2 +from google.api_core.operations_v1.pagers_base import ListOperationsPagerBase + + +class ListOperationsAsyncPager(ListOperationsPagerBase): + """A pager for iterating through ``list_operations`` requests. + + This class thinly wraps an initial + :class:`google.longrunning.operations_pb2.ListOperationsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``operations`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListOperations`` requests and continue to iterate + through the ``operations`` field on the + corresponding responses. + + All the usual :class:`google.longrunning.operations_pb2.ListOperationsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup.
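The "most recent response" caveat in the docstring above has a concrete consequence worth showing. A sketch against the synchronous pager (``creds`` is any ``google.auth`` credentials instance; the ``name`` value is illustrative):

from google.api_core.operations_v1 import AbstractOperationsClient

client = AbstractOperationsClient(credentials=creds)
pager = client.list_operations(name="operations")
print(pager.next_page_token)  # proxied to the first response via __getattr__
for operation in pager:       # iterating may fetch further pages...
    pass
print(pager.next_page_token)  # ...after which attributes reflect only the last one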
+ """ + + def __init__( + self, + method: Callable[..., operations_pb2.ListOperationsResponse], + request: operations_pb2.ListOperationsRequest, + response: operations_pb2.ListOperationsResponse, + *, + metadata: Sequence[Tuple[str, str]] = (), + ): + super().__init__( + method=method, request=request, response=response, metadata=metadata + ) + + @property + async def pages(self) -> AsyncIterator[operations_pb2.ListOperationsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterator[operations_pb2.Operation]: + async def async_generator(): + async for page in self.pages: + for operation in page.operations: + yield operation + + return async_generator() diff --git a/google/api_core/operations_v1/pagers_base.py b/google/api_core/operations_v1/pagers_base.py new file mode 100644 index 000000000..5ef8384ef --- /dev/null +++ b/google/api_core/operations_v1/pagers_base.py @@ -0,0 +1,73 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + Callable, + Sequence, + Tuple, +) + +from google.longrunning import operations_pb2 + + +class ListOperationsPagerBase: + """A pager for iterating through ``list_operations`` requests. + + This class thinly wraps an initial + :class:`google.longrunning.operations_pb2.ListOperationsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``operations`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListOperations`` requests and continue to iterate + through the ``operations`` field on the + corresponding responses. + + All the usual :class:`google.longrunning.operations_pb2.ListOperationsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., operations_pb2.ListOperationsResponse], + request: operations_pb2.ListOperationsRequest, + response: operations_pb2.ListOperationsResponse, + *, + metadata: Sequence[Tuple[str, str]] = (), + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.longrunning.operations_pb2.ListOperationsRequest): + The initial request object. + response (google.longrunning.operations_pb2.ListOperationsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = request + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/google/api_core/operations_v1/transports/__init__.py b/google/api_core/operations_v1/transports/__init__.py index df53e15e4..8c24ce6ef 100644 --- a/google/api_core/operations_v1/transports/__init__.py +++ b/google/api_core/operations_v1/transports/__init__.py @@ -14,16 +14,26 @@ # limitations under the License. # from collections import OrderedDict +from typing import cast, Dict, Tuple from .base import OperationsTransport from .rest import OperationsRestTransport - # Compile a registry of transports. -_transport_registry = OrderedDict() -_transport_registry["rest"] = OperationsRestTransport +_transport_registry: Dict[str, OperationsTransport] = OrderedDict() +_transport_registry["rest"] = cast(OperationsTransport, OperationsRestTransport) + +__all__: Tuple[str, ...] = ("OperationsTransport", "OperationsRestTransport") + +try: + from .rest_asyncio import AsyncOperationsRestTransport -__all__ = ( - "OperationsTransport", - "OperationsRestTransport", -) + __all__ += ("AsyncOperationsRestTransport",) + _transport_registry["rest_asyncio"] = cast( + OperationsTransport, AsyncOperationsRestTransport + ) +except ImportError: + # This import requires the `async_rest` extra. + # Don't raise an exception if `AsyncOperationsRestTransport` cannot be imported + # as other transports are still available. + pass diff --git a/google/api_core/operations_v1/transports/base.py b/google/api_core/operations_v1/transports/base.py index 98cf7896a..46c2f5d16 100644 --- a/google/api_core/operations_v1/transports/base.py +++ b/google/api_core/operations_v1/transports/base.py @@ -14,20 +14,26 @@ # limitations under the License. # import abc +import re from typing import Awaitable, Callable, Optional, Sequence, Union +import warnings -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore -from google.api_core import version import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.longrunning import operations_pb2 from google.oauth2 import service_account # type: ignore -from google.protobuf import empty_pb2 # type: ignore +import google.protobuf +from google.protobuf import empty_pb2, json_format # type: ignore from grpc import Compression +import google.api_core # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import general_helpers +from google.api_core import retry as retries # type: ignore +from google.api_core import version + +PROTOBUF_VERSION = google.protobuf.__version__ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=version.__version__, @@ -45,12 +51,14 @@ def __init__( self, *, host: str = DEFAULT_HOST, - credentials: ga_credentials.Credentials = None, + # TODO(https://github.com/googleapis/python-api-core/issues/709): update type hint for credentials to include `google.auth.aio.Credentials`. 
+ credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool] = False, + url_scheme="https", **kwargs, ) -> None: """Instantiate the transport. @@ -63,9 +71,22 @@ credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. + This argument is mutually exclusive with credentials. This argument will be + removed in the next major version of `google-api-core`. + + .. warning:: + Important: If you accept a credential configuration (credential JSON/File/Stream) + from an external source for authentication to Google Cloud Platform, you must + validate it before providing it to any Google API or client library. Providing an + unvalidated credential configuration to Google APIs or libraries can compromise + the security of your systems and data. For more information, refer to + `Validate credential configurations from external sources`_. + + .. _Validate credential configurations from external sources: + + https://cloud.google.com/docs/authentication/external/externally-sourced-credentials scopes (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. @@ -76,10 +97,26 @@ your own client library. always_use_jwt_access (Optional[bool]): Whether self signed JWT should be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. """ + if credentials_file is not None: + warnings.warn(general_helpers._CREDENTIALS_FILE_WARNING, DeprecationWarning) + + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + # Save the hostname. Default to port 443 (HTTPS) if none is specified. if ":" not in host: - host += ":443" + host += ":443" # pragma: NO COVER self._host = host scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} @@ -189,6 +226,37 @@ def close(self): """ raise NotImplementedError() + def _convert_protobuf_message_to_dict( + self, message: google.protobuf.message.Message + ): + r"""Converts protobuf message to a dictionary. + + When the dictionary is encoded to JSON, it conforms to proto3 JSON spec. + + Args: + message(google.protobuf.message.Message): The protocol buffers message + instance to serialize. + + Returns: + A dict representation of the protocol buffer message. + """ + # TODO(https://github.com/googleapis/python-api-core/issues/643): For backwards compatibility + # with protobuf 3.x 4.x, Remove once support for protobuf 3.x and 4.x is dropped.
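The host normalization moved into the base transport above behaves in a slightly non-obvious way: once a scheme has been prepended, the "://" already contains a colon, so the ":443" branch can no longer fire, which is why it carries "pragma: NO COVER". A standalone rendering of that logic:

import re

def normalize_host(host, url_scheme="https"):
    # Mirrors the normalization in OperationsTransport.__init__ above.
    match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
    if not match.groupdict()["scheme"]:
        host = f"{url_scheme}://{host}"
    if ":" not in host:  # unreachable once a scheme is present
        host += ":443"
    return host

assert normalize_host("longrunning.googleapis.com") == "https://longrunning.googleapis.com"
assert normalize_host("http://localhost:8080") == "http://localhost:8080"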
+ if PROTOBUF_VERSION[0:2] in ["3.", "4."]: + result = json_format.MessageToDict( + message, + preserving_proto_field_name=True, + including_default_value_fields=True, # type: ignore # backward compatibility + ) + else: + result = json_format.MessageToDict( + message, + preserving_proto_field_name=True, + always_print_fields_with_no_presence=True, + ) + + return result + @property def list_operations( self, diff --git a/google/api_core/operations_v1/transports/rest.py b/google/api_core/operations_v1/transports/rest.py index 49f99d210..62f34d69f 100644 --- a/google/api_core/operations_v1/transports/rest.py +++ b/google/api_core/operations_v1/transports/rest.py @@ -14,30 +14,36 @@ # limitations under the License. # -import re from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.longrunning import operations_pb2 # type: ignore +import google.protobuf +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import json_format # type: ignore +import grpc from requests import __version__ as requests_version from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore +from google.api_core import general_helpers from google.api_core import path_template # type: ignore from google.api_core import rest_helpers # type: ignore from google.api_core import retry as retries # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.requests import AuthorizedSession # type: ignore -from google.longrunning import operations_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import json_format # type: ignore -import grpc -from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO, OperationsTransport + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .base import OperationsTransport + +PROTOBUF_VERSION = google.protobuf.__version__ OptionalRetry = Union[retries.Retry, object] DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, grpc_version=None, - rest_version=requests_version, + rest_version=f"requests@{requests_version}", ) @@ -66,7 +72,7 @@ def __init__( self, *, host: str = "longrunning.googleapis.com", - credentials: ga_credentials.Credentials = None, + credentials: Optional[ga_credentials.Credentials] = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, @@ -88,9 +94,22 @@ def __init__( are specified, the client will attempt to ascertain the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can + credentials_file (Optional[str]): Deprecated. A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. + This argument is ignored if ``channel`` is provided. This argument will be + removed in the next major version of `google-api-core`. + + .. warning:: + Important: If you accept a credential configuration (credential JSON/File/Stream) + from an external source for authentication to Google Cloud Platform, you must + validate it before providing it to any Google API or client library. 
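The version check in ``_convert_protobuf_message_to_dict`` above exists because protobuf 5.x renamed the kwarg that forces default-valued fields into the output. A sketch of the protobuf >= 5 spelling only (older versions use ``including_default_value_fields`` instead, which is exactly what the helper selects):

from google.longrunning import operations_pb2
from google.protobuf import json_format

request = operations_pb2.GetOperationRequest(name="operations/sample")
as_dict = json_format.MessageToDict(
    request,
    preserving_proto_field_name=True,
    always_print_fields_with_no_presence=True,  # protobuf >= 5 spelling
)
assert as_dict == {"name": "operations/sample"}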
Providing an + unvalidated credential configuration to Google APIs or libraries can compromise + the security of your systems and data. For more information, refer to + `Validate credential configuration from external sources`_. + + .. _Validate credential configuration from external sources: + + https://cloud.google.com/docs/authentication/external/externally-sourced-credentials scopes (Optional(Sequence[str])): A list of scopes. This argument is ignored if ``channel`` is provided. client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client @@ -115,20 +134,13 @@ "v1" by default. """ + if credentials_file is not None: + warnings.warn(general_helpers._CREDENTIALS_FILE_WARNING, DeprecationWarning) + # Run the base constructor # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the # credentials object - maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) - if maybe_url_match is None: - raise ValueError( - f"Unexpected hostname structure: {host}" - ) # pragma: NO COVER - - url_match_items = maybe_url_match.groupdict() - - host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host - super().__init__( host=host, credentials=credentials, @@ -140,6 +152,7 @@ ) if client_cert_source_for_mtls: self._session.configure_mtls_channel(client_cert_source_for_mtls) + # TODO(https://github.com/googleapis/python-api-core/issues/720): Add wrap logic directly to the property methods for callables. self._prep_wrapped_messages(client_info) self._http_options = http_options or {} self._path_prefix = path_prefix @@ -148,6 +161,8 @@ def _list_operations( self, request: operations_pb2.ListOperationsRequest, *, + # TODO(https://github.com/googleapis/python-api-core/issues/723): Leverage `retry` + # to allow configuring retryable error codes. retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, compression: Optional[grpc.Compression] = gapic_v1.method.DEFAULT, @@ -184,11 +199,7 @@ def _list_operations( "google.longrunning.Operations.ListOperations" ] - request_kwargs = json_format.MessageToDict( - request, - preserving_proto_field_name=True, - including_default_value_fields=True, - ) + request_kwargs = self._convert_protobuf_message_to_dict(request) transcoded_request = path_template.transcode(http_options, **request_kwargs) uri = transcoded_request["uri"] @@ -199,7 +210,6 @@ def _list_operations( json_format.ParseDict(transcoded_request["query_params"], query_params_request) query_params = json_format.MessageToDict( query_params_request, - including_default_value_fields=False, preserving_proto_field_name=False, use_integers_for_enums=False, ) @@ -207,6 +217,7 @@ def _list_operations( # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" + # TODO(https://github.com/googleapis/python-api-core/issues/721): Update incorrect use of `uri`` variable name. response = getattr(self._session, method)( "{host}{uri}".format(host=self._host, uri=uri), timeout=timeout, @@ -228,6 +239,8 @@ def _get_operation( self, request: operations_pb2.GetOperationRequest, *, + # TODO(https://github.com/googleapis/python-api-core/issues/723): Leverage `retry` + # to allow configuring retryable error codes.
retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, compression: Optional[grpc.Compression] = gapic_v1.method.DEFAULT, @@ -265,11 +278,7 @@ def _get_operation( "google.longrunning.Operations.GetOperation" ] - request_kwargs = json_format.MessageToDict( - request, - preserving_proto_field_name=True, - including_default_value_fields=True, - ) + request_kwargs = self._convert_protobuf_message_to_dict(request) transcoded_request = path_template.transcode(http_options, **request_kwargs) uri = transcoded_request["uri"] @@ -280,7 +289,6 @@ def _get_operation( json_format.ParseDict(transcoded_request["query_params"], query_params_request) query_params = json_format.MessageToDict( query_params_request, - including_default_value_fields=False, preserving_proto_field_name=False, use_integers_for_enums=False, ) @@ -288,6 +296,7 @@ def _get_operation( # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" + # TODO(https://github.com/googleapis/python-api-core/issues/721): Update incorrect use of `uri`` variable name. response = getattr(self._session, method)( "{host}{uri}".format(host=self._host, uri=uri), timeout=timeout, @@ -309,6 +318,8 @@ def _delete_operation( self, request: operations_pb2.DeleteOperationRequest, *, + # TODO(https://github.com/googleapis/python-api-core/issues/723): Leverage `retry` + # to allow configuring retryable error codes. retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, compression: Optional[grpc.Compression] = gapic_v1.method.DEFAULT, @@ -339,11 +350,7 @@ def _delete_operation( "google.longrunning.Operations.DeleteOperation" ] - request_kwargs = json_format.MessageToDict( - request, - preserving_proto_field_name=True, - including_default_value_fields=True, - ) + request_kwargs = self._convert_protobuf_message_to_dict(request) transcoded_request = path_template.transcode(http_options, **request_kwargs) uri = transcoded_request["uri"] @@ -354,7 +361,6 @@ def _delete_operation( json_format.ParseDict(transcoded_request["query_params"], query_params_request) query_params = json_format.MessageToDict( query_params_request, - including_default_value_fields=False, preserving_proto_field_name=False, use_integers_for_enums=False, ) @@ -362,6 +368,7 @@ def _delete_operation( # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" + # TODO(https://github.com/googleapis/python-api-core/issues/721): Update incorrect use of `uri`` variable name. response = getattr(self._session, method)( "{host}{uri}".format(host=self._host, uri=uri), timeout=timeout, @@ -380,6 +387,8 @@ def _cancel_operation( self, request: operations_pb2.CancelOperationRequest, *, + # TODO(https://github.com/googleapis/python-api-core/issues/723): Leverage `retry` + # to allow configuring retryable error codes. 
retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, compression: Optional[grpc.Compression] = gapic_v1.method.DEFAULT, @@ -411,11 +420,7 @@ def _cancel_operation( "google.longrunning.Operations.CancelOperation" ] - request_kwargs = json_format.MessageToDict( - request, - preserving_proto_field_name=True, - including_default_value_fields=True, - ) + request_kwargs = self._convert_protobuf_message_to_dict(request) transcoded_request = path_template.transcode(http_options, **request_kwargs) # Jsonify the request body @@ -423,7 +428,6 @@ def _cancel_operation( json_format.ParseDict(transcoded_request["body"], body_request) body = json_format.MessageToDict( body_request, - including_default_value_fields=False, preserving_proto_field_name=False, use_integers_for_enums=False, ) @@ -435,7 +439,6 @@ def _cancel_operation( json_format.ParseDict(transcoded_request["query_params"], query_params_request) query_params = json_format.MessageToDict( query_params_request, - including_default_value_fields=False, preserving_proto_field_name=False, use_integers_for_enums=False, ) @@ -443,6 +446,7 @@ def _cancel_operation( # Send the request headers = dict(metadata) headers["Content-Type"] = "application/json" + # TODO(https://github.com/googleapis/python-api-core/issues/721): Update incorrect use of `uri`` variable name. response = getattr(self._session, method)( "{host}{uri}".format(host=self._host, uri=uri), timeout=timeout, diff --git a/google/api_core/operations_v1/transports/rest_asyncio.py b/google/api_core/operations_v1/transports/rest_asyncio.py new file mode 100644 index 000000000..6fa9f56aa --- /dev/null +++ b/google/api_core/operations_v1/transports/rest_asyncio.py @@ -0,0 +1,581 @@ +# -*- coding: utf-8 -*- +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import json +from typing import Any, Callable, Coroutine, Dict, Optional, Sequence, Tuple +import warnings + +from google.auth import __version__ as auth_version + +try: + from google.auth.aio.transport.sessions import AsyncAuthorizedSession # type: ignore +except ImportError as e: # pragma: NO COVER + raise ImportError( + "The `async_rest` extra of `google-api-core` is required to use long-running operations. Install it by running " + "`pip install google-api-core[async_rest]`." 
+    ) from e
+
+from google.api_core import exceptions as core_exceptions  # type: ignore
+from google.api_core import gapic_v1  # type: ignore
+from google.api_core import general_helpers
+from google.api_core import path_template  # type: ignore
+from google.api_core import rest_helpers  # type: ignore
+from google.api_core import retry_async as retries_async  # type: ignore
+from google.auth.aio import credentials as ga_credentials_async  # type: ignore
+from google.longrunning import operations_pb2  # type: ignore
+from google.protobuf import empty_pb2  # type: ignore
+from google.protobuf import json_format  # type: ignore
+
+from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO, OperationsTransport
+
+DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
+    gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version,
+    grpc_version=None,
+    rest_version=f"google-auth@{auth_version}",
+)
+
+
+class AsyncOperationsRestTransport(OperationsTransport):
+    """Asynchronous REST backend transport for Operations.
+
+    Manages async long-running operations with an API service.
+
+    When an API method normally takes a long time to complete, it can be
+    designed to return [Operation][google.api_core.operations_v1.Operation] to the
+    client, and the client can use this interface to receive the real
+    response asynchronously by polling the operation resource, or pass
+    the operation resource to another API (such as the Google Cloud Pub/Sub
+    API) to receive the response. Any API service that returns
+    long-running operations should implement the ``Operations``
+    interface so developers can have a consistent client experience.
+
+    This class defines the same methods as the primary client, so the
+    primary client can load the underlying transport implementation
+    and call it.
+
+    It sends JSON representations of protocol buffers over HTTP/1.1.
+    """
+
+    def __init__(
+        self,
+        *,
+        host: str = "longrunning.googleapis.com",
+        credentials: Optional[ga_credentials_async.Credentials] = None,
+        credentials_file: Optional[str] = None,
+        scopes: Optional[Sequence[str]] = None,
+        client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+        always_use_jwt_access: Optional[bool] = False,
+        url_scheme: str = "https",
+        http_options: Optional[Dict] = None,
+        path_prefix: str = "v1",
+        # TODO(https://github.com/googleapis/python-api-core/issues/715): Add docstring for `credentials_file` to async REST transport.
+        # TODO(https://github.com/googleapis/python-api-core/issues/716): Add docstring for `scopes` to async REST transport.
+        # TODO(https://github.com/googleapis/python-api-core/issues/717): Add docstring for `quota_project_id` to async REST transport.
+        # TODO(https://github.com/googleapis/python-api-core/issues/718): Add docstring for `client_cert_source` to async REST transport.
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                The hostname to connect to.
+            credentials (Optional[google.auth.aio.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            credentials_file (Optional[str]): Deprecated. A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is ignored if ``channel`` is provided.
+                This argument will be removed in the next major
+                version of `google-api-core`.
+
+                .. warning::
+                    Important: If you accept a credential configuration (credential JSON/File/Stream)
+                    from an external source for authentication to Google Cloud Platform, you must
+                    validate it before providing it to any Google API or client library. Providing an
+                    unvalidated credential configuration to Google APIs or libraries can compromise
+                    the security of your systems and data. For more information, refer to
+                    `Validate credential configurations from external sources`_.
+
+                    .. _Validate credential configurations from external sources:
+
+                    https://cloud.google.com/docs/authentication/external/externally-sourced-credentials
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self-signed JWT should
+                be used for service account credentials.
+            url_scheme: the protocol scheme for the API endpoint. Normally
+                "https", but for testing or local servers,
+                "http" can be specified.
+            http_options: a dictionary of http_options for transcoding, to override
+                the defaults from operations.proto. Each method has an entry
+                with the corresponding http rules as value.
+            path_prefix: path prefix (usually represents API version). Set to
+                "v1" by default.
+
+        """
+        if credentials_file is not None:
+            warnings.warn(general_helpers._CREDENTIALS_FILE_WARNING, DeprecationWarning)
+
+        unsupported_params = {
+            # TODO(https://github.com/googleapis/python-api-core/issues/715): Add support for `credentials_file` to async REST transport.
+            "google.api_core.client_options.ClientOptions.credentials_file": credentials_file,
+            # TODO(https://github.com/googleapis/python-api-core/issues/716): Add support for `scopes` to async REST transport.
+            "google.api_core.client_options.ClientOptions.scopes": scopes,
+            # TODO(https://github.com/googleapis/python-api-core/issues/717): Add support for `quota_project_id` to async REST transport.
+            "google.api_core.client_options.ClientOptions.quota_project_id": quota_project_id,
+            # TODO(https://github.com/googleapis/python-api-core/issues/718): Add support for `client_cert_source` to async REST transport.
+            "google.api_core.client_options.ClientOptions.client_cert_source": client_cert_source_for_mtls,
+        }
+        provided_unsupported_params = [
+            name for name, value in unsupported_params.items() if value is not None
+        ]
+        if provided_unsupported_params:
+            raise core_exceptions.AsyncRestUnsupportedParameterError(
+                f"The following provided parameters are not supported for `transport=rest_asyncio`: {', '.join(provided_unsupported_params)}"
+            )
+
+        super().__init__(
+            host=host,
+            # TODO(https://github.com/googleapis/python-api-core/issues/709): Remove `type: ignore` when the linked issue is resolved.
+            credentials=credentials,  # type: ignore
+            client_info=client_info,
+            # TODO(https://github.com/googleapis/python-api-core/issues/725): Set always_use_jwt_access token when supported.
+ always_use_jwt_access=False, + ) + # TODO(https://github.com/googleapis/python-api-core/issues/708): add support for + # `default_host` in AsyncAuthorizedSession for feature parity with the synchronous + # code. + # TODO(https://github.com/googleapis/python-api-core/issues/709): Remove `type: ignore` when the linked issue is resolved. + self._session = AsyncAuthorizedSession(self._credentials) # type: ignore + # TODO(https://github.com/googleapis/python-api-core/issues/720): Add wrap logic directly to the property methods for callables. + self._prep_wrapped_messages(client_info) + self._http_options = http_options or {} + self._path_prefix = path_prefix + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.list_operations: gapic_v1.method_async.wrap_method( + self.list_operations, + default_retry=retries_async.AsyncRetry( + initial=0.5, + maximum=10.0, + multiplier=2.0, + predicate=retries_async.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=10.0, + ), + default_timeout=10.0, + client_info=client_info, + kind="rest_asyncio", + ), + self.get_operation: gapic_v1.method_async.wrap_method( + self.get_operation, + default_retry=retries_async.AsyncRetry( + initial=0.5, + maximum=10.0, + multiplier=2.0, + predicate=retries_async.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=10.0, + ), + default_timeout=10.0, + client_info=client_info, + kind="rest_asyncio", + ), + self.delete_operation: gapic_v1.method_async.wrap_method( + self.delete_operation, + default_retry=retries_async.AsyncRetry( + initial=0.5, + maximum=10.0, + multiplier=2.0, + predicate=retries_async.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=10.0, + ), + default_timeout=10.0, + client_info=client_info, + kind="rest_asyncio", + ), + self.cancel_operation: gapic_v1.method_async.wrap_method( + self.cancel_operation, + default_retry=retries_async.AsyncRetry( + initial=0.5, + maximum=10.0, + multiplier=2.0, + predicate=retries_async.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=10.0, + ), + default_timeout=10.0, + client_info=client_info, + kind="rest_asyncio", + ), + } + + async def _list_operations( + self, + request: operations_pb2.ListOperationsRequest, + *, + # TODO(https://github.com/googleapis/python-api-core/issues/722): Leverage `retry` + # to allow configuring retryable error codes. + retry=gapic_v1.method_async.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Asynchronously call the list operations method over HTTP. + + Args: + request (~.operations_pb2.ListOperationsRequest): + The request object. The request message for + [Operations.ListOperations][google.api_core.operations_v1.Operations.ListOperations]. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.ListOperationsResponse: + The response message for + [Operations.ListOperations][google.api_core.operations_v1.Operations.ListOperations]. 
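+
+        Example:
+            An illustrative sketch only, not generated API surface: the
+            resource name is a placeholder, ``transport`` is an already
+            constructed ``AsyncOperationsRestTransport``, and a reachable
+            service with valid credentials is assumed.
+
+            .. code-block:: python
+
+                request = operations_pb2.ListOperationsRequest(name="operations")
+                response = await transport.list_operations(request)
+                while True:
+                    for operation in response.operations:
+                        print(operation.name)
+                    # Follow the page token until the listing is exhausted.
+                    if not response.next_page_token:
+                        break
+                    request.page_token = response.next_page_token
+                    response = await transport.list_operations(request)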
+ + """ + + http_options = [ + { + "method": "get", + "uri": "/{}/{{name=**}}/operations".format(self._path_prefix), + }, + ] + if "google.longrunning.Operations.ListOperations" in self._http_options: + http_options = self._http_options[ + "google.longrunning.Operations.ListOperations" + ] + + request_kwargs = self._convert_protobuf_message_to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params_request = operations_pb2.ListOperationsRequest() + json_format.ParseDict(transcoded_request["query_params"], query_params_request) + query_params = json_format.MessageToDict( + query_params_request, + preserving_proto_field_name=False, + use_integers_for_enums=False, + ) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + # TODO(https://github.com/googleapis/python-api-core/issues/721): Update incorrect use of `uri`` variable name. + response = await getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + content = await response.read() + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + payload = json.loads(content.decode("utf-8")) + request_url = "{host}{uri}".format(host=self._host, uri=uri) + raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + + # Return the response + api_response = operations_pb2.ListOperationsResponse() + json_format.Parse(content, api_response, ignore_unknown_fields=False) + return api_response + + async def _get_operation( + self, + request: operations_pb2.GetOperationRequest, + *, + # TODO(https://github.com/googleapis/python-api-core/issues/722): Leverage `retry` + # to allow configuring retryable error codes. + retry=gapic_v1.method_async.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> operations_pb2.Operation: + r"""Asynchronously call the get operation method over HTTP. + + Args: + request (~.operations_pb2.GetOperationRequest): + The request object. The request message for + [Operations.GetOperation][google.api_core.operations_v1.Operations.GetOperation]. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.operations_pb2.Operation: + This resource represents a long- + running operation that is the result of a + network API call. 
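+
+        Example:
+            A hedged sketch of polling an operation until the server reports
+            it as done (``transport`` and ``name`` are placeholders; a real
+            service is assumed):
+
+            .. code-block:: python
+
+                import asyncio
+
+                async def wait_for_done(transport, name, interval=1.0):
+                    while True:
+                        op = await transport.get_operation(
+                            operations_pb2.GetOperationRequest(name=name)
+                        )
+                        if op.done:
+                            return op
+                        # Simple fixed-interval poll; production code would
+                        # normally use a retry/polling policy instead.
+                        await asyncio.sleep(interval)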
+ + """ + + http_options = [ + { + "method": "get", + "uri": "/{}/{{name=**/operations/*}}".format(self._path_prefix), + }, + ] + if "google.longrunning.Operations.GetOperation" in self._http_options: + http_options = self._http_options[ + "google.longrunning.Operations.GetOperation" + ] + + request_kwargs = self._convert_protobuf_message_to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params_request = operations_pb2.GetOperationRequest() + json_format.ParseDict(transcoded_request["query_params"], query_params_request) + query_params = json_format.MessageToDict( + query_params_request, + preserving_proto_field_name=False, + use_integers_for_enums=False, + ) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + # TODO(https://github.com/googleapis/python-api-core/issues/721): Update incorrect use of `uri`` variable name. + response = await getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + ) + content = await response.read() + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + payload = json.loads(content.decode("utf-8")) + request_url = "{host}{uri}".format(host=self._host, uri=uri) + raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + + # Return the response + api_response = operations_pb2.Operation() + json_format.Parse(content, api_response, ignore_unknown_fields=False) + return api_response + + async def _delete_operation( + self, + request: operations_pb2.DeleteOperationRequest, + *, + # TODO(https://github.com/googleapis/python-api-core/issues/722): Leverage `retry` + # to allow configuring retryable error codes. + retry=gapic_v1.method_async.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> empty_pb2.Empty: + r"""Asynchronously call the delete operation method over HTTP. + + Args: + request (~.operations_pb2.DeleteOperationRequest): + The request object. The request message for + [Operations.DeleteOperation][google.api_core.operations_v1.Operations.DeleteOperation]. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
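+
+        Example:
+            HTTP errors are re-raised as ``google.api_core.exceptions``
+            subclasses (see the error handling below), so a missing operation
+            can be tolerated. A sketch with a placeholder ``name``:
+
+            .. code-block:: python
+
+                from google.api_core import exceptions
+
+                try:
+                    await transport.delete_operation(
+                        operations_pb2.DeleteOperationRequest(name=name)
+                    )
+                except exceptions.NotFound:
+                    pass  # Already deleted; nothing left to do.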
+        """
+
+        http_options = [
+            {
+                "method": "delete",
+                "uri": "/{}/{{name=**/operations/*}}".format(self._path_prefix),
+            },
+        ]
+        if "google.longrunning.Operations.DeleteOperation" in self._http_options:
+            http_options = self._http_options[
+                "google.longrunning.Operations.DeleteOperation"
+            ]
+
+        request_kwargs = self._convert_protobuf_message_to_dict(request)
+        transcoded_request = path_template.transcode(http_options, **request_kwargs)
+
+        uri = transcoded_request["uri"]
+        method = transcoded_request["method"]
+
+        # Jsonify the query params
+        query_params_request = operations_pb2.DeleteOperationRequest()
+        json_format.ParseDict(transcoded_request["query_params"], query_params_request)
+        query_params = json_format.MessageToDict(
+            query_params_request,
+            preserving_proto_field_name=False,
+            use_integers_for_enums=False,
+        )
+
+        # Send the request
+        headers = dict(metadata)
+        headers["Content-Type"] = "application/json"
+        # TODO(https://github.com/googleapis/python-api-core/issues/721): Update incorrect use of `uri`` variable name.
+        response = await getattr(self._session, method)(
+            "{host}{uri}".format(host=self._host, uri=uri),
+            timeout=timeout,
+            headers=headers,
+            params=rest_helpers.flatten_query_params(query_params),
+        )
+
+        # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception
+        # subclass.
+        if response.status_code >= 400:
+            content = await response.read()
+            payload = json.loads(content.decode("utf-8"))
+            request_url = "{host}{uri}".format(host=self._host, uri=uri)
+            raise core_exceptions.format_http_response_error(response, method, request_url, payload)  # type: ignore
+
+        return empty_pb2.Empty()
+
+    async def _cancel_operation(
+        self,
+        request: operations_pb2.CancelOperationRequest,
+        *,
+        # TODO(https://github.com/googleapis/python-api-core/issues/722): Leverage `retry`
+        # to allow configuring retryable error codes.
+        retry=gapic_v1.method_async.DEFAULT,
+        timeout: Optional[float] = None,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> empty_pb2.Empty:
+        r"""Asynchronously call the cancel operation method over HTTP.
+
+        Args:
+            request (~.operations_pb2.CancelOperationRequest):
+                The request object. The request message for
+                [Operations.CancelOperation][google.api_core.operations_v1.Operations.CancelOperation].
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
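+
+        Example:
+            Cancellation is best-effort on the server side. An illustrative
+            sketch that requests cancellation and then re-checks the
+            operation (``transport`` and ``name`` are placeholders):
+
+            .. code-block:: python
+
+                await transport.cancel_operation(
+                    operations_pb2.CancelOperationRequest(name=name)
+                )
+                op = await transport.get_operation(
+                    operations_pb2.GetOperationRequest(name=name)
+                )
+                # A successfully cancelled operation completes with an error
+                # whose code is google.rpc.Code.CANCELLED.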
+ """ + + http_options = [ + { + "method": "post", + "uri": "/{}/{{name=**/operations/*}}:cancel".format(self._path_prefix), + "body": "*", + }, + ] + if "google.longrunning.Operations.CancelOperation" in self._http_options: + http_options = self._http_options[ + "google.longrunning.Operations.CancelOperation" + ] + + request_kwargs = self._convert_protobuf_message_to_dict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + + # Jsonify the request body + body_request = operations_pb2.CancelOperationRequest() + json_format.ParseDict(transcoded_request["body"], body_request) + body = json_format.MessageToDict( + body_request, + preserving_proto_field_name=False, + use_integers_for_enums=False, + ) + uri = transcoded_request["uri"] + method = transcoded_request["method"] + + # Jsonify the query params + query_params_request = operations_pb2.CancelOperationRequest() + json_format.ParseDict(transcoded_request["query_params"], query_params_request) + query_params = json_format.MessageToDict( + query_params_request, + preserving_proto_field_name=False, + use_integers_for_enums=False, + ) + + # Send the request + headers = dict(metadata) + headers["Content-Type"] = "application/json" + # TODO(https://github.com/googleapis/python-api-core/issues/721): Update incorrect use of `uri`` variable name. + response = await getattr(self._session, method)( + "{host}{uri}".format(host=self._host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params), + data=body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + content = await response.read() + payload = json.loads(content.decode("utf-8")) + request_url = "{host}{uri}".format(host=self._host, uri=uri) + raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + + return empty_pb2.Empty() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Coroutine[Any, Any, operations_pb2.ListOperationsResponse], + ]: + return self._list_operations + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], + Coroutine[Any, Any, operations_pb2.Operation], + ]: + return self._get_operation + + @property + def delete_operation( + self, + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], Coroutine[Any, Any, empty_pb2.Empty] + ]: + return self._delete_operation + + @property + def cancel_operation( + self, + ) -> Callable[ + [operations_pb2.CancelOperationRequest], Coroutine[Any, Any, empty_pb2.Empty] + ]: + return self._cancel_operation + + +__all__ = ("AsyncOperationsRestTransport",) diff --git a/google/api_core/protobuf_helpers.py b/google/api_core/protobuf_helpers.py index d777c5f89..30cd7c859 100644 --- a/google/api_core/protobuf_helpers.py +++ b/google/api_core/protobuf_helpers.py @@ -63,9 +63,7 @@ def from_any_pb(pb_type, any_pb): # Unpack the Any object and populate the protobuf message instance. if not any_pb.Unpack(msg_pb): raise TypeError( - "Could not convert {} to {}".format( - any_pb.__class__.__name__, pb_type.__name__ - ) + f"Could not convert `{any_pb.TypeName()}` with underlying type `google.protobuf.any_pb2.Any` to `{msg_pb.DESCRIPTOR.full_name}`" ) # Done; return the message. 
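The reworked `from_any_pb` error above names both the packed type (via `Any.TypeName()`) and the requested target type. A minimal illustrative sketch of the failure mode it reports, using arbitrary well-known types rather than anything from this library:

.. code-block:: python

    from google.protobuf import any_pb2, duration_pb2, timestamp_pb2
    from google.api_core import protobuf_helpers

    any_msg = any_pb2.Any()
    any_msg.Pack(duration_pb2.Duration(seconds=600))

    # Unpacking into a mismatched message class raises, e.g.:
    # TypeError: Could not convert `google.protobuf.Duration` with underlying
    # type `google.protobuf.any_pb2.Any` to `google.protobuf.Timestamp`
    protobuf_helpers.from_any_pb(timestamp_pb2.Timestamp, any_msg)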
diff --git a/google/api_core/rest_streaming.py b/google/api_core/rest_streaming.py index f91381c14..84aa270c0 100644 --- a/google/api_core/rest_streaming.py +++ b/google/api_core/rest_streaming.py @@ -14,86 +14,43 @@ """Helpers for server-side streaming in REST.""" -from collections import deque -import string -from typing import Deque +from typing import Union +import proto import requests +import google.protobuf.message +from google.api_core._rest_streaming_base import BaseResponseIterator -class ResponseIterator: +class ResponseIterator(BaseResponseIterator): """Iterator over REST API responses. Args: response (requests.Response): An API response object. - response_message_cls (Callable[proto.Message]): A proto + response_message_cls (Union[proto.Message, google.protobuf.message.Message]): A response class expected to be returned from an API. + + Raises: + ValueError: + - If `response_message_cls` is not a subclass of `proto.Message` or `google.protobuf.message.Message`. """ - def __init__(self, response: requests.Response, response_message_cls): + def __init__( + self, + response: requests.Response, + response_message_cls: Union[proto.Message, google.protobuf.message.Message], + ): self._response = response - self._response_message_cls = response_message_cls # Inner iterator over HTTP response's content. self._response_itr = self._response.iter_content(decode_unicode=True) - # Contains a list of JSON responses ready to be sent to user. - self._ready_objs: Deque[str] = deque() - # Current JSON response being built. - self._obj = "" - # Keeps track of the nesting level within a JSON object. - self._level = 0 - # Keeps track whether HTTP response is currently sending values - # inside of a string value. - self._in_string = False - # Whether an escape symbol "\" was encountered. - self._escape_next = False + super(ResponseIterator, self).__init__( + response_message_cls=response_message_cls + ) def cancel(self): """Cancel existing streaming operation.""" self._response.close() - def _process_chunk(self, chunk: str): - if self._level == 0: - if chunk[0] != "[": - raise ValueError( - "Can only parse array of JSON objects, instead got %s" % chunk - ) - for char in chunk: - if char == "{": - if self._level == 1: - # Level 1 corresponds to the outermost JSON object - # (i.e. the one we care about). - self._obj = "" - if not self._in_string: - self._level += 1 - self._obj += char - elif char == "}": - self._obj += char - if not self._in_string: - self._level -= 1 - if not self._in_string and self._level == 1: - self._ready_objs.append(self._obj) - elif char == '"': - # Helps to deal with an escaped quotes inside of a string. - if not self._escape_next: - self._in_string = not self._in_string - self._obj += char - elif char in string.whitespace: - if self._in_string: - self._obj += char - elif char == "[": - if self._level == 0: - self._level += 1 - else: - self._obj += char - elif char == "]": - if self._level == 1: - self._level -= 1 - else: - self._obj += char - else: - self._obj += char - self._escape_next = not self._escape_next if char == "\\" else False - def __next__(self): while not self._ready_objs: try: @@ -105,9 +62,5 @@ def __next__(self): raise e return self._grab() - def _grab(self): - # Add extra quotes to make json.loads happy. 
- return self._response_message_cls.from_json(self._ready_objs.popleft()) - def __iter__(self): return self diff --git a/google/api_core/rest_streaming_async.py b/google/api_core/rest_streaming_async.py new file mode 100644 index 000000000..370c2b53f --- /dev/null +++ b/google/api_core/rest_streaming_async.py @@ -0,0 +1,89 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Helpers for asynchronous server-side streaming in REST.""" + +from typing import Union + +import proto + +try: + import google.auth.aio.transport +except ImportError as e: # pragma: NO COVER + raise ImportError( + "`google-api-core[async_rest]` is required to use asynchronous rest streaming. " + "Install the `async_rest` extra of `google-api-core` using " + "`pip install google-api-core[async_rest]`." + ) from e + +import google.protobuf.message +from google.api_core._rest_streaming_base import BaseResponseIterator + + +class AsyncResponseIterator(BaseResponseIterator): + """Asynchronous Iterator over REST API responses. + + Args: + response (google.auth.aio.transport.Response): An API response object. + response_message_cls (Union[proto.Message, google.protobuf.message.Message]): A response + class expected to be returned from an API. + + Raises: + ValueError: + - If `response_message_cls` is not a subclass of `proto.Message` or `google.protobuf.message.Message`. + """ + + def __init__( + self, + response: google.auth.aio.transport.Response, + response_message_cls: Union[proto.Message, google.protobuf.message.Message], + ): + self._response = response + self._chunk_size = 1024 + # TODO(https://github.com/googleapis/python-api-core/issues/703): mypy does not recognize the abstract content + # method as an async generator as it looks for the `yield` keyword in the implementation. + # Given that the abstract method is not implemented, mypy fails to recognize it as an async generator. + # mypy warnings are silenced until the linked issue is resolved. 
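+        # The response body arrives as an async byte iterator; __anext__ below
+        # decodes each chunk and feeds it to the shared incremental JSON
+        # parser inherited from BaseResponseIterator.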
+        self._response_itr = self._response.content(self._chunk_size).__aiter__()  # type: ignore
+        super(AsyncResponseIterator, self).__init__(
+            response_message_cls=response_message_cls
+        )
+
+    async def __aenter__(self):
+        return self
+
+    async def cancel(self):
+        """Cancel existing streaming operation."""
+        await self._response.close()
+
+    async def __anext__(self):
+        while not self._ready_objs:
+            try:
+                chunk = await self._response_itr.__anext__()
+                chunk = chunk.decode("utf-8")
+                self._process_chunk(chunk)
+            except StopAsyncIteration as e:
+                if self._level > 0:
+                    raise ValueError("Unfinished stream: %s" % self._obj)
+                raise e
+            except ValueError as e:
+                raise e
+        return self._grab()
+
+    def __aiter__(self):
+        return self
+
+    async def __aexit__(self, exc_type, exc, tb):
+        """Cancel existing async streaming operation."""
+        await self._response.close()
diff --git a/google/api_core/retry.py b/google/api_core/retry.py
deleted file mode 100644
index 08f8209e9..000000000
--- a/google/api_core/retry.py
+++ /dev/null
@@ -1,477 +0,0 @@
-# Copyright 2017 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Helpers for retrying functions with exponential back-off.
-
-The :class:`Retry` decorator can be used to retry functions that raise
-exceptions using exponential backoff. Because a exponential sleep algorithm is
-used, the retry is limited by a `deadline`. The deadline is the maximum amount
-of time a method can block. This is used instead of total number of retries
-because it is difficult to ascertain the amount of time a function can block
-when using total number of retries and exponential backoff.
-
-By default, this decorator will retry transient
-API errors (see :func:`if_transient_error`). For example:
-
-.. code-block:: python
-
-    @retry.Retry()
-    def call_flaky_rpc():
-        return client.flaky_rpc()
-
-    # Will retry flaky_rpc() if it raises transient API errors.
-    result = call_flaky_rpc()
-
-You can pass a custom predicate to retry on different exceptions, such as
-waiting for an eventually consistent item to be available:
-
-.. code-block:: python
-
-    @retry.Retry(predicate=if_exception_type(exceptions.NotFound))
-    def check_if_exists():
-        return client.does_thing_exist()
-
-    is_available = check_if_exists()
-
-Some client library methods apply retry automatically. These methods can accept
-a ``retry`` parameter that allows you to configure the behavior:
-
-.. 
code-block:: python - - my_retry = retry.Retry(deadline=60) - result = client.some_method(retry=my_retry) - -""" - -from __future__ import annotations - -import datetime -import functools -import logging -import random -import sys -import time -import inspect -import warnings -from typing import Any, Callable, TypeVar, TYPE_CHECKING - -import requests.exceptions - -from google.api_core import datetime_helpers -from google.api_core import exceptions -from google.auth import exceptions as auth_exceptions - -if TYPE_CHECKING: - if sys.version_info >= (3, 10): - from typing import ParamSpec - else: - from typing_extensions import ParamSpec - - _P = ParamSpec("_P") - _R = TypeVar("_R") - -_LOGGER = logging.getLogger(__name__) -_DEFAULT_INITIAL_DELAY = 1.0 # seconds -_DEFAULT_MAXIMUM_DELAY = 60.0 # seconds -_DEFAULT_DELAY_MULTIPLIER = 2.0 -_DEFAULT_DEADLINE = 60.0 * 2.0 # seconds -_ASYNC_RETRY_WARNING = "Using the synchronous google.api_core.retry.Retry with asynchronous calls may lead to unexpected results. Please use google.api_core.retry_async.AsyncRetry instead." - - -def if_exception_type( - *exception_types: type[BaseException], -) -> Callable[[BaseException], bool]: - """Creates a predicate to check if the exception is of a given type. - - Args: - exception_types (Sequence[:func:`type`]): The exception types to check - for. - - Returns: - Callable[Exception]: A predicate that returns True if the provided - exception is of the given type(s). - """ - - def if_exception_type_predicate(exception: BaseException) -> bool: - """Bound predicate for checking an exception type.""" - return isinstance(exception, exception_types) - - return if_exception_type_predicate - - -# pylint: disable=invalid-name -# Pylint sees this as a constant, but it is also an alias that should be -# considered a function. -if_transient_error = if_exception_type( - exceptions.InternalServerError, - exceptions.TooManyRequests, - exceptions.ServiceUnavailable, - requests.exceptions.ConnectionError, - requests.exceptions.ChunkedEncodingError, - auth_exceptions.TransportError, -) -"""A predicate that checks if an exception is a transient API error. - -The following server errors are considered transient: - -- :class:`google.api_core.exceptions.InternalServerError` - HTTP 500, gRPC - ``INTERNAL(13)`` and its subclasses. -- :class:`google.api_core.exceptions.TooManyRequests` - HTTP 429 -- :class:`google.api_core.exceptions.ServiceUnavailable` - HTTP 503 -- :class:`requests.exceptions.ConnectionError` -- :class:`requests.exceptions.ChunkedEncodingError` - The server declared - chunked encoding but sent an invalid chunk. -- :class:`google.auth.exceptions.TransportError` - Used to indicate an - error occurred during an HTTP request. -""" -# pylint: enable=invalid-name - - -def exponential_sleep_generator(initial, maximum, multiplier=_DEFAULT_DELAY_MULTIPLIER): - """Generates sleep intervals based on the exponential back-off algorithm. - - This implements the `Truncated Exponential Back-off`_ algorithm. - - .. _Truncated Exponential Back-off: - https://cloud.google.com/storage/docs/exponential-backoff - - Args: - initial (float): The minimum amount of time to delay. This must - be greater than 0. - maximum (float): The maximum amount of time to delay. - multiplier (float): The multiplier applied to the delay. - - Yields: - float: successive sleep intervals. 
- """ - delay = min(initial, maximum) - while True: - yield random.uniform(0.0, delay) - delay = min(delay * multiplier, maximum) - - -def retry_target( - target, predicate, sleep_generator, timeout=None, on_error=None, **kwargs -): - """Call a function and retry if it fails. - - This is the lowest-level retry helper. Generally, you'll use the - higher-level retry helper :class:`Retry`. - - Args: - target(Callable): The function to call and retry. This must be a - nullary function - apply arguments with `functools.partial`. - predicate (Callable[Exception]): A callable used to determine if an - exception raised by the target should be considered retryable. - It should return True to retry or False otherwise. - sleep_generator (Iterable[float]): An infinite iterator that determines - how long to sleep between retries. - timeout (float): How long to keep retrying the target. - on_error (Callable[Exception]): A function to call while processing a - retryable exception. Any error raised by this function will *not* - be caught. - deadline (float): DEPRECATED: use ``timeout`` instead. For backward - compatibility, if specified it will override ``timeout`` parameter. - - Returns: - Any: the return value of the target function. - - Raises: - google.api_core.RetryError: If the deadline is exceeded while retrying. - ValueError: If the sleep generator stops yielding values. - Exception: If the target raises a method that isn't retryable. - """ - - timeout = kwargs.get("deadline", timeout) - - if timeout is not None: - deadline = datetime_helpers.utcnow() + datetime.timedelta(seconds=timeout) - else: - deadline = None - - last_exc = None - - for sleep in sleep_generator: - try: - result = target() - if inspect.isawaitable(result): - warnings.warn(_ASYNC_RETRY_WARNING) - return result - - # pylint: disable=broad-except - # This function explicitly must deal with broad exceptions. - except Exception as exc: - if not predicate(exc): - raise - last_exc = exc - if on_error is not None: - on_error(exc) - - if deadline is not None: - next_attempt_time = datetime_helpers.utcnow() + datetime.timedelta( - seconds=sleep - ) - if deadline < next_attempt_time: - raise exceptions.RetryError( - "Deadline of {:.1f}s exceeded while calling target function".format( - timeout - ), - last_exc, - ) from last_exc - - _LOGGER.debug( - "Retrying due to {}, sleeping {:.1f}s ...".format(last_exc, sleep) - ) - time.sleep(sleep) - - raise ValueError("Sleep generator stopped yielding sleep values.") - - -class Retry(object): - """Exponential retry decorator. - - This class is a decorator used to add retry or polling behavior to an RPC - call. - - Although the default behavior is to retry transient API errors, a - different predicate can be provided to retry other exceptions. - - There two important concepts that retry/polling behavior may operate on, - Deadline and Timeout, which need to be properly defined for the correct - usage of this class and the rest of the library. - - Deadline: a fixed point in time by which a certain operation must - terminate. For example, if a certain operation has a deadline - "2022-10-18T23:30:52.123Z" it must terminate (successfully or with an - error) by that time, regardless of when it was started or whether it - was started at all. - - Timeout: the maximum duration of time after which a certain operation - must terminate (successfully or with an error). The countdown begins right - after an operation was started. 
For example, if an operation was started at - 09:24:00 with timeout of 75 seconds, it must terminate no later than - 09:25:15. - - Unfortunately, in the past this class (and the api-core library as a whole) has not been - properly distinguishing the concepts of "timeout" and "deadline", and the - ``deadline`` parameter has meant ``timeout``. That is why - ``deadline`` has been deprecated and ``timeout`` should be used instead. If the - ``deadline`` parameter is set, it will override the ``timeout`` parameter. In other words, - ``retry.deadline`` should be treated as just a deprecated alias for - ``retry.timeout``. - - Said another way, it is safe to assume that this class and the rest of this - library operate in terms of timeouts (not deadlines) unless explicitly - noted the usage of deadline semantics. - - It is also important to - understand the three most common applications of the Timeout concept in the - context of this library. - - Usually the generic Timeout term may stand for one of the following actual - timeouts: RPC Timeout, Retry Timeout, or Polling Timeout. - - RPC Timeout: a value supplied by the client to the server so - that the server side knows the maximum amount of time it is expected to - spend handling that specific RPC. For example, in the case of gRPC transport, - RPC Timeout is represented by setting "grpc-timeout" header in the HTTP2 - request. The `timeout` property of this class normally never represents the - RPC Timeout as it is handled separately by the ``google.api_core.timeout`` - module of this library. - - Retry Timeout: this is the most common meaning of the ``timeout`` property - of this class, and defines how long a certain RPC may be retried in case - the server returns an error. - - Polling Timeout: defines how long the - client side is allowed to call the polling RPC repeatedly to check a status of a - long-running operation. Each polling RPC is - expected to succeed (its errors are supposed to be handled by the retry - logic). The decision as to whether a new polling attempt needs to be made is based - not on the RPC status code but on the status of the returned - status of an operation. In other words: we will poll a long-running operation until the operation is done or the polling timeout expires. Each poll will inform us of the status of the operation. The poll consists of an RPC to the server that may itself be retried as per the poll-specific retry settings in case of errors. The operation-level retry settings do NOT apply to polling-RPC retries. - - With the actual timeout types being defined above, the client libraries - often refer to just Timeout without clarifying which type specifically - that is. In that case the actual timeout type (sometimes also referred to as - Logical Timeout) can be determined from the context. If it is a unary rpc - call (i.e. a regular one) Timeout usually stands for the RPC Timeout (if - provided directly as a standalone value) or Retry Timeout (if provided as - ``retry.timeout`` property of the unary RPC's retry config). For - ``Operation`` or ``PollingFuture`` in general Timeout stands for - Polling Timeout. - - Args: - predicate (Callable[Exception]): A callable that should return ``True`` - if the given exception is retryable. - initial (float): The minimum amount of time to delay in seconds. This - must be greater than 0. - maximum (float): The maximum amount of time to delay in seconds. - multiplier (float): The multiplier applied to the delay. - timeout (float): How long to keep retrying, in seconds. 
- deadline (float): DEPRECATED: use `timeout` instead. For backward - compatibility, if specified it will override the ``timeout`` parameter. - """ - - def __init__( - self, - predicate: Callable[[BaseException], bool] = if_transient_error, - initial: float = _DEFAULT_INITIAL_DELAY, - maximum: float = _DEFAULT_MAXIMUM_DELAY, - multiplier: float = _DEFAULT_DELAY_MULTIPLIER, - timeout: float = _DEFAULT_DEADLINE, - on_error: Callable[[BaseException], Any] | None = None, - **kwargs: Any, - ) -> None: - self._predicate = predicate - self._initial = initial - self._multiplier = multiplier - self._maximum = maximum - self._timeout = kwargs.get("deadline", timeout) - self._deadline = self._timeout - self._on_error = on_error - - def __call__( - self, - func: Callable[_P, _R], - on_error: Callable[[BaseException], Any] | None = None, - ) -> Callable[_P, _R]: - """Wrap a callable with retry behavior. - - Args: - func (Callable): The callable to add retry behavior to. - on_error (Callable[Exception]): A function to call while processing - a retryable exception. Any error raised by this function will - *not* be caught. - - Returns: - Callable: A callable that will invoke ``func`` with retry - behavior. - """ - if self._on_error is not None: - on_error = self._on_error - - @functools.wraps(func) - def retry_wrapped_func(*args: _P.args, **kwargs: _P.kwargs) -> _R: - """A wrapper that calls target function with retry.""" - target = functools.partial(func, *args, **kwargs) - sleep_generator = exponential_sleep_generator( - self._initial, self._maximum, multiplier=self._multiplier - ) - return retry_target( - target, - self._predicate, - sleep_generator, - self._timeout, - on_error=on_error, - ) - - return retry_wrapped_func - - @property - def deadline(self): - """ - DEPRECATED: use ``timeout`` instead. Refer to the ``Retry`` class - documentation for details. - """ - return self._timeout - - @property - def timeout(self): - return self._timeout - - def with_deadline(self, deadline): - """Return a copy of this retry with the given timeout. - - DEPRECATED: use :meth:`with_timeout` instead. Refer to the ``Retry`` class - documentation for details. - - Args: - deadline (float): How long to keep retrying in seconds. - - Returns: - Retry: A new retry instance with the given timeout. - """ - return self.with_timeout(timeout=deadline) - - def with_timeout(self, timeout): - """Return a copy of this retry with the given timeout. - - Args: - timeout (float): How long to keep retrying, in seconds. - - Returns: - Retry: A new retry instance with the given timeout. - """ - return Retry( - predicate=self._predicate, - initial=self._initial, - maximum=self._maximum, - multiplier=self._multiplier, - timeout=timeout, - on_error=self._on_error, - ) - - def with_predicate(self, predicate): - """Return a copy of this retry with the given predicate. - - Args: - predicate (Callable[Exception]): A callable that should return - ``True`` if the given exception is retryable. - - Returns: - Retry: A new retry instance with the given predicate. - """ - return Retry( - predicate=predicate, - initial=self._initial, - maximum=self._maximum, - multiplier=self._multiplier, - timeout=self._timeout, - on_error=self._on_error, - ) - - def with_delay(self, initial=None, maximum=None, multiplier=None): - """Return a copy of this retry with the given delay options. - - Args: - initial (float): The minimum amount of time to delay. This must - be greater than 0. - maximum (float): The maximum amount of time to delay. 
-            multiplier (float): The multiplier applied to the delay.
-
-        Returns:
-            Retry: A new retry instance with the given predicate.
-        """
-        return Retry(
-            predicate=self._predicate,
-            initial=initial if initial is not None else self._initial,
-            maximum=maximum if maximum is not None else self._maximum,
-            multiplier=multiplier if multiplier is not None else self._multiplier,
-            timeout=self._timeout,
-            on_error=self._on_error,
-        )
-
-    def __str__(self):
-        return (
-            "<Retry predicate={}, initial={:.1f}, maximum={:.1f}, "
-            "multiplier={:.1f}, timeout={}, on_error={}>".format(
-                self._predicate,
-                self._initial,
-                self._maximum,
-                self._multiplier,
-                self._timeout,  # timeout can be None, thus no {:.1f}
-                self._on_error,
-            )
-        )
diff --git a/google/api_core/retry/__init__.py b/google/api_core/retry/__init__.py
new file mode 100644
index 000000000..1724fdbd9
--- /dev/null
+++ b/google/api_core/retry/__init__.py
@@ -0,0 +1,52 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Retry implementation for Google API client libraries."""
+
+from .retry_base import exponential_sleep_generator
+from .retry_base import if_exception_type
+from .retry_base import if_transient_error
+from .retry_base import build_retry_error
+from .retry_base import RetryFailureReason
+from .retry_unary import Retry
+from .retry_unary import retry_target
+from .retry_unary_async import AsyncRetry
+from .retry_unary_async import retry_target as retry_target_async
+from .retry_streaming import StreamingRetry
+from .retry_streaming import retry_target_stream
+from .retry_streaming_async import AsyncStreamingRetry
+from .retry_streaming_async import retry_target_stream as retry_target_stream_async
+
+# The following imports are for backwards compatibility with https://github.com/googleapis/python-api-core/blob/4d7d2edee2c108d43deb151e6e0fdceb56b73275/google/api_core/retry.py
+#
+# TODO: Revert these imports on the next major version release (https://github.com/googleapis/python-api-core/issues/576)
+from google.api_core import datetime_helpers  # noqa: F401
+from google.api_core import exceptions  # noqa: F401
+from google.auth import exceptions as auth_exceptions  # noqa: F401
+
+__all__ = (
+    "exponential_sleep_generator",
+    "if_exception_type",
+    "if_transient_error",
+    "build_retry_error",
+    "RetryFailureReason",
+    "Retry",
+    "AsyncRetry",
+    "StreamingRetry",
+    "AsyncStreamingRetry",
+    "retry_target",
+    "retry_target_async",
+    "retry_target_stream",
+    "retry_target_stream_async",
+)
diff --git a/google/api_core/retry/retry_base.py b/google/api_core/retry/retry_base.py
new file mode 100644
index 000000000..263b4ccf3
--- /dev/null
+++ b/google/api_core/retry/retry_base.py
@@ -0,0 +1,370 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Shared classes and functions for retrying requests. + +:class:`_BaseRetry` is the base class for :class:`Retry`, +:class:`AsyncRetry`, :class:`StreamingRetry`, and :class:`AsyncStreamingRetry`. +""" + +from __future__ import annotations + +import logging +import random +import time + +from enum import Enum +from typing import Any, Callable, Optional, Iterator, TYPE_CHECKING + +import requests.exceptions + +from google.api_core import exceptions +from google.auth import exceptions as auth_exceptions + +if TYPE_CHECKING: + import sys + + if sys.version_info >= (3, 11): + from typing import Self + else: + from typing_extensions import Self + +_DEFAULT_INITIAL_DELAY = 1.0 # seconds +_DEFAULT_MAXIMUM_DELAY = 60.0 # seconds +_DEFAULT_DELAY_MULTIPLIER = 2.0 +_DEFAULT_DEADLINE = 60.0 * 2.0 # seconds + +_LOGGER = logging.getLogger("google.api_core.retry") + + +def if_exception_type( + *exception_types: type[Exception], +) -> Callable[[Exception], bool]: + """Creates a predicate to check if the exception is of a given type. + + Args: + exception_types (Sequence[:func:`type`]): The exception types to check + for. + + Returns: + Callable[Exception]: A predicate that returns True if the provided + exception is of the given type(s). + """ + + def if_exception_type_predicate(exception: Exception) -> bool: + """Bound predicate for checking an exception type.""" + return isinstance(exception, exception_types) + + return if_exception_type_predicate + + +# pylint: disable=invalid-name +# Pylint sees this as a constant, but it is also an alias that should be +# considered a function. +if_transient_error = if_exception_type( + exceptions.InternalServerError, + exceptions.TooManyRequests, + exceptions.ServiceUnavailable, + requests.exceptions.ConnectionError, + requests.exceptions.ChunkedEncodingError, + auth_exceptions.TransportError, +) +"""A predicate that checks if an exception is a transient API error. + +The following server errors are considered transient: + +- :class:`google.api_core.exceptions.InternalServerError` - HTTP 500, gRPC + ``INTERNAL(13)`` and its subclasses. +- :class:`google.api_core.exceptions.TooManyRequests` - HTTP 429 +- :class:`google.api_core.exceptions.ServiceUnavailable` - HTTP 503 +- :class:`requests.exceptions.ConnectionError` +- :class:`requests.exceptions.ChunkedEncodingError` - The server declared + chunked encoding but sent an invalid chunk. +- :class:`google.auth.exceptions.TransportError` - Used to indicate an + error occurred during an HTTP request. +""" +# pylint: enable=invalid-name + + +def exponential_sleep_generator( + initial: float, maximum: float, multiplier: float = _DEFAULT_DELAY_MULTIPLIER +): + """Generates sleep intervals based on the exponential back-off algorithm. + + This implements the `Truncated Exponential Back-off`_ algorithm. + + .. _Truncated Exponential Back-off: + https://cloud.google.com/storage/docs/exponential-backoff + + Args: + initial (float): The minimum amount of time to delay. This must + be greater than 0. + maximum (float): The maximum amount of time to delay. + multiplier (float): The multiplier applied to the delay. 
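+
+    For example, with ``initial=1.0``, ``maximum=60.0`` and the default
+    multiplier of 2.0, the successive delay ceilings are 1, 2, 4, 8, ...
+    capped at 60, and each yielded sleep is drawn uniformly from
+    [0.0, ceiling], so the actual intervals are jittered.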
+
+    Yields:
+        float: successive sleep intervals.
+    """
+    max_delay = min(initial, maximum)
+    while True:
+        yield random.uniform(0.0, max_delay)
+        max_delay = min(max_delay * multiplier, maximum)
+
+
+class RetryFailureReason(Enum):
+    """
+    The cause of a failed retry, used when building exceptions.
+    """
+
+    TIMEOUT = 0
+    NON_RETRYABLE_ERROR = 1
+
+
+def build_retry_error(
+    exc_list: list[Exception],
+    reason: RetryFailureReason,
+    timeout_val: float | None,
+    **kwargs: Any,
+) -> tuple[Exception, Exception | None]:
+    """
+    Default exception_factory implementation.
+
+    Returns a RetryError if the failure is due to a timeout, otherwise
+    returns the last exception encountered.
+
+    Args:
+        - exc_list: list of exceptions that occurred during the retry
+        - reason: reason for the retry failure.
+            Can be TIMEOUT or NON_RETRYABLE_ERROR
+        - timeout_val: the original timeout value for the retry (in seconds), for use in the exception message
+
+    Returns:
+        - tuple: a tuple of the exception to be raised, and the cause exception if any
+    """
+    if reason == RetryFailureReason.TIMEOUT:
+        # return RetryError with the most recent exception as the cause
+        src_exc = exc_list[-1] if exc_list else None
+        timeout_val_str = f"of {timeout_val:0.1f}s " if timeout_val is not None else ""
+        return (
+            exceptions.RetryError(
+                f"Timeout {timeout_val_str}exceeded",
+                src_exc,
+            ),
+            src_exc,
+        )
+    elif exc_list:
+        # return most recent exception encountered
+        return exc_list[-1], None
+    else:
+        # no exceptions were given in exc_list. Raise generic RetryError
+        return exceptions.RetryError("Unknown error", None), None
+
+
+def _retry_error_helper(
+    exc: Exception,
+    deadline: float | None,
+    sleep_iterator: Iterator[float],
+    error_list: list[Exception],
+    predicate_fn: Callable[[Exception], bool],
+    on_error_fn: Callable[[Exception], None] | None,
+    exc_factory_fn: Callable[
+        [list[Exception], RetryFailureReason, float | None],
+        tuple[Exception, Exception | None],
+    ],
+    original_timeout: float | None,
+) -> float:
+    """
+    Shared logic for handling an error for all retry implementations.
+
+    - Raises an error on timeout or non-retryable error
+    - Calls on_error_fn if provided
+    - Logs the error
+
+    Args:
+        - exc: the exception that was raised
+        - deadline: the deadline for the retry, calculated as a diff from time.monotonic()
+        - sleep_iterator: iterator to draw the next backoff value from
+        - error_list: the list of exceptions that have been raised so far
+        - predicate_fn: takes `exc` and returns True if the operation should be retried
+        - on_error_fn: callback to execute when a retryable error occurs
+        - exc_factory_fn: callback used to build the exception to be raised on terminal failure
+        - original_timeout: the original timeout value for the retry (in seconds),
+            to be passed to the exception factory for building an error message
+    Returns:
+        - the sleep value chosen before the next attempt
+    """
+    error_list.append(exc)
+    if not predicate_fn(exc):
+        final_exc, source_exc = exc_factory_fn(
+            error_list,
+            RetryFailureReason.NON_RETRYABLE_ERROR,
+            original_timeout,
+        )
+        raise final_exc from source_exc
+    if on_error_fn is not None:
+        on_error_fn(exc)
+    # next_sleep is fetched after the on_error callback, to allow clients
+    # to update sleep_iterator values dynamically in response to errors
+    try:
+        next_sleep = next(sleep_iterator)
+    except StopIteration:
+        raise ValueError("Sleep generator stopped yielding sleep values.") from exc
+    if deadline is not None and time.monotonic() + next_sleep > deadline:
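+        # `deadline` was computed from time.monotonic() when the retry began;
+        # fail fast if even the upcoming sleep would overshoot it, rather than
+        # sleeping first and timing out afterwards.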
final_exc, source_exc = exc_factory_fn( + error_list, + RetryFailureReason.TIMEOUT, + original_timeout, + ) + raise final_exc from source_exc + _LOGGER.debug( + "Retrying due to {}, sleeping {:.1f}s ...".format(error_list[-1], next_sleep) + ) + return next_sleep + + +class _BaseRetry(object): + """ + Base class for retry configuration objects. This class is intended to capture retry + and backoff configuration that is common to both synchronous and asynchronous retries, + for both unary and streaming RPCs. It is not intended to be instantiated directly, + but rather to be subclassed by the various retry configuration classes. + """ + + def __init__( + self, + predicate: Callable[[Exception], bool] = if_transient_error, + initial: float = _DEFAULT_INITIAL_DELAY, + maximum: float = _DEFAULT_MAXIMUM_DELAY, + multiplier: float = _DEFAULT_DELAY_MULTIPLIER, + timeout: Optional[float] = _DEFAULT_DEADLINE, + on_error: Optional[Callable[[Exception], Any]] = None, + **kwargs: Any, + ) -> None: + self._predicate = predicate + self._initial = initial + self._multiplier = multiplier + self._maximum = maximum + self._timeout = kwargs.get("deadline", timeout) + self._deadline = self._timeout + self._on_error = on_error + + def __call__(self, *args, **kwargs) -> Any: + raise NotImplementedError("Not implemented in base class") + + @property + def deadline(self) -> float | None: + """ + DEPRECATED: use ``timeout`` instead. Refer to the ``Retry`` class + documentation for details. + """ + return self._timeout + + @property + def timeout(self) -> float | None: + return self._timeout + + def with_deadline(self, deadline: float | None) -> Self: + """Return a copy of this retry with the given timeout. + + DEPRECATED: use :meth:`with_timeout` instead. Refer to the ``Retry`` class + documentation for details. + + Args: + deadline (float|None): How long to keep retrying, in seconds. If None, + no timeout is enforced. + + Returns: + Retry: A new retry instance with the given timeout. + """ + return self.with_timeout(deadline) + + def with_timeout(self, timeout: float | None) -> Self: + """Return a copy of this retry with the given timeout. + + Args: + timeout (float): How long to keep retrying, in seconds. If None, + no timeout will be enforced. + + Returns: + Retry: A new retry instance with the given timeout. + """ + return type(self)( + predicate=self._predicate, + initial=self._initial, + maximum=self._maximum, + multiplier=self._multiplier, + timeout=timeout, + on_error=self._on_error, + ) + + def with_predicate(self, predicate: Callable[[Exception], bool]) -> Self: + """Return a copy of this retry with the given predicate. + + Args: + predicate (Callable[Exception]): A callable that should return + ``True`` if the given exception is retryable. + + Returns: + Retry: A new retry instance with the given predicate. + """ + return type(self)( + predicate=predicate, + initial=self._initial, + maximum=self._maximum, + multiplier=self._multiplier, + timeout=self._timeout, + on_error=self._on_error, + ) + + def with_delay( + self, + initial: Optional[float] = None, + maximum: Optional[float] = None, + multiplier: Optional[float] = None, + ) -> Self: + """Return a copy of this retry with the given delay options. + + Args: + initial (float): The minimum amount of time to delay (in seconds). This must + be greater than 0. If None, the current value is used. + maximum (float): The maximum amount of time to delay (in seconds). If None, the + current value is used. + multiplier (float): The multiplier applied to the delay. 
If None, the current + value is used. + + Returns: + Retry: A new retry instance with the given delay options. + """ + return type(self)( + predicate=self._predicate, + initial=initial if initial is not None else self._initial, + maximum=maximum if maximum is not None else self._maximum, + multiplier=multiplier if multiplier is not None else self._multiplier, + timeout=self._timeout, + on_error=self._on_error, + ) + + def __str__(self) -> str: + return ( + "<{} predicate={}, initial={:.1f}, maximum={:.1f}, " + "multiplier={:.1f}, timeout={}, on_error={}>".format( + type(self).__name__, + self._predicate, + self._initial, + self._maximum, + self._multiplier, + self._timeout, # timeout can be None, thus no {:.1f} + self._on_error, + ) + ) diff --git a/google/api_core/retry/retry_streaming.py b/google/api_core/retry/retry_streaming.py new file mode 100644 index 000000000..e4474c8af --- /dev/null +++ b/google/api_core/retry/retry_streaming.py @@ -0,0 +1,264 @@ +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +Generator wrapper for retryable streaming RPCs. +""" +from __future__ import annotations + +from typing import ( + Callable, + Optional, + List, + Tuple, + Iterable, + Generator, + TypeVar, + Any, + TYPE_CHECKING, +) + +import sys +import time +import functools + +from google.api_core.retry.retry_base import _BaseRetry +from google.api_core.retry.retry_base import _retry_error_helper +from google.api_core.retry import exponential_sleep_generator +from google.api_core.retry import build_retry_error +from google.api_core.retry import RetryFailureReason + +if TYPE_CHECKING: + if sys.version_info >= (3, 10): + from typing import ParamSpec + else: + from typing_extensions import ParamSpec + + _P = ParamSpec("_P") # target function call parameters + _Y = TypeVar("_Y") # yielded values + + +def retry_target_stream( + target: Callable[_P, Iterable[_Y]], + predicate: Callable[[Exception], bool], + sleep_generator: Iterable[float], + timeout: Optional[float] = None, + on_error: Optional[Callable[[Exception], None]] = None, + exception_factory: Callable[ + [List[Exception], RetryFailureReason, Optional[float]], + Tuple[Exception, Optional[Exception]], + ] = build_retry_error, + init_args: tuple = (), + init_kwargs: dict = {}, + **kwargs, +) -> Generator[_Y, Any, None]: + """Create a generator wrapper that retries the wrapped stream if it fails. + + This is the lowest-level retry helper. Generally, you'll use the + higher-level retry helper :class:`Retry`. + + Args: + target: The generator function to call and retry. + predicate: A callable used to determine if an + exception raised by the target should be considered retryable. + It should return True to retry or False otherwise. + sleep_generator: An infinite iterator that determines + how long to sleep between retries. + timeout: How long to keep retrying the target. + Note: timeout is only checked before initiating a retry, so the target may + run past the timeout value as long as it is healthy. 
+ on_error: If given, the on_error callback will be called with each + retryable exception raised by the target. Any error raised by this + function will *not* be caught. + exception_factory: A function that is called when the retryable reaches + a terminal failure state, used to construct an exception to be raised. + It takes a list of all exceptions encountered, a retry.RetryFailureReason + enum indicating the failure cause, and the original timeout value + as arguments. It should return a tuple of the exception to be raised, + along with the cause exception if any. The default implementation will raise + a RetryError on timeout, or the last exception encountered otherwise. + init_args: Positional arguments to pass to the target function. + init_kwargs: Keyword arguments to pass to the target function. + + Returns: + Generator: A retryable generator that wraps the target generator function. + + Raises: + ValueError: If the sleep generator stops yielding values. + Exception: a custom exception specified by the exception_factory if provided. + If no exception_factory is provided: + google.api_core.RetryError: If the timeout is exceeded while retrying. + Exception: If the target raises an error that isn't retryable. + """ + + timeout = kwargs.get("deadline", timeout) + deadline: Optional[float] = ( + time.monotonic() + timeout if timeout is not None else None + ) + error_list: list[Exception] = [] + sleep_iter = iter(sleep_generator) + + # continue trying until an attempt completes, or a terminal exception is raised in _retry_error_helper + # TODO: support max_attempts argument: https://github.com/googleapis/python-api-core/issues/535 + while True: + # Start a new retry loop + try: + # Note: in the future, we can add a ResumptionStrategy object + # to generate new args between calls. For now, use the same args + # for each attempt. + subgenerator = target(*init_args, **init_kwargs) + return (yield from subgenerator) + # handle exceptions raised by the subgenerator + # pylint: disable=broad-except + # This function explicitly must deal with broad exceptions. + except Exception as exc: + # defer to shared logic for handling errors + next_sleep = _retry_error_helper( + exc, + deadline, + sleep_iter, + error_list, + predicate, + on_error, + exception_factory, + timeout, + ) + # if exception not raised, sleep before next attempt + time.sleep(next_sleep) + + +class StreamingRetry(_BaseRetry): + """Exponential retry decorator for streaming synchronous RPCs. + + This class returns a Generator when called, which wraps the target + stream in retry logic. If any exception is raised by the target, the + entire stream will be retried within the wrapper. + + Although the default behavior is to retry transient API errors, a + different predicate can be provided to retry other exceptions. + + Important Note: when a stream encounters a retryable error, it will + silently construct a fresh iterator instance in the background + and continue yielding (likely duplicate) values as if no error occurred. + This is the most general way to retry a stream, but it often is not the + desired behavior. Example: iter([1, 2, 1/0]) -> [1, 2, 1, 2, ...] + + There are two ways to build more advanced retry logic for streams: + + 1. Wrap the target + Use a ``target`` that maintains state between retries, and creates a + different generator on each retry call. For example, you can wrap a + network call in a function that modifies the request based on what has + already been returned: + + .. 
code-block:: python + + def attempt_with_modified_request(target, request, seen_items=[]): + # remove seen items from request on each attempt + new_request = modify_request(request, seen_items) + new_generator = target(new_request) + for item in new_generator: + yield item + seen_items.append(item) + + retry_wrapped_fn = StreamingRetry()(attempt_with_modified_request) + retryable_generator = retry_wrapped_fn(target, request) + + 2. Wrap the retry generator + Alternatively, you can wrap the retryable generator itself before + passing it to the end-user to add a filter on the stream. For + example, you can keep track of the items that were successfully yielded + in previous retry attempts, and only yield new items when the + new attempt surpasses the previous ones: + + .. code-block:: python + + def retryable_with_filter(target): + stream_idx = 0 + # reset stream_idx when the stream is retried + def on_error(e): + nonlocal stream_idx + stream_idx = 0 + # build retryable + retryable_gen = StreamingRetry(...)(target) + # keep track of what has been yielded out of filter + seen_items = [] + for item in retryable_gen(): + if stream_idx >= len(seen_items): + seen_items.append(item) + yield item + elif item != seen_items[stream_idx]: + raise ValueError("Stream differs from last attempt") + stream_idx += 1 + + filter_retry_wrapped = retryable_with_filter(target) + + Args: + predicate (Callable[Exception]): A callable that should return ``True`` + if the given exception is retryable. + initial (float): The minimum amount of time to delay in seconds. This + must be greater than 0. + maximum (float): The maximum amount of time to delay in seconds. + multiplier (float): The multiplier applied to the delay. + timeout (float): How long to keep retrying, in seconds. + Note: timeout is only checked before initiating a retry, so the target may + run past the timeout value as long as it is healthy. + on_error (Callable[Exception]): A function to call while processing + a retryable exception. Any error raised by this function will + *not* be caught. + deadline (float): DEPRECATED: use `timeout` instead. For backward + compatibility, if specified it will override the ``timeout`` parameter. + """ + + def __call__( + self, + func: Callable[_P, Iterable[_Y]], + on_error: Callable[[Exception], Any] | None = None, + ) -> Callable[_P, Generator[_Y, Any, None]]: + """Wrap a callable with retry behavior. + + Args: + func (Callable): The callable to add retry behavior to. + on_error (Optional[Callable[Exception]]): If given, the + on_error callback will be called with each retryable exception + raised by the wrapped function. Any error raised by this + function will *not* be caught. If on_error was specified in the + constructor, this value will be ignored. + + Returns: + Callable: A callable that will invoke ``func`` with retry + behavior. 
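An editorial sketch of how the wrapper above might be consumed (not part of the upstream change; ``fetch_rows`` and ``process`` are hypothetical, and the import path assumes ``StreamingRetry`` is re-exported from the ``google.api_core.retry`` package, as the imports elsewhere in this diff suggest):

.. code-block:: python

    from google.api_core import exceptions
    from google.api_core.retry import StreamingRetry, if_exception_type

    retry_policy = StreamingRetry(
        predicate=if_exception_type(exceptions.ServiceUnavailable),
        initial=0.1,
        maximum=2.0,
        timeout=30.0,
    )

    def flaky_stream():
        # hypothetical generator that may raise ServiceUnavailable mid-stream
        yield from fetch_rows()

    wrapped = retry_policy(flaky_stream)
    # calling the wrapper returns a fresh retryable generator; the whole
    # stream restarts from the beginning after each retryable error
    for row in wrapped():
        process(row)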
+ """ + if self._on_error is not None: + on_error = self._on_error + + @functools.wraps(func) + def retry_wrapped_func( + *args: _P.args, **kwargs: _P.kwargs + ) -> Generator[_Y, Any, None]: + """A wrapper that calls target function with retry.""" + sleep_generator = exponential_sleep_generator( + self._initial, self._maximum, multiplier=self._multiplier + ) + return retry_target_stream( + func, + predicate=self._predicate, + sleep_generator=sleep_generator, + timeout=self._timeout, + on_error=on_error, + init_args=args, + init_kwargs=kwargs, + ) + + return retry_wrapped_func diff --git a/google/api_core/retry/retry_streaming_async.py b/google/api_core/retry/retry_streaming_async.py new file mode 100644 index 000000000..5e5fa2402 --- /dev/null +++ b/google/api_core/retry/retry_streaming_async.py @@ -0,0 +1,328 @@ +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +Generator wrapper for retryable async streaming RPCs. +""" +from __future__ import annotations + +from typing import ( + cast, + Any, + Callable, + Iterable, + AsyncIterator, + AsyncIterable, + Awaitable, + TypeVar, + AsyncGenerator, + TYPE_CHECKING, +) + +import asyncio +import time +import sys +import functools + +from google.api_core.retry.retry_base import _BaseRetry +from google.api_core.retry.retry_base import _retry_error_helper +from google.api_core.retry import exponential_sleep_generator +from google.api_core.retry import build_retry_error +from google.api_core.retry import RetryFailureReason + + +if TYPE_CHECKING: + if sys.version_info >= (3, 10): + from typing import ParamSpec + else: + from typing_extensions import ParamSpec + + _P = ParamSpec("_P") # target function call parameters + _Y = TypeVar("_Y") # yielded values + + +async def retry_target_stream( + target: Callable[_P, AsyncIterable[_Y] | Awaitable[AsyncIterable[_Y]]], + predicate: Callable[[Exception], bool], + sleep_generator: Iterable[float], + timeout: float | None = None, + on_error: Callable[[Exception], None] | None = None, + exception_factory: Callable[ + [list[Exception], RetryFailureReason, float | None], + tuple[Exception, Exception | None], + ] = build_retry_error, + init_args: tuple = (), + init_kwargs: dict = {}, + **kwargs, +) -> AsyncGenerator[_Y, None]: + """Create a generator wrapper that retries the wrapped stream if it fails. + + This is the lowest-level retry helper. Generally, you'll use the + higher-level retry helper :class:`AsyncRetry`. + + Args: + target: The generator function to call and retry. + predicate: A callable used to determine if an + exception raised by the target should be considered retryable. + It should return True to retry or False otherwise. + sleep_generator: An infinite iterator that determines + how long to sleep between retries. + timeout: How long to keep retrying the target. + Note: timeout is only checked before initiating a retry, so the target may + run past the timeout value as long as it is healthy. 
+ on_error: If given, the on_error callback will be called with each + retryable exception raised by the target. Any error raised by this + function will *not* be caught. + exception_factory: A function that is called when the retryable reaches + a terminal failure state, used to construct an exception to be raised. + It takes a list of all exceptions encountered, a retry.RetryFailureReason + enum indicating the failure cause, and the original timeout value + as arguments. It should return a tuple of the exception to be raised, + along with the cause exception if any. The default implementation will raise + a RetryError on timeout, or the last exception encountered otherwise. + init_args: Positional arguments to pass to the target function. + init_kwargs: Keyword arguments to pass to the target function. + + Returns: + AsyncGenerator: A retryable generator that wraps the target generator function. + + Raises: + ValueError: If the sleep generator stops yielding values. + Exception: a custom exception specified by the exception_factory if provided. + If no exception_factory is provided: + google.api_core.RetryError: If the timeout is exceeded while retrying. + Exception: If the target raises an error that isn't retryable. + """ + target_iterator: AsyncIterator[_Y] | None = None + timeout = kwargs.get("deadline", timeout) + deadline = time.monotonic() + timeout if timeout else None + # keep track of retryable exceptions we encounter to pass in to exception_factory + error_list: list[Exception] = [] + sleep_iter = iter(sleep_generator) + target_is_generator: bool | None = None + + # continue trying until an attempt completes, or a terminal exception is raised in _retry_error_helper + # TODO: support max_attempts argument: https://github.com/googleapis/python-api-core/issues/535 + while True: + # Start a new retry loop + try: + # Note: in the future, we can add a ResumptionStrategy object + # to generate new args between calls. For now, use the same args + # for each attempt. 
+ target_output: AsyncIterable[_Y] | Awaitable[AsyncIterable[_Y]] = target( + *init_args, **init_kwargs + ) + try: + # gapic functions return the generator behind an awaitable + # unwrap the awaitable so we can work with the generator directly + target_output = await target_output # type: ignore + except TypeError: + # was not awaitable, continue + pass + target_iterator = cast(AsyncIterable["_Y"], target_output).__aiter__() + + if target_is_generator is None: + # Check if target supports generator features (asend, athrow, aclose) + target_is_generator = bool(getattr(target_iterator, "asend", None)) + + sent_in = None + while True: + ## Read from target_iterator + # If the target is a generator, we will advance it with `asend` + # otherwise, we will use `anext` + if target_is_generator: + next_value = await target_iterator.asend(sent_in) # type: ignore + else: + next_value = await target_iterator.__anext__() + ## Yield from Wrapper to caller + try: + # yield latest value from target + # exceptions from `athrow` and `aclose` are injected here + sent_in = yield next_value + except GeneratorExit: + # if wrapper received `aclose` while waiting on yield, + # it will raise GeneratorExit here + if target_is_generator: + # pass to inner target_iterator for handling + await cast(AsyncGenerator["_Y", None], target_iterator).aclose() + else: + raise + return + except: # noqa: E722 + # bare except catches any exception passed to `athrow` + if target_is_generator: + # delegate error handling to target_iterator + await cast(AsyncGenerator["_Y", None], target_iterator).athrow( + cast(BaseException, sys.exc_info()[1]) + ) + else: + raise + return + except StopAsyncIteration: + # if iterator exhausted, return + return + # handle exceptions raised by the target_iterator + # pylint: disable=broad-except + # This function explicitly must deal with broad exceptions. + except Exception as exc: + # defer to shared logic for handling errors + next_sleep = _retry_error_helper( + exc, + deadline, + sleep_iter, + error_list, + predicate, + on_error, + exception_factory, + timeout, + ) + # if exception not raised, sleep before next attempt + await asyncio.sleep(next_sleep) + + finally: + if target_is_generator and target_iterator is not None: + await cast(AsyncGenerator["_Y", None], target_iterator).aclose() + + +class AsyncStreamingRetry(_BaseRetry): + """Exponential retry decorator for asynchronous streaming RPCs. + + This class returns an AsyncGenerator when called, which wraps the target + stream in retry logic. If any exception is raised by the target, the + entire stream will be retried within the wrapper. + + Although the default behavior is to retry transient API errors, a + different predicate can be provided to retry other exceptions. + + Important Note: when a stream encounters a retryable error, it will + silently construct a fresh iterator instance in the background + and continue yielding (likely duplicate) values as if no error occurred. + This is the most general way to retry a stream, but it often is not the + desired behavior. Example: iter([1, 2, 1/0]) -> [1, 2, 1, 2, ...] + + There are two ways to build more advanced retry logic for streams: + + 1. Wrap the target + Use a ``target`` that maintains state between retries, and creates a + different generator on each retry call. For example, you can wrap a + gRPC call in a function that modifies the request based on what has + already been returned: + + .. 
code-block:: python + + async def attempt_with_modified_request(target, request, seen_items=[]): + # remove seen items from request on each attempt + new_request = modify_request(request, seen_items) + new_generator = await target(new_request) + async for item in new_generator: + yield item + seen_items.append(item) + + retry_wrapped_fn = AsyncRetry(is_stream=True, ...)(attempt_with_modified_request) + retryable_generator = retry_wrapped_fn(target, request) + + 2. Wrap the retry generator + Alternatively, you can wrap the retryable generator itself before + passing it to the end-user to add a filter on the stream. For + example, you can keep track of the items that were successfully yielded + in previous retry attempts, and only yield new items when the + new attempt surpasses the previous ones: + + .. code-block:: python + + async def retryable_with_filter(target): + stream_idx = 0 + # reset stream_idx when the stream is retried + def on_error(e): + nonlocal stream_idx + stream_idx = 0 + # build retryable + retryable_gen = AsyncRetry(is_stream=True, ...)(target) + # keep track of what has been yielded out of filter + seen_items = [] + async for item in await retryable_gen(): + if stream_idx >= len(seen_items): + yield item + seen_items.append(item) + elif item != seen_items[stream_idx]: + raise ValueError("Stream differs from last attempt") + stream_idx += 1 + + filter_retry_wrapped = retryable_with_filter(target) + + Args: + predicate (Callable[Exception]): A callable that should return ``True`` + if the given exception is retryable. + initial (float): The minimum amount of time to delay in seconds. This + must be greater than 0. + maximum (float): The maximum amount of time to delay in seconds. + multiplier (float): The multiplier applied to the delay. + timeout (Optional[float]): How long to keep retrying in seconds. + Note: timeout is only checked before initiating a retry, so the target may + run past the timeout value as long as it is healthy. + on_error (Optional[Callable[Exception]]): A function to call while processing + a retryable exception. Any error raised by this function will + *not* be caught. + is_stream (bool): Indicates whether the input function + should be treated as a stream function (i.e. an AsyncGenerator, + or function or coroutine that returns an AsyncIterable). + If True, the iterable will be wrapped with retry logic, and any + failed outputs will restart the stream. If False, only the input + function call itself will be retried. Defaults to False. + To avoid duplicate values, retryable streams should typically be + wrapped in additional filter logic before use. + deadline (float): DEPRECATED: use ``timeout`` instead. If set it will + override the ``timeout`` parameter. + """ + + def __call__( + self, + func: Callable[..., AsyncIterable[_Y] | Awaitable[AsyncIterable[_Y]]], + on_error: Callable[[Exception], Any] | None = None, + ) -> Callable[_P, Awaitable[AsyncGenerator[_Y, None]]]: + """Wrap a callable with retry behavior. + + Args: + func (Callable): The callable or stream to add retry behavior to. + on_error (Optional[Callable[Exception]]): If given, the + on_error callback will be called with each retryable exception + raised by the wrapped function. Any error raised by this + function will *not* be caught. If on_error was specified in the + constructor, this value will be ignored. + + Returns: + Callable: A callable that will invoke ``func`` with retry + behavior. 
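An editorial sketch of consuming the async wrapper (``flaky_async_stream`` and ``process`` are hypothetical; the import path assumes ``AsyncStreamingRetry`` is re-exported from the ``google.api_core.retry`` package). Note that, per the return type above, the wrapper is a coroutine function whose awaited result is the retryable async generator:

.. code-block:: python

    import asyncio

    from google.api_core.retry import AsyncStreamingRetry

    async def main():
        wrapped = AsyncStreamingRetry(timeout=30.0)(flaky_async_stream)
        # await the wrapper to obtain the async generator...
        stream = await wrapped()
        # ...then iterate; the stream restarts on each retryable error
        async for item in stream:
            process(item)

    asyncio.run(main())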
+ """ + if self._on_error is not None: + on_error = self._on_error + + @functools.wraps(func) + async def retry_wrapped_func( + *args: _P.args, **kwargs: _P.kwargs + ) -> AsyncGenerator[_Y, None]: + """A wrapper that calls target function with retry.""" + sleep_generator = exponential_sleep_generator( + self._initial, self._maximum, multiplier=self._multiplier + ) + return retry_target_stream( + func, + self._predicate, + sleep_generator, + self._timeout, + on_error, + init_args=args, + init_kwargs=kwargs, + ) + + return retry_wrapped_func diff --git a/google/api_core/retry/retry_unary.py b/google/api_core/retry/retry_unary.py new file mode 100644 index 000000000..6d36bc7d1 --- /dev/null +++ b/google/api_core/retry/retry_unary.py @@ -0,0 +1,302 @@ +# Copyright 2017 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Helpers for retrying functions with exponential back-off. + +The :class:`Retry` decorator can be used to retry functions that raise +exceptions using exponential backoff. Because a exponential sleep algorithm is +used, the retry is limited by a `timeout`. The timeout determines the window +in which retries will be attempted. This is used instead of total number of retries +because it is difficult to ascertain the amount of time a function can block +when using total number of retries and exponential backoff. + +By default, this decorator will retry transient +API errors (see :func:`if_transient_error`). For example: + +.. code-block:: python + + @retry.Retry() + def call_flaky_rpc(): + return client.flaky_rpc() + + # Will retry flaky_rpc() if it raises transient API errors. + result = call_flaky_rpc() + +You can pass a custom predicate to retry on different exceptions, such as +waiting for an eventually consistent item to be available: + +.. code-block:: python + + @retry.Retry(predicate=if_exception_type(exceptions.NotFound)) + def check_if_exists(): + return client.does_thing_exist() + + is_available = check_if_exists() + +Some client library methods apply retry automatically. These methods can accept +a ``retry`` parameter that allows you to configure the behavior: + +.. 
code-block:: python + + my_retry = retry.Retry(timeout=60) + result = client.some_method(retry=my_retry) + +""" + +from __future__ import annotations + +import functools +import sys +import time +import inspect +import warnings +from typing import Any, Callable, Iterable, TypeVar, TYPE_CHECKING + +from google.api_core.retry.retry_base import _BaseRetry +from google.api_core.retry.retry_base import _retry_error_helper +from google.api_core.retry.retry_base import exponential_sleep_generator +from google.api_core.retry.retry_base import build_retry_error +from google.api_core.retry.retry_base import RetryFailureReason + + +if TYPE_CHECKING: + if sys.version_info >= (3, 10): + from typing import ParamSpec + else: + from typing_extensions import ParamSpec + + _P = ParamSpec("_P") # target function call parameters + _R = TypeVar("_R") # target function returned value + +_ASYNC_RETRY_WARNING = "Using the synchronous google.api_core.retry.Retry with asynchronous calls may lead to unexpected results. Please use google.api_core.retry_async.AsyncRetry instead." + + +def retry_target( + target: Callable[[], _R], + predicate: Callable[[Exception], bool], + sleep_generator: Iterable[float], + timeout: float | None = None, + on_error: Callable[[Exception], None] | None = None, + exception_factory: Callable[ + [list[Exception], RetryFailureReason, float | None], + tuple[Exception, Exception | None], + ] = build_retry_error, + **kwargs, +): + """Call a function and retry if it fails. + + This is the lowest-level retry helper. Generally, you'll use the + higher-level retry helper :class:`Retry`. + + Args: + target(Callable): The function to call and retry. This must be a + nullary function - apply arguments with `functools.partial`. + predicate (Callable[Exception]): A callable used to determine if an + exception raised by the target should be considered retryable. + It should return True to retry or False otherwise. + sleep_generator (Iterable[float]): An infinite iterator that determines + how long to sleep between retries. + timeout (Optional[float]): How long to keep retrying the target. + Note: timeout is only checked before initiating a retry, so the target may + run past the timeout value as long as it is healthy. + on_error (Optional[Callable[Exception]]): If given, the on_error + callback will be called with each retryable exception raised by the + target. Any error raised by this function will *not* be caught. + exception_factory: A function that is called when the retryable reaches + a terminal failure state, used to construct an exception to be raised. + It takes a list of all exceptions encountered, a retry.RetryFailureReason + enum indicating the failure cause, and the original timeout value + as arguments. It should return a tuple of the exception to be raised, + along with the cause exception if any. The default implementation will raise + a RetryError on timeout, or the last exception encountered otherwise. + deadline (float): DEPRECATED: use ``timeout`` instead. For backward + compatibility, if specified it will override ``timeout`` parameter. + + Returns: + Any: the return value of the target function. + + Raises: + ValueError: If the sleep generator stops yielding values. + Exception: a custom exception specified by the exception_factory if provided. + If no exception_factory is provided: + google.api_core.RetryError: If the timeout is exceeded while retrying. + Exception: If the target raises an error that isn't retryable. 
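An editorial sketch of driving this low-level helper directly (``flaky_call`` is hypothetical; the other names are defined in this change and, judging by the imports elsewhere in this diff, re-exported from ``google.api_core.retry``):

.. code-block:: python

    import functools

    from google.api_core.retry import (
        exponential_sleep_generator,
        if_transient_error,
        retry_target,
    )

    result = retry_target(
        # retry_target expects a nullary callable, so bind arguments first
        target=functools.partial(flaky_call, "request-id"),
        predicate=if_transient_error,
        # randomized exponential backoff, capped at 60s between attempts
        sleep_generator=exponential_sleep_generator(1.0, 60.0, multiplier=2.0),
        timeout=120.0,
    )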
+ """ + + timeout = kwargs.get("deadline", timeout) + + deadline = time.monotonic() + timeout if timeout is not None else None + error_list: list[Exception] = [] + sleep_iter = iter(sleep_generator) + + # continue trying until an attempt completes, or a terminal exception is raised in _retry_error_helper + # TODO: support max_attempts argument: https://github.com/googleapis/python-api-core/issues/535 + while True: + try: + result = target() + if inspect.isawaitable(result): + warnings.warn(_ASYNC_RETRY_WARNING) + return result + + # pylint: disable=broad-except + # This function explicitly must deal with broad exceptions. + except Exception as exc: + # defer to shared logic for handling errors + next_sleep = _retry_error_helper( + exc, + deadline, + sleep_iter, + error_list, + predicate, + on_error, + exception_factory, + timeout, + ) + # if exception not raised, sleep before next attempt + time.sleep(next_sleep) + + +class Retry(_BaseRetry): + """Exponential retry decorator for unary synchronous RPCs. + + This class is a decorator used to add retry or polling behavior to an RPC + call. + + Although the default behavior is to retry transient API errors, a + different predicate can be provided to retry other exceptions. + + There are two important concepts that retry/polling behavior may operate on, + Deadline and Timeout, which need to be properly defined for the correct + usage of this class and the rest of the library. + + Deadline: a fixed point in time by which a certain operation must + terminate. For example, if a certain operation has a deadline + "2022-10-18T23:30:52.123Z" it must terminate (successfully or with an + error) by that time, regardless of when it was started or whether it + was started at all. + + Timeout: the maximum duration of time after which a certain operation + must terminate (successfully or with an error). The countdown begins right + after an operation was started. For example, if an operation was started at + 09:24:00 with timeout of 75 seconds, it must terminate no later than + 09:25:15. + + Unfortunately, in the past this class (and the api-core library as a whole) has not + been properly distinguishing the concepts of "timeout" and "deadline", and the + ``deadline`` parameter has meant ``timeout``. That is why + ``deadline`` has been deprecated and ``timeout`` should be used instead. If the + ``deadline`` parameter is set, it will override the ``timeout`` parameter. + In other words, ``retry.deadline`` should be treated as just a deprecated alias for + ``retry.timeout``. + + Said another way, it is safe to assume that this class and the rest of this + library operate in terms of timeouts (not deadlines) unless explicitly + noted the usage of deadline semantics. + + It is also important to + understand the three most common applications of the Timeout concept in the + context of this library. + + Usually the generic Timeout term may stand for one of the following actual + timeouts: RPC Timeout, Retry Timeout, or Polling Timeout. + + RPC Timeout: a value supplied by the client to the server so + that the server side knows the maximum amount of time it is expected to + spend handling that specific RPC. For example, in the case of gRPC transport, + RPC Timeout is represented by setting "grpc-timeout" header in the HTTP2 + request. The `timeout` property of this class normally never represents the + RPC Timeout as it is handled separately by the ``google.api_core.timeout`` + module of this library. 
+ + Retry Timeout: this is the most common meaning of the ``timeout`` property + of this class, and defines how long a certain RPC may be retried in case + the server returns an error. + + Polling Timeout: defines how long the + client side is allowed to call the polling RPC repeatedly to check a status of a + long-running operation. Each polling RPC is + expected to succeed (its errors are supposed to be handled by the retry + logic). The decision as to whether a new polling attempt needs to be made is based + not on the RPC status code but on the status of the returned + status of an operation. In other words: we will poll a long-running operation until + the operation is done or the polling timeout expires. Each poll will inform us of + the status of the operation. The poll consists of an RPC to the server that may + itself be retried as per the poll-specific retry settings in case of errors. The + operation-level retry settings do NOT apply to polling-RPC retries. + + With the actual timeout types being defined above, the client libraries + often refer to just Timeout without clarifying which type specifically + that is. In that case the actual timeout type (sometimes also referred to as + Logical Timeout) can be determined from the context. If it is a unary rpc + call (i.e. a regular one) Timeout usually stands for the RPC Timeout (if + provided directly as a standalone value) or Retry Timeout (if provided as + ``retry.timeout`` property of the unary RPC's retry config). For + ``Operation`` or ``PollingFuture`` in general Timeout stands for + Polling Timeout. + + Args: + predicate (Callable[Exception]): A callable that should return ``True`` + if the given exception is retryable. + initial (float): The minimum amount of time to delay in seconds. This + must be greater than 0. + maximum (float): The maximum amount of time to delay in seconds. + multiplier (float): The multiplier applied to the delay. + timeout (Optional[float]): How long to keep retrying, in seconds. + Note: timeout is only checked before initiating a retry, so the target may + run past the timeout value as long as it is healthy. + on_error (Callable[Exception]): A function to call while processing + a retryable exception. Any error raised by this function will + *not* be caught. + deadline (float): DEPRECATED: use `timeout` instead. For backward + compatibility, if specified it will override the ``timeout`` parameter. + """ + + def __call__( + self, + func: Callable[_P, _R], + on_error: Callable[[Exception], Any] | None = None, + ) -> Callable[_P, _R]: + """Wrap a callable with retry behavior. + + Args: + func (Callable): The callable to add retry behavior to. + on_error (Optional[Callable[Exception]]): If given, the + on_error callback will be called with each retryable exception + raised by the wrapped function. Any error raised by this + function will *not* be caught. If on_error was specified in the + constructor, this value will be ignored. + + Returns: + Callable: A callable that will invoke ``func`` with retry + behavior. 
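An editorial sketch combining the decorator form with the immutable ``with_*`` builders defined on ``_BaseRetry`` earlier in this change (``client`` and its methods are hypothetical):

.. code-block:: python

    from google.api_core import exceptions
    from google.api_core.retry import Retry, if_exception_type

    base = Retry(initial=1.0, maximum=32.0, multiplier=2.0, timeout=120.0)

    # each with_* call returns a new instance; `base` itself is unchanged
    short = base.with_timeout(15.0)
    wait_for_item = base.with_predicate(
        if_exception_type(exceptions.NotFound)
    )

    @short
    def quick_lookup():
        return client.lookup()

    result = quick_lookup()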
+ """ + if self._on_error is not None: + on_error = self._on_error + + @functools.wraps(func) + def retry_wrapped_func(*args: _P.args, **kwargs: _P.kwargs) -> _R: + """A wrapper that calls target function with retry.""" + target = functools.partial(func, *args, **kwargs) + sleep_generator = exponential_sleep_generator( + self._initial, self._maximum, multiplier=self._multiplier + ) + return retry_target( + target, + self._predicate, + sleep_generator, + timeout=self._timeout, + on_error=on_error, + ) + + return retry_wrapped_func diff --git a/google/api_core/retry/retry_unary_async.py b/google/api_core/retry/retry_unary_async.py new file mode 100644 index 000000000..1f72476af --- /dev/null +++ b/google/api_core/retry/retry_unary_async.py @@ -0,0 +1,239 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Helpers for retrying coroutine functions with exponential back-off. + +The :class:`AsyncRetry` decorator shares most functionality and behavior with +:class:`Retry`, but supports coroutine functions. Please refer to description +of :class:`Retry` for more details. + +By default, this decorator will retry transient +API errors (see :func:`if_transient_error`). For example: + +.. code-block:: python + + @retry_async.AsyncRetry() + async def call_flaky_rpc(): + return await client.flaky_rpc() + + # Will retry flaky_rpc() if it raises transient API errors. + result = await call_flaky_rpc() + +You can pass a custom predicate to retry on different exceptions, such as +waiting for an eventually consistent item to be available: + +.. code-block:: python + + @retry_async.AsyncRetry(predicate=retry_async.if_exception_type(exceptions.NotFound)) + async def check_if_exists(): + return await client.does_thing_exist() + + is_available = await check_if_exists() + +Some client library methods apply retry automatically. These methods can accept +a ``retry`` parameter that allows you to configure the behavior: + +.. 
code-block:: python + + my_retry = retry_async.AsyncRetry(timeout=60) + result = await client.some_method(retry=my_retry) + +""" + +from __future__ import annotations + +import asyncio +import time +import functools +from typing import ( + Awaitable, + Any, + Callable, + Iterable, + TypeVar, + TYPE_CHECKING, +) + +from google.api_core.retry.retry_base import _BaseRetry +from google.api_core.retry.retry_base import _retry_error_helper +from google.api_core.retry.retry_base import exponential_sleep_generator +from google.api_core.retry.retry_base import build_retry_error +from google.api_core.retry.retry_base import RetryFailureReason + +# for backwards compatibility, expose helpers in this module +from google.api_core.retry.retry_base import if_exception_type # noqa +from google.api_core.retry.retry_base import if_transient_error # noqa + +if TYPE_CHECKING: + import sys + + if sys.version_info >= (3, 10): + from typing import ParamSpec + else: + from typing_extensions import ParamSpec + + _P = ParamSpec("_P") # target function call parameters + _R = TypeVar("_R") # target function returned value + +_DEFAULT_INITIAL_DELAY = 1.0 # seconds +_DEFAULT_MAXIMUM_DELAY = 60.0 # seconds +_DEFAULT_DELAY_MULTIPLIER = 2.0 +_DEFAULT_DEADLINE = 60.0 * 2.0 # seconds +_DEFAULT_TIMEOUT = 60.0 * 2.0 # seconds + + +async def retry_target( + target: Callable[[], Awaitable[_R]], + predicate: Callable[[Exception], bool], + sleep_generator: Iterable[float], + timeout: float | None = None, + on_error: Callable[[Exception], None] | None = None, + exception_factory: Callable[ + [list[Exception], RetryFailureReason, float | None], + tuple[Exception, Exception | None], + ] = build_retry_error, + **kwargs, +): + """Await a coroutine and retry if it fails. + + This is the lowest-level retry helper. Generally, you'll use the + higher-level retry helper :class:`Retry`. + + Args: + target(Callable[[], Any]): The function to call and retry. This must be a + nullary function - apply arguments with `functools.partial`. + predicate (Callable[Exception]): A callable used to determine if an + exception raised by the target should be considered retryable. + It should return True to retry or False otherwise. + sleep_generator (Iterable[float]): An infinite iterator that determines + how long to sleep between retries. + timeout (Optional[float]): How long to keep retrying the target, in seconds. + Note: timeout is only checked before initiating a retry, so the target may + run past the timeout value as long as it is healthy. + on_error (Optional[Callable[Exception]]): If given, the on_error + callback will be called with each retryable exception raised by the + target. Any error raised by this function will *not* be caught. + exception_factory: A function that is called when the retryable reaches + a terminal failure state, used to construct an exception to be raised. + It takes a list of all exceptions encountered, a retry.RetryFailureReason + enum indicating the failure cause, and the original timeout value + as arguments. It should return a tuple of the exception to be raised, + along with the cause exception if any. The default implementation will raise + a RetryError on timeout, or the last exception encountered otherwise. + deadline (float): DEPRECATED use ``timeout`` instead. For backward + compatibility, if set it will override the ``timeout`` parameter. + + Returns: + Any: the return value of the target function. + + Raises: + ValueError: If the sleep generator stops yielding values. 
+ Exception: a custom exception specified by the exception_factory if provided. + If no exception_factory is provided: + google.api_core.RetryError: If the timeout is exceeded while retrying. + Exception: If the target raises an error that isn't retryable. + """ + + timeout = kwargs.get("deadline", timeout) + + deadline = time.monotonic() + timeout if timeout is not None else None + error_list: list[Exception] = [] + sleep_iter = iter(sleep_generator) + + # continue trying until an attempt completes, or a terminal exception is raised in _retry_error_helper + # TODO: support max_attempts argument: https://github.com/googleapis/python-api-core/issues/535 + while True: + try: + return await target() + # pylint: disable=broad-except + # This function explicitly must deal with broad exceptions. + except Exception as exc: + # defer to shared logic for handling errors + next_sleep = _retry_error_helper( + exc, + deadline, + sleep_iter, + error_list, + predicate, + on_error, + exception_factory, + timeout, + ) + # if exception not raised, sleep before next attempt + await asyncio.sleep(next_sleep) + + +class AsyncRetry(_BaseRetry): + """Exponential retry decorator for async coroutines. + + This class is a decorator used to add exponential back-off retry behavior + to an RPC call. + + Although the default behavior is to retry transient API errors, a + different predicate can be provided to retry other exceptions. + + Args: + predicate (Callable[Exception]): A callable that should return ``True`` + if the given exception is retryable. + initial (float): The minimum amount of time to delay in seconds. This + must be greater than 0. + maximum (float): The maximum amount of time to delay in seconds. + multiplier (float): The multiplier applied to the delay. + timeout (Optional[float]): How long to keep retrying in seconds. + Note: timeout is only checked before initiating a retry, so the target may + run past the timeout value as long as it is healthy. + on_error (Optional[Callable[Exception]]): A function to call while processing + a retryable exception. Any error raised by this function will + *not* be caught. + deadline (float): DEPRECATED use ``timeout`` instead. If set it will + override ``timeout`` parameter. + """ + + def __call__( + self, + func: Callable[..., Awaitable[_R]], + on_error: Callable[[Exception], Any] | None = None, + ) -> Callable[_P, Awaitable[_R]]: + """Wrap a callable with retry behavior. + + Args: + func (Callable): The callable or stream to add retry behavior to. + on_error (Optional[Callable[Exception]]): If given, the + on_error callback will be called with each retryable exception + raised by the wrapped function. Any error raised by this + function will *not* be caught. If on_error was specified in the + constructor, this value will be ignored. + + Returns: + Callable: A callable that will invoke ``func`` with retry + behavior. 
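An editorial sketch of decorator usage with an ``on_error`` hook (``client.flaky_rpc`` is hypothetical; the import path is the backwards-compatibility module shown later in this diff):

.. code-block:: python

    import asyncio

    from google.api_core.retry_async import AsyncRetry

    def log_attempt(exc):
        # called once per retryable exception; anything raised here propagates
        print(f"retrying after: {exc}")

    @AsyncRetry(timeout=60.0, on_error=log_attempt)
    async def call_flaky_rpc():
        return await client.flaky_rpc()

    result = asyncio.run(call_flaky_rpc())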
+ """ + if self._on_error is not None: + on_error = self._on_error + + @functools.wraps(func) + async def retry_wrapped_func(*args: _P.args, **kwargs: _P.kwargs) -> _R: + """A wrapper that calls target function with retry.""" + sleep_generator = exponential_sleep_generator( + self._initial, self._maximum, multiplier=self._multiplier + ) + return await retry_target( + functools.partial(func, *args, **kwargs), + predicate=self._predicate, + sleep_generator=sleep_generator, + timeout=self._timeout, + on_error=on_error, + ) + + return retry_wrapped_func diff --git a/google/api_core/retry_async.py b/google/api_core/retry_async.py index 739e88d91..90a2d5adf 100644 --- a/google/api_core/retry_async.py +++ b/google/api_core/retry_async.py @@ -1,4 +1,4 @@ -# Copyright 2020 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -11,301 +11,24 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. - -"""Helpers for retrying coroutine functions with exponential back-off. - -The :class:`AsyncRetry` decorator shares most functionality and behavior with -:class:`Retry`, but supports coroutine functions. Please refer to description -of :class:`Retry` for more details. - -By default, this decorator will retry transient -API errors (see :func:`if_transient_error`). For example: - -.. code-block:: python - - @retry_async.AsyncRetry() - async def call_flaky_rpc(): - return await client.flaky_rpc() - - # Will retry flaky_rpc() if it raises transient API errors. - result = await call_flaky_rpc() - -You can pass a custom predicate to retry on different exceptions, such as -waiting for an eventually consistent item to be available: - -.. code-block:: python - - @retry_async.AsyncRetry(predicate=retry_async.if_exception_type(exceptions.NotFound)) - async def check_if_exists(): - return await client.does_thing_exist() - - is_available = await check_if_exists() - -Some client library methods apply retry automatically. These methods can accept -a ``retry`` parameter that allows you to configure the behavior: - -.. 
code-block:: python - - my_retry = retry_async.AsyncRetry(deadline=60) - result = await client.some_method(retry=my_retry) - -""" - -import asyncio -import datetime -import functools -import logging - -from google.api_core import datetime_helpers -from google.api_core import exceptions -from google.api_core.retry import exponential_sleep_generator +# +# The following imports are for backwards compatibility with https://github.com/googleapis/python-api-core/blob/4d7d2edee2c108d43deb151e6e0fdceb56b73275/google/api_core/retry_async.py +# +# TODO: Revert these imports on the next major version release (https://github.com/googleapis/python-api-core/issues/576) +from google.api_core import datetime_helpers # noqa: F401 +from google.api_core import exceptions # noqa: F401 +from google.api_core.retry import exponential_sleep_generator # noqa: F401 from google.api_core.retry import if_exception_type # noqa: F401 -from google.api_core.retry import if_transient_error - - -_LOGGER = logging.getLogger(__name__) -_DEFAULT_INITIAL_DELAY = 1.0 # seconds -_DEFAULT_MAXIMUM_DELAY = 60.0 # seconds -_DEFAULT_DELAY_MULTIPLIER = 2.0 -_DEFAULT_DEADLINE = 60.0 * 2.0 # seconds -_DEFAULT_TIMEOUT = 60.0 * 2.0 # seconds - - -async def retry_target( - target, predicate, sleep_generator, timeout=None, on_error=None, **kwargs -): - """Call a function and retry if it fails. - - This is the lowest-level retry helper. Generally, you'll use the - higher-level retry helper :class:`Retry`. - - Args: - target(Callable): The function to call and retry. This must be a - nullary function - apply arguments with `functools.partial`. - predicate (Callable[Exception]): A callable used to determine if an - exception raised by the target should be considered retryable. - It should return True to retry or False otherwise. - sleep_generator (Iterable[float]): An infinite iterator that determines - how long to sleep between retries. - timeout (float): How long to keep retrying the target, in seconds. - on_error (Callable[Exception]): A function to call while processing a - retryable exception. Any error raised by this function will *not* - be caught. - deadline (float): DEPRECATED use ``timeout`` instead. For backward - compatibility, if set it will override the ``timeout`` parameter. - - Returns: - Any: the return value of the target function. - - Raises: - google.api_core.RetryError: If the deadline is exceeded while retrying. - ValueError: If the sleep generator stops yielding values. - Exception: If the target raises a method that isn't retryable. - """ - - timeout = kwargs.get("deadline", timeout) - - deadline_dt = ( - (datetime_helpers.utcnow() + datetime.timedelta(seconds=timeout)) - if timeout - else None - ) - - last_exc = None - - for sleep in sleep_generator: - try: - if not deadline_dt: - return await target() - else: - return await asyncio.wait_for( - target(), - timeout=(deadline_dt - datetime_helpers.utcnow()).total_seconds(), - ) - # pylint: disable=broad-except - # This function explicitly must deal with broad exceptions. - except Exception as exc: - if not predicate(exc) and not isinstance(exc, asyncio.TimeoutError): - raise - last_exc = exc - if on_error is not None: - on_error(exc) - - now = datetime_helpers.utcnow() - - if deadline_dt: - if deadline_dt <= now: - # Chains the raising RetryError with the root cause error, - # which helps observability and debugability. 
- raise exceptions.RetryError( - "Timeout of {:.1f}s exceeded while calling target function".format( - timeout - ), - last_exc, - ) from last_exc - else: - time_to_deadline = (deadline_dt - now).total_seconds() - sleep = min(time_to_deadline, sleep) - - _LOGGER.debug( - "Retrying due to {}, sleeping {:.1f}s ...".format(last_exc, sleep) - ) - await asyncio.sleep(sleep) - - raise ValueError("Sleep generator stopped yielding sleep values.") - - -class AsyncRetry: - """Exponential retry decorator for async functions. - - This class is a decorator used to add exponential back-off retry behavior - to an RPC call. - - Although the default behavior is to retry transient API errors, a - different predicate can be provided to retry other exceptions. - - Args: - predicate (Callable[Exception]): A callable that should return ``True`` - if the given exception is retryable. - initial (float): The minimum a,out of time to delay in seconds. This - must be greater than 0. - maximum (float): The maximum amount of time to delay in seconds. - multiplier (float): The multiplier applied to the delay. - timeout (float): How long to keep retrying in seconds. - on_error (Callable[Exception]): A function to call while processing - a retryable exception. Any error raised by this function will - *not* be caught. - deadline (float): DEPRECATED use ``timeout`` instead. If set it will - override ``timeout`` parameter. - """ - - def __init__( - self, - predicate=if_transient_error, - initial=_DEFAULT_INITIAL_DELAY, - maximum=_DEFAULT_MAXIMUM_DELAY, - multiplier=_DEFAULT_DELAY_MULTIPLIER, - timeout=_DEFAULT_TIMEOUT, - on_error=None, - **kwargs - ): - self._predicate = predicate - self._initial = initial - self._multiplier = multiplier - self._maximum = maximum - self._timeout = kwargs.get("deadline", timeout) - self._deadline = self._timeout - self._on_error = on_error - - def __call__(self, func, on_error=None): - """Wrap a callable with retry behavior. - - Args: - func (Callable): The callable to add retry behavior to. - on_error (Callable[Exception]): A function to call while processing - a retryable exception. Any error raised by this function will - *not* be caught. - - Returns: - Callable: A callable that will invoke ``func`` with retry - behavior. - """ - if self._on_error is not None: - on_error = self._on_error - - @functools.wraps(func) - async def retry_wrapped_func(*args, **kwargs): - """A wrapper that calls target function with retry.""" - target = functools.partial(func, *args, **kwargs) - sleep_generator = exponential_sleep_generator( - self._initial, self._maximum, multiplier=self._multiplier - ) - return await retry_target( - target, - self._predicate, - sleep_generator, - self._timeout, - on_error=on_error, - ) - - return retry_wrapped_func - - def _replace( - self, - predicate=None, - initial=None, - maximum=None, - multiplier=None, - timeout=None, - on_error=None, - ): - return AsyncRetry( - predicate=predicate or self._predicate, - initial=initial or self._initial, - maximum=maximum or self._maximum, - multiplier=multiplier or self._multiplier, - timeout=timeout or self._timeout, - on_error=on_error or self._on_error, - ) - - def with_deadline(self, deadline): - """Return a copy of this retry with the given deadline. - DEPRECATED: use :meth:`with_timeout` instead. - - Args: - deadline (float): How long to keep retrying. - - Returns: - AsyncRetry: A new retry instance with the given deadline. 
- """ - return self._replace(timeout=deadline) - - def with_timeout(self, timeout): - """Return a copy of this retry with the given timeout. - - Args: - timeout (float): How long to keep retrying, in seconds. - - Returns: - AsyncRetry: A new retry instance with the given timeout. - """ - return self._replace(timeout=timeout) - - def with_predicate(self, predicate): - """Return a copy of this retry with the given predicate. - - Args: - predicate (Callable[Exception]): A callable that should return - ``True`` if the given exception is retryable. - - Returns: - AsyncRetry: A new retry instance with the given predicate. - """ - return self._replace(predicate=predicate) - - def with_delay(self, initial=None, maximum=None, multiplier=None): - """Return a copy of this retry with the given delay options. - - Args: - initial (float): The minimum amount of time to delay. This must - be greater than 0. - maximum (float): The maximum amount of time to delay. - multiplier (float): The multiplier applied to the delay. - - Returns: - AsyncRetry: A new retry instance with the given predicate. - """ - return self._replace(initial=initial, maximum=maximum, multiplier=multiplier) - - def __str__(self): - return ( - "".format( - self._predicate, - self._initial, - self._maximum, - self._multiplier, - self._timeout, - self._on_error, - ) - ) +from google.api_core.retry import if_transient_error # noqa: F401 +from google.api_core.retry.retry_unary_async import AsyncRetry +from google.api_core.retry.retry_unary_async import retry_target + +__all__ = ( + "AsyncRetry", + "datetime_helpers", + "exceptions", + "exponential_sleep_generator", + "if_exception_type", + "if_transient_error", + "retry_target", +) diff --git a/google/api_core/timeout.py b/google/api_core/timeout.py index 868e3e9f4..55b195e90 100644 --- a/google/api_core/timeout.py +++ b/google/api_core/timeout.py @@ -102,8 +102,7 @@ def __call__(self, func): def func_with_timeout(*args, **kwargs): """Wrapped function that adds timeout.""" - remaining_timeout = self._timeout - if remaining_timeout is not None: + if self._timeout is not None: # All calculations are in seconds now_timestamp = self._clock().timestamp() @@ -114,8 +113,19 @@ def func_with_timeout(*args, **kwargs): now_timestamp = first_attempt_timestamp time_since_first_attempt = now_timestamp - first_attempt_timestamp - # Avoid setting negative timeout - kwargs["timeout"] = max(0, self._timeout - time_since_first_attempt) + remaining_timeout = self._timeout - time_since_first_attempt + + # Although the `deadline` parameter in `google.api_core.retry.Retry` + # is deprecated, and should be treated the same as the `timeout`, + # it is still possible for the `deadline` argument in + # `google.api_core.retry.Retry` to be larger than the `timeout`. + # See https://github.com/googleapis/python-api-core/issues/654 + # Only positive non-zero timeouts are supported. + # Revert back to the initial timeout for negative or 0 timeout values. + if remaining_timeout < 1: + remaining_timeout = self._timeout + + kwargs["timeout"] = remaining_timeout return func(*args, **kwargs) diff --git a/google/api_core/universe.py b/google/api_core/universe.py new file mode 100644 index 000000000..35669642c --- /dev/null +++ b/google/api_core/universe.py @@ -0,0 +1,82 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Helpers for universe domain.""" + +from typing import Any, Optional + +DEFAULT_UNIVERSE = "googleapis.com" + + +class EmptyUniverseError(ValueError): + def __init__(self): + message = "Universe Domain cannot be an empty string." + super().__init__(message) + + +class UniverseMismatchError(ValueError): + def __init__(self, client_universe, credentials_universe): + message = ( + f"The configured universe domain ({client_universe}) does not match the universe domain " + f"found in the credentials ({credentials_universe}). " + "If you haven't configured the universe domain explicitly, " + f"`{DEFAULT_UNIVERSE}` is the default." + ) + super().__init__(message) + + +def determine_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] +) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the + "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise EmptyUniverseError + return universe_domain + + +def compare_domains(client_universe: str, credentials: Any) -> bool: + """Returns True iff the universe domains used by the client and credentials match. + + Args: + client_universe (str): The universe domain configured via the client options. + credentials Any: The credentials being used in the client. + + Returns: + bool: True iff client_universe matches the universe in credentials. + + Raises: + ValueError: when client_universe does not match the universe in credentials. + """ + credentials_universe = getattr(credentials, "universe_domain", DEFAULT_UNIVERSE) + + if client_universe != credentials_universe: + raise UniverseMismatchError(client_universe, credentials_universe) + return True diff --git a/google/api_core/version.py b/google/api_core/version.py index ba8b4e8af..967959b05 100644 --- a/google/api_core/version.py +++ b/google/api_core/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = "2.14.0" +__version__ = "2.28.1" diff --git a/google/api_core/version_header.py b/google/api_core/version_header.py new file mode 100644 index 000000000..cf1972aca --- /dev/null +++ b/google/api_core/version_header.py @@ -0,0 +1,29 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
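As an editorial aside on ``universe.py`` above, a short sketch of the resolution order implemented by ``determine_domain`` (the domain values are illustrative):

.. code-block:: python

    from google.api_core import universe

    # an explicit client option wins over the environment variable
    assert universe.determine_domain("example.com", "env.example.com") == "example.com"
    # the environment variable is used when no client option is set
    assert universe.determine_domain(None, "env.example.com") == "env.example.com"
    # neither configured: fall back to the default universe
    assert universe.determine_domain(None, None) == "googleapis.com"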
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +API_VERSION_METADATA_KEY = "x-goog-api-version" + + +def to_api_version_header(version_identifier): + """Returns data for the API Version header for the given `version_identifier`. + + Args: + version_identifier (str): The version identifier to be used in the + tuple returned. + + Returns: + Tuple(str, str): A tuple containing the API Version metadata key and + value. + """ + return (API_VERSION_METADATA_KEY, version_identifier) diff --git a/mypy.ini b/mypy.ini deleted file mode 100644 index ce33582a6..000000000 --- a/mypy.ini +++ /dev/null @@ -1,4 +0,0 @@ -[mypy] -python_version = 3.7 -namespace_packages = True -ignore_missing_imports = True diff --git a/noxfile.py b/noxfile.py index 82ea26697..9f53dbde0 100644 --- a/noxfile.py +++ b/noxfile.py @@ -15,18 +15,22 @@ from __future__ import absolute_import import os import pathlib +import re import shutil +import unittest # https://github.com/google/importlab/issues/25 import nox # pytype: disable=import-error -BLACK_VERSION = "black==22.3.0" +BLACK_VERSION = "black==23.7.0" BLACK_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] # Black and flake8 clash on the syntax for ignoring flake8's F401 in this file. BLACK_EXCLUDES = ["--exclude", "^/google/api_core/operations_v1/__init__.py"] -DEFAULT_PYTHON_VERSION = "3.10" +PYTHON_VERSIONS = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12", "3.13", "3.14"] + +DEFAULT_PYTHON_VERSION = "3.14" CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() # 'docfx' is excluded since it only needs to run in 'docs-presubmit' @@ -34,6 +38,8 @@ "unit", "unit_grpc_gcp", "unit_wo_grpc", + "unit_w_prerelease_deps", + "unit_w_async_rest_extra", "cover", "pytype", "mypy", @@ -43,15 +49,8 @@ "docs", ] - -def _greater_or_equal_than_37(version_string): - tokens = version_string.split(".") - for i, token in enumerate(tokens): - try: - tokens[i] = int(token) - except ValueError: - pass - return tokens >= [3, 7] +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True @nox.session(python=DEFAULT_PYTHON_VERSION) @@ -82,7 +81,37 @@ def blacken(session): session.run("black", *BLACK_EXCLUDES, *BLACK_PATHS) -def default(session, install_grpc=True): +def install_prerelease_dependencies(session, constraints_path): + with open(constraints_path, encoding="utf-8") as constraints_file: + constraints_text = constraints_file.read() + # Ignore leading whitespace and comment lines. + constraints_deps = [ + match.group(1) + for match in re.finditer( + r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE + ) + ] + session.install(*constraints_deps) + prerel_deps = [ + "google-auth", + "googleapis-common-protos", + "grpcio", + "grpcio-status", + "proto-plus", + "protobuf", + ] + + for dep in prerel_deps: + session.install("--pre", "--no-deps", "--upgrade", dep) + + # Remaining dependencies + other_deps = [ + "requests", + ] + session.install(*other_deps) + + +def default(session, install_grpc=True, prerelease=False, install_async_rest=False): """Default unit test session. 
This is intended to be run **without** an interpreter set, so @@ -90,22 +119,67 @@ def default(session, install_grpc=True): Python corresponding to the ``nox`` binary the ``PATH`` can run the tests. """ - constraints_path = str( - CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" - ) + if prerelease and not install_grpc: + unittest.skip("The pre-release session cannot be run without grpc") session.install( "dataclasses", - "mock", + "mock; python_version=='3.7'", "pytest", "pytest-cov", + "pytest-mock", "pytest-xdist", ) + install_extras = [] if install_grpc: - session.install("-e", ".[grpc]", "-c", constraints_path) + # Note: The extra is called `grpc` and not `grpcio`. + install_extras.append("grpc") + + constraints_dir = str(CURRENT_DIRECTORY / "testing") + if install_async_rest: + install_extras.append("async_rest") + constraints_type = "async-rest-" + else: + constraints_type = "" + + lib_with_extras = f".[{','.join(install_extras)}]" if len(install_extras) else "." + if prerelease: + install_prerelease_dependencies( + session, + f"{constraints_dir}/constraints-{constraints_type}{PYTHON_VERSIONS[0]}.txt", + ) + # This *must* be the last install command to get the package from source. + session.install("-e", lib_with_extras, "--no-deps") else: - session.install("-e", ".", "-c", constraints_path) + constraints_file = ( + f"{constraints_dir}/constraints-{constraints_type}{session.python}.txt" + ) + # fall back to standard constraints file + if not pathlib.Path(constraints_file).exists(): + constraints_file = f"{constraints_dir}/constraints-{session.python}.txt" + + session.install( + "-e", + lib_with_extras, + "-c", + constraints_file, + ) + + # Print out package versions of dependencies + session.run( + "python", "-c", "import google.protobuf; print(google.protobuf.__version__)" + ) + # Support for proto.version was added in v1.23.0 + # https://github.com/googleapis/proto-plus-python/releases/tag/v1.23.0 + session.run( + "python", + "-c", + """import proto; hasattr(proto, "version") and print(proto.version.__version__)""", + ) + if install_grpc: + session.run("python", "-c", "import grpc; print(grpc.__version__)") + session.run("python", "-c", "import google.auth; print(google.auth.__version__)") pytest_args = [ "python", @@ -129,71 +203,80 @@ def default(session, install_grpc=True): ), ] - # Inject AsyncIO content and proto-plus, if version >= 3.7. - # proto-plus is needed for a field mask test in test_protobuf_helpers.py - if _greater_or_equal_than_37(session.python): - session.install("asyncmock", "pytest-asyncio", "proto-plus") + session.install("asyncmock", "pytest-asyncio") - # Having positional arguments means the user wants to run specific tests. - # Best not to add additional tests to that list. - if not session.posargs: - pytest_args.append("--cov=tests.asyncio") - pytest_args.append(os.path.join("tests", "asyncio")) + # Having positional arguments means the user wants to run specific tests. + # Best not to add additional tests to that list. 
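Before the `session.posargs` check below, two pieces of the machinery just added to `install_prerelease_dependencies()` and `default()` deserve unpacking: the regex that harvests pinned package names from a constraints file, and the constraints-file fallback for the async-rest flavor. A self-contained sketch of both; the file contents and the helper name are illustrative:

```python
import pathlib
import re

# 1) What the (?===\S+) lookahead extracts: the package name of every
#    `pkg==version` pin, skipping comments and blank lines.
constraints_text = """\
# comments and blank lines are ignored
googleapis-common-protos==1.56.2
protobuf==3.19.5
"""
deps = [
    m.group(1)
    for m in re.finditer(r"^\s*(\S+)(?===\S+)", constraints_text, flags=re.MULTILINE)
]
assert deps == ["googleapis-common-protos", "protobuf"]


# 2) The constraints-file lookup: async-rest runs prefer a flavored file and
#    fall back to the standard per-version file when it does not exist.
def pick_constraints(constraints_dir, python_version, async_rest=False):
    prefix = "async-rest-" if async_rest else ""
    candidate = pathlib.Path(constraints_dir) / f"constraints-{prefix}{python_version}.txt"
    if candidate.exists():
        return candidate
    return pathlib.Path(constraints_dir) / f"constraints-{python_version}.txt"
```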
+ if not session.posargs: + pytest_args.append("--cov=tests.asyncio") + pytest_args.append(os.path.join("tests", "asyncio")) session.run(*pytest_args) -@nox.session(python=["3.7", "3.8", "3.9", "3.10", "3.11"]) -def unit(session): +@nox.session(python=PYTHON_VERSIONS) +@nox.parametrize( + ["install_grpc_gcp", "install_grpc", "install_async_rest"], + [ + (False, True, False), # Run unit tests with grpcio installed + (True, True, False), # Run unit tests with grpcio/grpcio-gcp installed + (False, False, False), # Run unit tests without grpcio installed + (False, True, True), # Run unit tests with grpcio and async rest installed + ], +) +def unit(session, install_grpc_gcp, install_grpc, install_async_rest): """Run the unit test suite.""" - default(session) - -@nox.session(python=["3.7", "3.8", "3.9", "3.10", "3.11"]) -def unit_grpc_gcp(session): - """Run the unit test suite with grpcio-gcp installed.""" - constraints_path = str( - CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + # `grpcio-gcp` doesn't support protobuf 4+. + # Remove extra `grpcgcp` when protobuf 3.x is dropped. + # https://github.com/googleapis/python-api-core/issues/594 + if install_grpc_gcp: + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + # Install grpcio-gcp + session.install("-e", ".[grpcgcp]", "-c", constraints_path) + # Install protobuf < 4.0.0 + session.install("protobuf<4.0.0") + + default( + session=session, + install_grpc=install_grpc, + install_async_rest=install_async_rest, ) - # Install grpcio-gcp - session.install("-e", ".[grpcgcp]", "-c", constraints_path) - # Install protobuf < 4.0.0 - session.install("protobuf<4.0.0") - - default(session) -@nox.session(python=["3.8", "3.10", "3.11"]) -def unit_wo_grpc(session): - """Run the unit test suite w/o grpcio installed""" - default(session, install_grpc=False) +@nox.session(python=DEFAULT_PYTHON_VERSION) +def prerelease_deps(session): + """Run the unit test suite.""" + default(session, prerelease=True) @nox.session(python=DEFAULT_PYTHON_VERSION) def lint_setup_py(session): """Verify that setup.py is valid (including RST check).""" - session.install("docutils", "Pygments") + session.install("docutils", "Pygments", "setuptools") session.run("python", "setup.py", "check", "--restructuredtext", "--strict") -@nox.session(python="3.8") +@nox.session(python=DEFAULT_PYTHON_VERSION) def pytype(session): """Run type-checking.""" - session.install(".[grpc]", "pytype >= 2019.3.21") + session.install(".[grpc]", "pytype") session.run("pytype") @nox.session(python=DEFAULT_PYTHON_VERSION) def mypy(session): """Run type-checking.""" - session.install(".[grpc]", "mypy") + session.install(".[grpc,async_rest]", "mypy") session.install( "types-setuptools", "types-requests", "types-protobuf", - "types-mock", "types-dataclasses", + "types-mock; python_version=='3.7'", ) session.run("mypy", "google", "tests") @@ -210,12 +293,25 @@ def cover(session): session.run("coverage", "erase") -@nox.session(python="3.9") +@nox.session(python="3.10") def docs(session): """Build the docs for this library.""" session.install("-e", ".[grpc]") - session.install("sphinx==4.2.0", "alabaster", "recommonmark") + session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. 
+ "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "sphinx==4.5.0", + "alabaster", + "recommonmark", + ) shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( @@ -237,7 +333,20 @@ def docfx(session): """Build the docfx yaml files for this library.""" session.install("-e", ".") - session.install("alabaster", "recommonmark", "gcp-sphinx-docfx-yaml") + session.install( + # We need to pin to specific versions of the `sphinxcontrib-*` packages + # which still support sphinx 4.x. + # See https://github.com/googleapis/sphinx-docfx-yaml/issues/344 + # and https://github.com/googleapis/sphinx-docfx-yaml/issues/345. + "sphinxcontrib-applehelp==1.0.4", + "sphinxcontrib-devhelp==1.0.2", + "sphinxcontrib-htmlhelp==2.0.1", + "sphinxcontrib-qthelp==1.0.3", + "sphinxcontrib-serializinghtml==1.1.5", + "gcp-sphinx-docfx-yaml", + "alabaster", + "recommonmark", + ) shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( diff --git a/owlbot.py b/owlbot.py deleted file mode 100644 index 5a83032e8..000000000 --- a/owlbot.py +++ /dev/null @@ -1,53 +0,0 @@ -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""This script is used to synthesize generated parts of this library.""" - -import synthtool as s -from synthtool import gcp -from synthtool.languages import python - -common = gcp.CommonTemplates() - -# ---------------------------------------------------------------------------- -# Add templated files -# ---------------------------------------------------------------------------- -excludes = [ - "noxfile.py", # pytype - "setup.cfg", # pytype - ".flake8", # flake8-import-order, layout - ".coveragerc", # layout - "CONTRIBUTING.rst", # no systests - ".github/workflows/unittest.yml", # exclude unittest gh action - ".github/workflows/lint.yml", # exclude lint gh action - "README.rst", -] -templated_files = common.py_library(microgenerator=True, cov_level=100) -s.move(templated_files, excludes=excludes) - -# Add pytype support -s.replace( - ".gitignore", - """\ -.pytest_cache -""", - """\ -.pytest_cache -.pytype -""", -) - -python.configure_previous_major_version_branches() - -s.shell.run(["nox", "-s", "blacken"], hide_output=False) diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 000000000..31f82052a --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,115 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +[build-system] +requires = ["setuptools"] +build-backend = "setuptools.build_meta" + +[project] +name = "google-api-core" +authors = [{ name = "Google LLC", email = "googleapis-packages@google.com" }] +license = { text = "Apache 2.0" } +requires-python = ">=3.7" +readme = "README.rst" +description = "Google API client core library" +classifiers = [ + # Should be one of: + # "Development Status :: 3 - Alpha" + # "Development Status :: 4 - Beta" + # "Development Status :: 5 - Production/Stable" + "Development Status :: 5 - Production/Stable", + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", + "Programming Language :: Python :: 3.14", + "Operating System :: OS Independent", + "Topic :: Internet", +] +dependencies = [ + "googleapis-common-protos >= 1.56.2, < 2.0.0", + "protobuf >= 3.19.5, < 7.0.0, != 3.20.0, != 3.20.1, != 4.21.0, != 4.21.1, != 4.21.2, != 4.21.3, != 4.21.4, != 4.21.5", + "proto-plus >= 1.22.3, < 2.0.0", + "proto-plus >= 1.25.0, < 2.0.0; python_version >= '3.13'", + "google-auth >= 2.14.1, < 3.0.0", + "requests >= 2.18.0, < 3.0.0", + # TODO(https://github.com/googleapis/python-api-core/issues/835): Remove + # `importlib_metadata` once we drop support for Python 3.7 + "importlib_metadata>=1.4; python_version<'3.8'", +] +dynamic = ["version"] + +[project.urls] +Documentation = "https://googleapis.dev/python/google-api-core/latest/" +Repository = "https://github.com/googleapis/python-api-core" + +[project.optional-dependencies] +async_rest = ["google-auth[aiohttp] >= 2.35.0, < 3.0.0"] +grpc = [ + "grpcio >= 1.33.2, < 2.0.0", + "grpcio >= 1.49.1, < 2.0.0; python_version >= '3.11'", + "grpcio >= 1.75.1, < 2.0.0; python_version >= '3.14'", + "grpcio-status >= 1.33.2, < 2.0.0", + "grpcio-status >= 1.49.1, < 2.0.0; python_version >= '3.11'", + "grpcio-status >= 1.75.1, < 2.0.0; python_version >= '3.14'", +] +grpcgcp = ["grpcio-gcp >= 0.2.2, < 1.0.0"] +grpcio-gcp = ["grpcio-gcp >= 0.2.2, < 1.0.0"] + +[tool.setuptools.dynamic] +version = { attr = "google.api_core.version.__version__" } + +[tool.setuptools.packages.find] +# Only include packages under the 'google' namespace. Do not include tests, +# benchmarks, etc. 
+include = ["google*"] + +[tool.mypy] +python_version = "3.7" +namespace_packages = true +ignore_missing_imports = true + +[tool.pytest] +filterwarnings = [ + # treat all warnings as errors + "error", + # Prevent Python version warnings from interfering with tests + "ignore:.* Python version .*:FutureWarning", + # Remove once https://github.com/pytest-dev/pytest-cov/issues/621 is fixed + "ignore:.*The --rsyncdir command line argument and rsyncdirs config variable are deprecated:DeprecationWarning", + # Remove once https://github.com/protocolbuffers/protobuf/issues/12186 is fixed + "ignore:.*custom tp_new.*in Python 3.14:DeprecationWarning", + # Remove once support for python 3.7 is dropped + # This warning only appears when using python 3.7 + "ignore:.*Using or importing the ABCs from.*collections:DeprecationWarning", + # Remove once support for grpcio-gcp is deprecated + # See https://github.com/googleapis/python-api-core/blob/42e8b6e6f426cab749b34906529e8aaf3f133d75/google/api_core/grpc_helpers.py#L39-L45 + "ignore:.*Support for grpcio-gcp is deprecated:DeprecationWarning", + "ignore: The `compression` argument is ignored for grpc_gcp.secure_channel creation:DeprecationWarning", + "ignore:The `attempt_direct_path` argument is ignored for grpc_gcp.secure_channel creation:DeprecationWarning", + # Remove once the minimum supported version of googleapis-common-protos is 1.62.0 + "ignore:.*pkg_resources.declare_namespace:DeprecationWarning", + "ignore:.*pkg_resources is deprecated as an API:DeprecationWarning", + # Remove once https://github.com/grpc/grpc/issues/35086 is fixed (and version newer than 1.60.0 is published) + "ignore:There is no current event loop:DeprecationWarning", + # Remove after support for Python 3.7 is dropped + "ignore:After January 1, 2024, new releases of this library will drop support for Python 3.7:DeprecationWarning", +] diff --git a/renovate.json b/renovate.json index 39b2a0ec9..c7875c469 100644 --- a/renovate.json +++ b/renovate.json @@ -5,7 +5,7 @@ ":preserveSemverRanges", ":disableDependencyDashboard" ], - "ignorePaths": [".pre-commit-config.yaml", ".kokoro/requirements.txt", "setup.py"], + "ignorePaths": [".pre-commit-config.yaml", ".kokoro/requirements.txt", "setup.py", ".github/workflows/unittest.yml"], "pip_requirements": { "fileMatch": ["requirements-test.txt", "samples/[\\S/]*constraints.txt", "samples/[\\S/]*constraints-test.txt"] } diff --git a/scripts/decrypt-secrets.sh b/scripts/decrypt-secrets.sh index 0018b421d..120b0ddc4 100755 --- a/scripts/decrypt-secrets.sh +++ b/scripts/decrypt-secrets.sh @@ -1,6 +1,6 @@ #!/bin/bash -# Copyright 2023 Google LLC All rights reserved. +# Copyright 2024 Google LLC All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/scripts/readme-gen/readme_gen.py b/scripts/readme-gen/readme_gen.py index 1acc11983..8f5e248a0 100644 --- a/scripts/readme-gen/readme_gen.py +++ b/scripts/readme-gen/readme_gen.py @@ -1,6 +1,6 @@ #!/usr/bin/env python -# Copyright 2023 Google LLC +# Copyright 2024 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/setup.py b/setup.py index d4639a906..168877fa5 100644 --- a/setup.py +++ b/setup.py @@ -12,95 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -import io -import os - import setuptools -# Package metadata. 
- -name = "google-api-core" -description = "Google API client core library" - -# Should be one of: -# 'Development Status :: 3 - Alpha' -# 'Development Status :: 4 - Beta' -# 'Development Status :: 5 - Production/Stable' -release_status = "Development Status :: 5 - Production/Stable" -dependencies = [ - "googleapis-common-protos >= 1.56.2, < 2.0.dev0", - "protobuf>=3.19.5,<5.0.0.dev0,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", - "google-auth >= 2.14.1, < 3.0.dev0", - "requests >= 2.18.0, < 3.0.0.dev0", -] -extras = { - "grpc": [ - "grpcio >= 1.33.2, < 2.0dev", - "grpcio >= 1.49.1, < 2.0dev; python_version>='3.11'", - "grpcio-status >= 1.33.2, < 2.0.dev0", - "grpcio-status >= 1.49.1, < 2.0.dev0; python_version>='3.11'", - ], - "grpcgcp": "grpcio-gcp >= 0.2.2, < 1.0.dev0", - "grpcio-gcp": "grpcio-gcp >= 0.2.2, < 1.0.dev0", -} - - -# Setup boilerplate below this line. - -package_root = os.path.abspath(os.path.dirname(__file__)) - - -version = {} -with open(os.path.join(package_root, "google/api_core/version.py")) as fp: - exec(fp.read(), version) -version = version["__version__"] - -readme_filename = os.path.join(package_root, "README.rst") -with io.open(readme_filename, encoding="utf-8") as readme_file: - readme = readme_file.read() - -# Only include packages under the 'google' namespace. Do not include tests, -# benchmarks, etc. -packages = [ - package for package in setuptools.find_packages() if package.startswith("google") -] - -# Determine which namespaces are needed. -namespaces = ["google"] -if "google.cloud" in packages: - namespaces.append("google.cloud") - - -setuptools.setup( - name=name, - version=version, - description=description, - long_description=readme, - author="Google LLC", - author_email="googleapis-packages@google.com", - license="Apache 2.0", - url="https://github.com/googleapis/python-api-core", - classifiers=[ - release_status, - "Intended Audience :: Developers", - "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", - "Operating System :: OS Independent", - "Topic :: Internet", - ], - platforms="Posix; MacOS X; Windows", - packages=packages, - namespace_packages=namespaces, - install_requires=dependencies, - extras_require=extras, - python_requires=">=3.7", - include_package_data=True, - zip_safe=False, -) +setuptools.setup() diff --git a/testing/constraints-3.12.txt b/testing/constraints-3.12.txt new file mode 100644 index 000000000..e69de29bb diff --git a/testing/constraints-3.13.txt b/testing/constraints-3.13.txt new file mode 100644 index 000000000..e69de29bb diff --git a/testing/constraints-3.14.txt b/testing/constraints-3.14.txt new file mode 100644 index 000000000..e69de29bb diff --git a/testing/constraints-3.7.txt b/testing/constraints-3.7.txt index ec041297a..1a9b85d12 100644 --- a/testing/constraints-3.7.txt +++ b/testing/constraints-3.7.txt @@ -9,7 +9,8 @@ googleapis-common-protos==1.56.2 protobuf==3.19.5 google-auth==2.14.1 requests==2.18.0 -packaging==14.3 grpcio==1.33.2 grpcio-status==1.33.2 grpcio-gcp==0.2.2 +proto-plus==1.22.3 +importlib_metadata==1.4 diff --git a/testing/constraints-async-rest-3.7.txt b/testing/constraints-async-rest-3.7.txt new file mode 100644 index 000000000..7aedeb1ca --- /dev/null +++ 
b/testing/constraints-async-rest-3.7.txt @@ -0,0 +1,17 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List *all* library dependencies and extras in this file. +# Pin the version to the lower bound. +# +# e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev", +# Then this file should have foo==1.14.0 +googleapis-common-protos==1.56.2 +protobuf==3.19.5 +google-auth==2.35.0 +# from google-auth[aiohttp] +aiohttp==3.6.2 +requests==2.20.0 +grpcio==1.33.2 +grpcio-status==1.33.2 +grpcio-gcp==0.2.2 +proto-plus==1.22.3 diff --git a/tests/asyncio/future/test_async_future.py b/tests/asyncio/future/test_async_future.py index 0cfe67734..659f41cf9 100644 --- a/tests/asyncio/future/test_async_future.py +++ b/tests/asyncio/future/test_async_future.py @@ -13,8 +13,8 @@ # limitations under the License. import asyncio +from unittest import mock -import mock import pytest from google.api_core import exceptions diff --git a/tests/asyncio/gapic/test_method_async.py b/tests/asyncio/gapic/test_method_async.py index ee2069796..40dd168a0 100644 --- a/tests/asyncio/gapic/test_method_async.py +++ b/tests/asyncio/gapic/test_method_async.py @@ -14,7 +14,11 @@ import datetime -import mock +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER # noqa: F401 +except ImportError: # pragma: NO COVER + import mock # type: ignore import pytest try: @@ -77,6 +81,7 @@ async def test_wrap_method_with_custom_client_info(): api_core_version=3, gapic_version=4, client_library_version=5, + protobuf_runtime_version=6, ) fake_call = grpc_helpers_async.FakeUnaryUnaryCall() method = mock.Mock(spec=aio.UnaryUnaryMultiCallable, return_value=fake_call) @@ -251,4 +256,19 @@ async def test_wrap_method_with_overriding_timeout_as_a_number(): result = await wrapped_method(timeout=22) assert result == 42 - method.assert_called_once_with(timeout=22, metadata=mock.ANY) + + actual_timeout = method.call_args[1]["timeout"] + metadata = method.call_args[1]["metadata"] + assert metadata == mock.ANY + assert actual_timeout == pytest.approx(22, abs=0.05) + + +@pytest.mark.asyncio +async def test_wrap_method_without_wrap_errors(): + fake_call = mock.AsyncMock() + + wrapped_method = gapic_v1.method_async.wrap_method(fake_call, kind="rest") + with mock.patch("google.api_core.grpc_helpers_async.wrap_errors") as method: + await wrapped_method() + + method.assert_not_called() diff --git a/tests/asyncio/operations_v1/test_operations_async_client.py b/tests/asyncio/operations_v1/test_operations_async_client.py index 19ac9b566..e5b20dcd0 100644 --- a/tests/asyncio/operations_v1/test_operations_async_client.py +++ b/tests/asyncio/operations_v1/test_operations_async_client.py @@ -12,7 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. -import mock +from unittest import mock + import pytest try: diff --git a/tests/asyncio/retry/__init__.py b/tests/asyncio/retry/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/asyncio/retry/test_retry_streaming_async.py b/tests/asyncio/retry/test_retry_streaming_async.py new file mode 100644 index 000000000..e44f5361e --- /dev/null +++ b/tests/asyncio/retry/test_retry_streaming_async.py @@ -0,0 +1,601 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import asyncio +import datetime +import re + +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER # noqa: F401 +except ImportError: # pragma: NO COVER + import mock # type: ignore + +import pytest + +from google.api_core import exceptions +from google.api_core import retry_async +from google.api_core.retry import retry_streaming_async + +from ...unit.retry.test_retry_base import Test_BaseRetry + + +@pytest.mark.asyncio +async def test_retry_streaming_target_bad_sleep_generator(): + from google.api_core.retry.retry_streaming_async import retry_target_stream + + with pytest.raises(ValueError, match="Sleep generator"): + await retry_target_stream(None, lambda x: True, [], None).__anext__() + + +@mock.patch("asyncio.sleep", autospec=True) +@pytest.mark.asyncio +async def test_retry_streaming_target_dynamic_backoff(sleep): + """ + sleep_generator should be iterated after on_error, to support dynamic backoff + """ + from functools import partial + from google.api_core.retry.retry_streaming_async import retry_target_stream + + sleep.side_effect = RuntimeError("stop after sleep") + # start with empty sleep generator; values are added after exception in push_sleep_value + sleep_values = [] + error_target = partial(TestAsyncStreamingRetry._generator_mock, error_on=0) + inserted_sleep = 99 + + def push_sleep_value(err): + sleep_values.append(inserted_sleep) + + with pytest.raises(RuntimeError): + await retry_target_stream( + error_target, + predicate=lambda x: True, + sleep_generator=sleep_values, + on_error=push_sleep_value, + ).__anext__() + assert sleep.call_count == 1 + sleep.assert_called_once_with(inserted_sleep) + + +class TestAsyncStreamingRetry(Test_BaseRetry): + def _make_one(self, *args, **kwargs): + return retry_streaming_async.AsyncStreamingRetry(*args, **kwargs) + + def test___str__(self): + def if_exception_type(exc): + return bool(exc) # pragma: NO COVER + + # Explicitly set all attributes as changed Retry defaults should not + # cause this test to start failing. 
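A note on `test_retry_streaming_target_dynamic_backoff` above, before the `__str__` assertion is constructed below: the retry loop pulls the next value from `sleep_generator` only after `on_error` has run, which is what makes server-driven backoff possible. A sketch of that pattern from the caller's side; the queue and the `retry_after` attribute are illustrative, not part of the library:

```python
import collections

delay_queue = collections.deque()


def delays():
    # Evaluated lazily by the retry loop, one value per failed attempt, so a
    # delay pushed by on_error is seen before the next sleep.
    while True:
        yield delay_queue.popleft() if delay_queue else 1.0


def on_error(exc):
    hint = getattr(exc, "retry_after", None)  # e.g. parsed from a response
    if hint is not None:
        delay_queue.append(hint)
```

Passed as `sleep_generator=delays()` and `on_error=on_error`, each failure can reschedule the next attempt with a dynamically chosen delay.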
+ retry_ = retry_streaming_async.AsyncStreamingRetry(
+ predicate=if_exception_type,
+ initial=1.0,
+ maximum=60.0,
+ multiplier=2.0,
+ timeout=120.0,
+ on_error=None,
+ )
+ assert re.match(
+ (
+ r"<AsyncStreamingRetry predicate=<function.*<locals>.if_exception_type at 0x[^>]+>, "
+ r"initial=1.0, maximum=60.0, multiplier=2.0, timeout=120.0, "
+ r"on_error=None>"
+ ),
+ str(retry_),
+ )
+
+ @staticmethod
+ async def _generator_mock(
+ num=5,
+ error_on=None,
+ exceptions_seen=None,
+ sleep_time=0,
+ ):
+ """
+ Helper to create a mock generator that yields a number of values
+ Generator can optionally raise an exception on a specific iteration
+
+ Args:
+ - num (int): the number of values to yield
+ - error_on (int): if given, the generator will raise a ValueError on the specified iteration
+ - exceptions_seen (list): if given, the generator will append any exceptions to this list before raising
+ - sleep_time (int): if given, the generator will asyncio.sleep for this many seconds before yielding each value
+ """
+ try:
+ for i in range(num):
+ if sleep_time:
+ await asyncio.sleep(sleep_time)
+ if error_on is not None and i == error_on:
+ raise ValueError("generator mock error")
+ yield i
+ except (Exception, BaseException, GeneratorExit) as e:
+ # keep track of exceptions seen by generator
+ if exceptions_seen is not None:
+ exceptions_seen.append(e)
+ raise
+
+ @mock.patch("asyncio.sleep", autospec=True)
+ @pytest.mark.asyncio
+ async def test___call___generator_success(self, sleep):
+ """
+ Test that a retry-decorated generator yields values as expected
+ This test checks a generator with no issues
+ """
+ from collections.abc import AsyncGenerator
+
+ retry_ = retry_streaming_async.AsyncStreamingRetry()
+ decorated = retry_(self._generator_mock)
+
+ num = 10
+ generator = await decorated(num)
+ # check types
+ assert isinstance(generator, AsyncGenerator)
+ assert isinstance(self._generator_mock(num), AsyncGenerator)
+ # check yield contents
+ unpacked = [i async for i in generator]
+ assert len(unpacked) == num
+ expected = [i async for i in self._generator_mock(num)]
+ for a, b in zip(unpacked, expected):
+ assert a == b
+ sleep.assert_not_called()
+
+ @mock.patch("asyncio.sleep", autospec=True)
+ @pytest.mark.asyncio
+ async def test___call___generator_retry(self, sleep):
+ """
+ Tests that a retry-decorated generator will retry on errors
+ """
+ on_error = mock.Mock(return_value=None)
+ retry_ = retry_streaming_async.AsyncStreamingRetry(
+ on_error=on_error,
+ predicate=retry_async.if_exception_type(ValueError),
+ timeout=None,
+ )
+ generator = await retry_(self._generator_mock)(error_on=3)
+ # error thrown on 3
+ # generator should contain 0, 1, 2 looping
+ unpacked = [await generator.__anext__() for i in range(10)]
+ assert unpacked == [0, 1, 2, 0, 1, 2, 0, 1, 2, 0]
+ assert on_error.call_count == 3
+ await generator.aclose()
+
+ @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n)
+ @mock.patch("asyncio.sleep", autospec=True)
+ @pytest.mark.parametrize("use_deadline_arg", [True, False])
+ @pytest.mark.asyncio
+ async def test___call___generator_retry_hitting_timeout(
+ self, sleep, uniform, use_deadline_arg
+ ):
+ """
+ Tests that a retry-decorated generator will throw a RetryError
+ after using the time budget
+ """
+ import time
+
+ timeout_val = 9.9
+ # support "deadline" as an alias for "timeout"
+ timeout_kwarg = (
+ {"timeout": timeout_val}
+ if not use_deadline_arg
+ else {"deadline": timeout_val}
+ )
+
+ on_error = mock.Mock()
+ retry_ = retry_streaming_async.AsyncStreamingRetry(
predicate=retry_async.if_exception_type(ValueError), + initial=1.0, + maximum=1024.0, + multiplier=2.0, + **timeout_kwarg, + ) + + time_now = time.monotonic() + now_patcher = mock.patch( + "time.monotonic", + return_value=time_now, + ) + + decorated = retry_(self._generator_mock, on_error=on_error) + generator = await decorated(error_on=1) + + with now_patcher as patched_now: + # Make sure that calls to fake asyncio.sleep() also advance the mocked + # time clock. + def increase_time(sleep_delay): + patched_now.return_value += sleep_delay + + sleep.side_effect = increase_time + + with pytest.raises(exceptions.RetryError): + [i async for i in generator] + + assert on_error.call_count == 4 + # check the delays + assert sleep.call_count == 3 # once between each successive target calls + last_wait = sleep.call_args.args[0] + total_wait = sum(call_args.args[0] for call_args in sleep.call_args_list) + # next wait would have put us over, so ended early + assert last_wait == 4 + assert total_wait == 7 + + @pytest.mark.asyncio + async def test___call___generator_cancellations(self): + """ + cancel calls should propagate to the generator + """ + # test without cancel as retryable + retry_ = retry_streaming_async.AsyncStreamingRetry() + utcnow = datetime.datetime.now(datetime.timezone.utc) + mock.patch("google.api_core.datetime_helpers.utcnow", return_value=utcnow) + generator = await retry_(self._generator_mock)(sleep_time=0.2) + assert await generator.__anext__() == 0 + task = asyncio.create_task(generator.__anext__()) + task.cancel() + with pytest.raises(asyncio.CancelledError): + await task + with pytest.raises(StopAsyncIteration): + await generator.__anext__() + + @mock.patch("asyncio.sleep", autospec=True) + @pytest.mark.asyncio + async def test___call___with_generator_send(self, sleep): + """ + Send should be passed through retry into target generator + """ + + async def _mock_send_gen(): + """ + always yield whatever was sent in + """ + in_ = yield + while True: + in_ = yield in_ + + retry_ = retry_streaming_async.AsyncStreamingRetry() + + decorated = retry_(_mock_send_gen) + + generator = await decorated() + result = await generator.__anext__() + # first yield should be None + assert result is None + in_messages = ["test_1", "hello", "world"] + out_messages = [] + for msg in in_messages: + recv = await generator.asend(msg) + out_messages.append(recv) + assert in_messages == out_messages + await generator.aclose() + + @mock.patch("asyncio.sleep", autospec=True) + @pytest.mark.asyncio + async def test___call___generator_send_retry(self, sleep): + """ + Send should be retried if target generator raises an error + """ + on_error = mock.Mock(return_value=None) + retry_ = retry_streaming_async.AsyncStreamingRetry( + on_error=on_error, + predicate=retry_async.if_exception_type(ValueError), + timeout=None, + ) + generator = await retry_(self._generator_mock)(error_on=3) + with pytest.raises(TypeError) as exc_info: + await generator.asend("cannot send to fresh generator") + assert exc_info.match("can't send non-None value") + await generator.aclose() + + # error thrown on 3 + # generator should contain 0, 1, 2 looping + generator = await retry_(self._generator_mock)(error_on=3) + assert await generator.__anext__() == 0 + unpacked = [await generator.asend(i) for i in range(10)] + assert unpacked == [1, 2, 0, 1, 2, 0, 1, 2, 0, 1] + assert on_error.call_count == 3 + await generator.aclose() + + @mock.patch("asyncio.sleep", autospec=True) + @pytest.mark.asyncio + async def 
test___call___with_generator_close(self, sleep): + """ + Close should be passed through retry into target generator + """ + retry_ = retry_streaming_async.AsyncStreamingRetry() + decorated = retry_(self._generator_mock) + exception_list = [] + generator = await decorated(10, exceptions_seen=exception_list) + for i in range(2): + await generator.__anext__() + await generator.aclose() + + assert isinstance(exception_list[0], GeneratorExit) + with pytest.raises(StopAsyncIteration): + # calling next on closed generator should raise error + await generator.__anext__() + + @mock.patch("asyncio.sleep", autospec=True) + @pytest.mark.asyncio + async def test___call___with_new_generator_close(self, sleep): + """ + Close should be passed through retry into target generator, + even when it hasn't been iterated yet + """ + retry_ = retry_streaming_async.AsyncStreamingRetry() + decorated = retry_(self._generator_mock) + exception_list = [] + generator = await decorated(10, exceptions_seen=exception_list) + await generator.aclose() + + with pytest.raises(StopAsyncIteration): + # calling next on closed generator should raise error + await generator.__anext__() + + @mock.patch("asyncio.sleep", autospec=True) + @pytest.mark.asyncio + async def test___call___with_generator_throw(self, sleep): + """ + Throw should be passed through retry into target generator + """ + + # The generator should not retry when it encounters a non-retryable error + retry_ = retry_streaming_async.AsyncStreamingRetry( + predicate=retry_async.if_exception_type(ValueError), + ) + decorated = retry_(self._generator_mock) + exception_list = [] + generator = await decorated(10, exceptions_seen=exception_list) + for i in range(2): + await generator.__anext__() + with pytest.raises(BufferError): + await generator.athrow(BufferError("test")) + assert isinstance(exception_list[0], BufferError) + with pytest.raises(StopAsyncIteration): + # calling next on closed generator should raise error + await generator.__anext__() + + # In contrast, the generator should retry if we throw a retryable exception + exception_list = [] + generator = await decorated(10, exceptions_seen=exception_list) + for i in range(2): + await generator.__anext__() + throw_val = await generator.athrow(ValueError("test")) + assert throw_val == 0 + assert isinstance(exception_list[0], ValueError) + # calling next on generator should not raise error, because it was retried + assert await generator.__anext__() == 1 + + @pytest.mark.parametrize("awaitable_wrapped", [True, False]) + @mock.patch("asyncio.sleep", autospec=True) + @pytest.mark.asyncio + async def test___call___with_iterable_send(self, sleep, awaitable_wrapped): + """ + Send should work like next if the wrapped iterable does not support it + """ + retry_ = retry_streaming_async.AsyncStreamingRetry() + + def iterable_fn(): + class CustomIterable: + def __init__(self): + self.i = -1 + + def __aiter__(self): + return self + + async def __anext__(self): + self.i += 1 + return self.i + + return CustomIterable() + + if awaitable_wrapped: + + async def wrapper(): + return iterable_fn() + + decorated = retry_(wrapper) + else: + decorated = retry_(iterable_fn) + + retryable = await decorated() + # initiate the generator by calling next + result = await retryable.__anext__() + assert result == 0 + # test sending values + assert await retryable.asend("test") == 1 + assert await retryable.asend("test2") == 2 + assert await retryable.asend("test3") == 3 + await retryable.aclose() + + @pytest.mark.parametrize("awaitable_wrapped", 
[True, False]) + @mock.patch("asyncio.sleep", autospec=True) + @pytest.mark.asyncio + async def test___call___with_iterable_close(self, sleep, awaitable_wrapped): + """ + close should be handled by wrapper if wrapped iterable does not support it + """ + retry_ = retry_streaming_async.AsyncStreamingRetry() + + def iterable_fn(): + class CustomIterable: + def __init__(self): + self.i = -1 + + def __aiter__(self): + return self + + async def __anext__(self): + self.i += 1 + return self.i + + return CustomIterable() + + if awaitable_wrapped: + + async def wrapper(): + return iterable_fn() + + decorated = retry_(wrapper) + else: + decorated = retry_(iterable_fn) + + # try closing active generator + retryable = await decorated() + assert await retryable.__anext__() == 0 + await retryable.aclose() + with pytest.raises(StopAsyncIteration): + await retryable.__anext__() + # try closing new generator + new_retryable = await decorated() + await new_retryable.aclose() + with pytest.raises(StopAsyncIteration): + await new_retryable.__anext__() + + @pytest.mark.parametrize("awaitable_wrapped", [True, False]) + @mock.patch("asyncio.sleep", autospec=True) + @pytest.mark.asyncio + async def test___call___with_iterable_throw(self, sleep, awaitable_wrapped): + """ + Throw should work even if the wrapped iterable does not support it + """ + + predicate = retry_async.if_exception_type(ValueError) + retry_ = retry_streaming_async.AsyncStreamingRetry(predicate=predicate) + + def iterable_fn(): + class CustomIterable: + def __init__(self): + self.i = -1 + + def __aiter__(self): + return self + + async def __anext__(self): + self.i += 1 + return self.i + + return CustomIterable() + + if awaitable_wrapped: + + async def wrapper(): + return iterable_fn() + + decorated = retry_(wrapper) + else: + decorated = retry_(iterable_fn) + + # try throwing with active generator + retryable = await decorated() + assert await retryable.__anext__() == 0 + # should swallow errors in predicate + await retryable.athrow(ValueError("test")) + # should raise errors not in predicate + with pytest.raises(BufferError): + await retryable.athrow(BufferError("test")) + with pytest.raises(StopAsyncIteration): + await retryable.__anext__() + # try throwing with new generator + new_retryable = await decorated() + with pytest.raises(BufferError): + await new_retryable.athrow(BufferError("test")) + with pytest.raises(StopAsyncIteration): + await new_retryable.__anext__() + + @pytest.mark.asyncio + async def test_exc_factory_non_retryable_error(self): + """ + generator should give the option to override exception creation logic + test when non-retryable error is thrown + """ + from google.api_core.retry import RetryFailureReason + from google.api_core.retry.retry_streaming_async import retry_target_stream + + timeout = 6 + sent_errors = [ValueError("test"), ValueError("test2"), BufferError("test3")] + expected_final_err = RuntimeError("done") + expected_source_err = ZeroDivisionError("test4") + + def factory(*args, **kwargs): + assert len(kwargs) == 0 + assert args[0] == sent_errors + assert args[1] == RetryFailureReason.NON_RETRYABLE_ERROR + assert args[2] == timeout + return expected_final_err, expected_source_err + + generator = retry_target_stream( + self._generator_mock, + retry_async.if_exception_type(ValueError), + [0] * 3, + timeout=timeout, + exception_factory=factory, + ) + # initialize the generator + await generator.__anext__() + # trigger some retryable errors + await generator.athrow(sent_errors[0]) + await 
generator.athrow(sent_errors[1]) + # trigger a non-retryable error + with pytest.raises(expected_final_err.__class__) as exc_info: + await generator.athrow(sent_errors[2]) + assert exc_info.value == expected_final_err + assert exc_info.value.__cause__ == expected_source_err + + @pytest.mark.asyncio + async def test_exc_factory_timeout(self): + """ + generator should give the option to override exception creation logic + test when timeout is exceeded + """ + import time + from google.api_core.retry import RetryFailureReason + from google.api_core.retry.retry_streaming_async import retry_target_stream + + timeout = 2 + time_now = time.monotonic() + now_patcher = mock.patch( + "time.monotonic", + return_value=time_now, + ) + + with now_patcher as patched_now: + timeout = 2 + sent_errors = [ValueError("test"), ValueError("test2"), ValueError("test3")] + expected_final_err = RuntimeError("done") + expected_source_err = ZeroDivisionError("test4") + + def factory(*args, **kwargs): + assert len(kwargs) == 0 + assert args[0] == sent_errors + assert args[1] == RetryFailureReason.TIMEOUT + assert args[2] == timeout + return expected_final_err, expected_source_err + + generator = retry_target_stream( + self._generator_mock, + retry_async.if_exception_type(ValueError), + [0] * 3, + timeout=timeout, + exception_factory=factory, + ) + # initialize the generator + await generator.__anext__() + # trigger some retryable errors + await generator.athrow(sent_errors[0]) + await generator.athrow(sent_errors[1]) + # trigger a timeout + patched_now.return_value += timeout + 1 + with pytest.raises(expected_final_err.__class__) as exc_info: + await generator.athrow(sent_errors[2]) + assert exc_info.value == expected_final_err + assert exc_info.value.__cause__ == expected_source_err diff --git a/tests/asyncio/test_retry_async.py b/tests/asyncio/retry/test_retry_unary_async.py similarity index 65% rename from tests/asyncio/test_retry_async.py rename to tests/asyncio/retry/test_retry_unary_async.py index 14807eb5d..e7fdc963f 100644 --- a/tests/asyncio/test_retry_async.py +++ b/tests/asyncio/retry/test_retry_unary_async.py @@ -15,12 +15,18 @@ import datetime import re -import mock +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER # noqa: F401 +except ImportError: # pragma: NO COVER + import mock # type: ignore import pytest from google.api_core import exceptions from google.api_core import retry_async +from ...unit.retry.test_retry_base import Test_BaseRetry + @mock.patch("asyncio.sleep", autospec=True) @mock.patch( @@ -97,23 +103,25 @@ async def test_retry_target_non_retryable_error(utcnow, sleep): @mock.patch("asyncio.sleep", autospec=True) -@mock.patch("google.api_core.datetime_helpers.utcnow", autospec=True) +@mock.patch("time.monotonic", autospec=True) +@pytest.mark.parametrize("use_deadline_arg", [True, False]) @pytest.mark.asyncio -async def test_retry_target_deadline_exceeded(utcnow, sleep): +async def test_retry_target_timeout_exceeded(monotonic, sleep, use_deadline_arg): predicate = retry_async.if_exception_type(ValueError) exception = ValueError("meep") target = mock.Mock(side_effect=exception) # Setup the timeline so that the first call takes 5 seconds but the second - # call takes 6, which puts the retry over the deadline. - utcnow.side_effect = [ - # The first call to utcnow establishes the start of the timeline. 
- datetime.datetime.min, - datetime.datetime.min + datetime.timedelta(seconds=5), - datetime.datetime.min + datetime.timedelta(seconds=11), - ] + # call takes 6, which puts the retry over the timeout. + monotonic.side_effect = [0, 5, 11] + + timeout_val = 10 + # support "deadline" as an alias for "timeout" + timeout_kwarg = ( + {"timeout": timeout_val} if not use_deadline_arg else {"deadline": timeout_val} + ) with pytest.raises(exceptions.RetryError) as exc_info: - await retry_async.retry_target(target, predicate, range(10), deadline=10) + await retry_async.retry_target(target, predicate, range(10), **timeout_kwarg) assert exc_info.value.cause == exception assert exc_info.match("Timeout of 10.0s exceeded") @@ -128,113 +136,39 @@ async def test_retry_target_deadline_exceeded(utcnow, sleep): @pytest.mark.asyncio async def test_retry_target_bad_sleep_generator(): with pytest.raises(ValueError, match="Sleep generator"): - await retry_async.retry_target( - mock.sentinel.target, mock.sentinel.predicate, [], None - ) + await retry_async.retry_target(mock.sentinel.target, lambda x: True, [], None) -class TestAsyncRetry: - def test_constructor_defaults(self): - retry_ = retry_async.AsyncRetry() - assert retry_._predicate == retry_async.if_transient_error - assert retry_._initial == 1 - assert retry_._maximum == 60 - assert retry_._multiplier == 2 - assert retry_._deadline == 120 - assert retry_._on_error is None - - def test_constructor_options(self): - _some_function = mock.Mock() - - retry_ = retry_async.AsyncRetry( - predicate=mock.sentinel.predicate, - initial=1, - maximum=2, - multiplier=3, - deadline=4, - on_error=_some_function, - ) - assert retry_._predicate == mock.sentinel.predicate - assert retry_._initial == 1 - assert retry_._maximum == 2 - assert retry_._multiplier == 3 - assert retry_._deadline == 4 - assert retry_._on_error is _some_function - - def test_with_deadline(self): - retry_ = retry_async.AsyncRetry( - predicate=mock.sentinel.predicate, - initial=1, - maximum=2, - multiplier=3, - deadline=4, - on_error=mock.sentinel.on_error, - ) - new_retry = retry_.with_deadline(42) - assert retry_ is not new_retry - assert new_retry._deadline == 42 - - # the rest of the attributes should remain the same - assert new_retry._predicate is retry_._predicate - assert new_retry._initial == retry_._initial - assert new_retry._maximum == retry_._maximum - assert new_retry._multiplier == retry_._multiplier - assert new_retry._on_error is retry_._on_error - - def test_with_predicate(self): - retry_ = retry_async.AsyncRetry( - predicate=mock.sentinel.predicate, - initial=1, - maximum=2, - multiplier=3, - deadline=4, - on_error=mock.sentinel.on_error, - ) - new_retry = retry_.with_predicate(mock.sentinel.predicate) - assert retry_ is not new_retry - assert new_retry._predicate == mock.sentinel.predicate - - # the rest of the attributes should remain the same - assert new_retry._deadline == retry_._deadline - assert new_retry._initial == retry_._initial - assert new_retry._maximum == retry_._maximum - assert new_retry._multiplier == retry_._multiplier - assert new_retry._on_error is retry_._on_error - - def test_with_delay_noop(self): - retry_ = retry_async.AsyncRetry( - predicate=mock.sentinel.predicate, - initial=1, - maximum=2, - multiplier=3, - deadline=4, - on_error=mock.sentinel.on_error, +@mock.patch("asyncio.sleep", autospec=True) +@pytest.mark.asyncio +async def test_retry_target_dynamic_backoff(sleep): + """ + sleep_generator should be iterated after on_error, to support dynamic backoff + 
""" + sleep.side_effect = RuntimeError("stop after sleep") + # start with empty sleep generator; values are added after exception in push_sleep_value + sleep_values = [] + exception = ValueError("trigger retry") + error_target = mock.Mock(side_effect=exception) + inserted_sleep = 99 + + def push_sleep_value(err): + sleep_values.append(inserted_sleep) + + with pytest.raises(RuntimeError): + await retry_async.retry_target( + error_target, + predicate=lambda x: True, + sleep_generator=sleep_values, + on_error=push_sleep_value, ) - new_retry = retry_.with_delay() - assert retry_ is not new_retry - assert new_retry._initial == retry_._initial - assert new_retry._maximum == retry_._maximum - assert new_retry._multiplier == retry_._multiplier + assert sleep.call_count == 1 + sleep.assert_called_once_with(inserted_sleep) - def test_with_delay(self): - retry_ = retry_async.AsyncRetry( - predicate=mock.sentinel.predicate, - initial=1, - maximum=2, - multiplier=3, - deadline=4, - on_error=mock.sentinel.on_error, - ) - new_retry = retry_.with_delay(initial=1, maximum=2, multiplier=3) - assert retry_ is not new_retry - assert new_retry._initial == 1 - assert new_retry._maximum == 2 - assert new_retry._multiplier == 3 - # the rest of the attributes should remain the same - assert new_retry._deadline == retry_._deadline - assert new_retry._predicate is retry_._predicate - assert new_retry._on_error is retry_._on_error +class TestAsyncRetry(Test_BaseRetry): + def _make_one(self, *args, **kwargs): + return retry_async.AsyncRetry(*args, **kwargs) def test___str__(self): def if_exception_type(exc): @@ -247,7 +181,7 @@ def if_exception_type(exc): initial=1.0, maximum=60.0, multiplier=2.0, - deadline=120.0, + timeout=120.0, on_error=None, ) assert re.match( @@ -280,7 +214,6 @@ async def test___call___and_execute_success(self, sleep): @mock.patch("asyncio.sleep", autospec=True) @pytest.mark.asyncio async def test___call___and_execute_retry(self, sleep, uniform): - on_error = mock.Mock(spec=["__call__"], side_effect=[None]) retry_ = retry_async.AsyncRetry( predicate=retry_async.if_exception_type(ValueError) @@ -304,21 +237,17 @@ async def test___call___and_execute_retry(self, sleep, uniform): @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n) @mock.patch("asyncio.sleep", autospec=True) @pytest.mark.asyncio - async def test___call___and_execute_retry_hitting_deadline(self, sleep, uniform): - + async def test___call___and_execute_retry_hitting_timeout(self, sleep, uniform): on_error = mock.Mock(spec=["__call__"], side_effect=[None] * 10) retry_ = retry_async.AsyncRetry( predicate=retry_async.if_exception_type(ValueError), initial=1.0, maximum=1024.0, multiplier=2.0, - deadline=9.9, + timeout=30.9, ) - utcnow = datetime.datetime.utcnow() - utcnow_patcher = mock.patch( - "google.api_core.datetime_helpers.utcnow", return_value=utcnow - ) + monotonic_patcher = mock.patch("time.monotonic", return_value=0) target = mock.AsyncMock(spec=["__call__"], side_effect=[ValueError()] * 10) # __name__ is needed by functools.partial. @@ -327,11 +256,11 @@ async def test___call___and_execute_retry_hitting_deadline(self, sleep, uniform) decorated = retry_(target, on_error=on_error) target.assert_not_called() - with utcnow_patcher as patched_utcnow: + with monotonic_patcher as patched_monotonic: # Make sure that calls to fake asyncio.sleep() also advance the mocked # time clock. 
def increase_time(sleep_delay):
- patched_utcnow.return_value += datetime.timedelta(seconds=sleep_delay)
+ patched_monotonic.return_value += sleep_delay
 sleep.side_effect = increase_time
@@ -347,8 +276,17 @@ def increase_time(sleep_delay):
 last_wait = sleep.call_args.args[0]
 total_wait = sum(call_args.args[0] for call_args in sleep.call_args_list)
- assert last_wait == 2.9 # and not 8.0, because the last delay was shortened
- assert total_wait == 9.9 # the same as the deadline
+ assert last_wait == 8.0
+ # Next attempt would be scheduled in 16 secs, 15 + 16 = 31 > 30.9, thus
+ # we do not even wait for it to be scheduled (30.9 is configured timeout).
+ # This changes the previous logic of shortening the last attempt to fit
+ # in the timeout. The previous logic was removed to make Python retry
+ # logic consistent with the other languages and to not disrupt the
+ # randomized retry delays distribution by artificially increasing the
+ # probability of scheduling two (instead of one) last attempts with very
+ # short delay between them, while the second retry has a very low chance
+ # of succeeding anyway.
+ assert total_wait == 15.0
 @mock.patch("asyncio.sleep", autospec=True)
 @pytest.mark.asyncio
diff --git a/tests/asyncio/test_bidi_async.py b/tests/asyncio/test_bidi_async.py
new file mode 100644
index 000000000..add685a96
--- /dev/null
+++ b/tests/asyncio/test_bidi_async.py
@@ -0,0 +1,320 @@
+# Copyright 2025, Google LLC All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys
+import asyncio
+
+from unittest import mock
+
+try:
+ from unittest.mock import AsyncMock
+except ImportError: # pragma: NO COVER
+ from mock import AsyncMock # type: ignore
+
+
+import pytest
+
+try:
+ from grpc import aio
+except ImportError: # pragma: NO COVER
+ pytest.skip("No GRPC", allow_module_level=True)
+
+from google.api_core import bidi_async
+from google.api_core import exceptions
+
+# TODO: remove this when dropping support for "Python 3.10" and below.
+if sys.version_info < (3, 10): # type: ignore[operator]
+
+ def aiter(obj):
+ return obj.__aiter__()
+
+ async def anext(obj):
+ return await obj.__anext__()
+
+
+@pytest.mark.asyncio
+class Test_AsyncRequestQueueGenerator:
+ async def test_bounded_consume(self):
+ call = mock.create_autospec(aio.Call, instance=True)
+ call.done.return_value = False
+
+ q = asyncio.Queue()
+ await q.put(mock.sentinel.A)
+ await q.put(mock.sentinel.B)
+
+ generator = bidi_async._AsyncRequestQueueGenerator(q)
+ generator.call = call
+
+ items = []
+ gen_aiter = aiter(generator)
+
+ items.append(await anext(gen_aiter))
+ items.append(await anext(gen_aiter))
+
+ # At this point, the queue is empty. The next call to anext will sleep.
+ # We make the call inactive.
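Before the test flips `call.done` to True just below to shut the generator down, here is the queue-draining pattern it exercises, reduced to its core. This is illustrative only; the real `_AsyncRequestQueueGenerator` also times out its queue reads and re-queues an item when the call has died:

```python
import asyncio


async def drain(queue: asyncio.Queue, call_is_active):
    # Yield requests for as long as the RPC is alive; None is the stop sentinel.
    while call_is_active():
        item = await queue.get()
        if item is None:
            return
        yield item


async def main():
    queue = asyncio.Queue()
    await queue.put("request-1")
    await queue.put(None)  # what closing the stream enqueues to unblock the consumer
    async for item in drain(queue, call_is_active=lambda: True):
        print(item)  # -> request-1


asyncio.run(main())
```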
+ call.done.return_value = True + + with pytest.raises(asyncio.TimeoutError): + await asyncio.wait_for(anext(gen_aiter), timeout=0.01) + + assert items == [mock.sentinel.A, mock.sentinel.B] + + async def test_yield_initial_and_exit(self): + q = asyncio.Queue() + call = mock.create_autospec(aio.Call, instance=True) + call.done.return_value = True + + generator = bidi_async._AsyncRequestQueueGenerator( + q, initial_request=mock.sentinel.A + ) + generator.call = call + + assert await anext(aiter(generator)) == mock.sentinel.A + + async def test_yield_initial_callable_and_exit(self): + q = asyncio.Queue() + call = mock.create_autospec(aio.Call, instance=True) + call.done.return_value = True + + generator = bidi_async._AsyncRequestQueueGenerator( + q, initial_request=lambda: mock.sentinel.A + ) + generator.call = call + + assert await anext(aiter(generator)) == mock.sentinel.A + + async def test_exit_when_inactive_with_item(self): + q = asyncio.Queue() + await q.put(mock.sentinel.A) + + call = mock.create_autospec(aio.Call, instance=True) + call.done.return_value = True + + generator = bidi_async._AsyncRequestQueueGenerator(q) + generator.call = call + + with pytest.raises( + StopAsyncIteration, + ): + assert await anext(aiter(generator)) + + # Make sure it put the item back. + assert not q.empty() + assert await q.get() == mock.sentinel.A + + async def test_exit_when_inactive_empty(self): + q = asyncio.Queue() + call = mock.create_autospec(aio.Call, instance=True) + call.done.return_value = True + + generator = bidi_async._AsyncRequestQueueGenerator(q) + generator.call = call + + with pytest.raises(asyncio.TimeoutError): + await asyncio.wait_for(anext(aiter(generator)), timeout=0.01) + + async def test_exit_with_stop(self): + q = asyncio.Queue() + await q.put(None) + call = mock.create_autospec(aio.Call, instance=True) + call.done.return_value = False + + generator = bidi_async._AsyncRequestQueueGenerator(q) + generator.call = call + + with pytest.raises(StopAsyncIteration): + assert await anext(aiter(generator)) + + +def make_async_rpc(): + """Makes a mock async RPC used to test Bidi classes.""" + call = mock.create_autospec(aio.StreamStreamCall, instance=True) + rpc = AsyncMock() + + def rpc_side_effect(request, metadata=None): + call.done.return_value = False + return call + + rpc.side_effect = rpc_side_effect + + def cancel_side_effect(): + call.done.return_value = True + return True + + call.cancel.side_effect = cancel_side_effect + call.read = AsyncMock() + + return rpc, call + + +class AsyncClosedCall: + def __init__(self, exception): + self.exception = exception + + def done(self): + return True + + async def read(self): + raise self.exception + + +class TestAsyncBidiRpc: + def test_initial_state(self): + bidi_rpc = bidi_async.AsyncBidiRpc(None) + assert bidi_rpc.is_active is False + + def test_done_callbacks(self): + bidi_rpc = bidi_async.AsyncBidiRpc(None) + callback = mock.Mock(spec=["__call__"]) + + bidi_rpc.add_done_callback(callback) + bidi_rpc._on_call_done(mock.sentinel.future) + + callback.assert_called_once_with(mock.sentinel.future) + + @pytest.mark.asyncio + @pytest.mark.skipif( + sys.version_info < (3, 8), # type: ignore[operator] + reason="Versions of Python below 3.8 don't provide support for assert_awaited_once", + ) + async def test_metadata(self): + rpc, call = make_async_rpc() + bidi_rpc = bidi_async.AsyncBidiRpc(rpc, metadata=mock.sentinel.A) + assert bidi_rpc._rpc_metadata == mock.sentinel.A + + await bidi_rpc.open() + assert bidi_rpc.call == call + 
rpc.assert_awaited_once() + assert rpc.call_args.kwargs["metadata"] == mock.sentinel.A + + @pytest.mark.asyncio + async def test_open(self): + rpc, call = make_async_rpc() + bidi_rpc = bidi_async.AsyncBidiRpc(rpc) + + await bidi_rpc.open() + + assert bidi_rpc.call == call + assert bidi_rpc.is_active + call.add_done_callback.assert_called_once_with(bidi_rpc._on_call_done) + + @pytest.mark.asyncio + async def test_open_error_already_open(self): + rpc, _ = make_async_rpc() + bidi_rpc = bidi_async.AsyncBidiRpc(rpc) + + await bidi_rpc.open() + + with pytest.raises(ValueError): + await bidi_rpc.open() + + @pytest.mark.asyncio + async def test_open_error_call_error(self): + rpc, _ = make_async_rpc() + expected_exception = exceptions.GoogleAPICallError( + "test", response=mock.sentinel.response + ) + rpc.side_effect = expected_exception + bidi_rpc = bidi_async.AsyncBidiRpc(rpc) + callback = mock.Mock(spec=["__call__"]) + bidi_rpc.add_done_callback(callback) + + with pytest.raises(exceptions.GoogleAPICallError) as exc_info: + await bidi_rpc.open() + + assert exc_info.value == expected_exception + callback.assert_called_once_with(mock.sentinel.response) + + @pytest.mark.asyncio + async def test_close(self): + rpc, call = make_async_rpc() + bidi_rpc = bidi_async.AsyncBidiRpc(rpc) + await bidi_rpc.open() + + await bidi_rpc.close() + + call.cancel.assert_called_once() + assert bidi_rpc.call is call + assert bidi_rpc.is_active is False + # ensure the request queue was signaled to stop. + assert bidi_rpc.pending_requests == 1 + assert await bidi_rpc._request_queue.get() is None + # ensure request and callbacks are cleaned up + assert bidi_rpc._initial_request is None + assert not bidi_rpc._callbacks + + @pytest.mark.asyncio + async def test_close_with_no_rpc(self): + bidi_rpc = bidi_async.AsyncBidiRpc(None) + + await bidi_rpc.close() + + assert bidi_rpc.call is None + assert bidi_rpc.is_active is False + # ensure the request queue was signaled to stop. 
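+ # (close() is expected to enqueue a single None sentinel even when no
+ # RPC was opened; the request generator treats None as a stop signal,
+ # as test_exit_with_stop above exercises, hence exactly one pending
+ # request below.)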
+ assert bidi_rpc.pending_requests == 1 + assert await bidi_rpc._request_queue.get() is None + # ensure request and callbacks are cleaned up + assert bidi_rpc._initial_request is None + assert not bidi_rpc._callbacks + + @pytest.mark.asyncio + async def test_close_no_rpc(self): + bidi_rpc = bidi_async.AsyncBidiRpc(None) + await bidi_rpc.close() + + @pytest.mark.asyncio + async def test_send(self): + rpc, call = make_async_rpc() + bidi_rpc = bidi_async.AsyncBidiRpc(rpc) + await bidi_rpc.open() + + await bidi_rpc.send(mock.sentinel.request) + + assert bidi_rpc.pending_requests == 1 + assert await bidi_rpc._request_queue.get() is mock.sentinel.request + + @pytest.mark.asyncio + async def test_send_not_open(self): + bidi_rpc = bidi_async.AsyncBidiRpc(None) + + with pytest.raises(ValueError): + await bidi_rpc.send(mock.sentinel.request) + + @pytest.mark.asyncio + async def test_send_dead_rpc(self): + error = ValueError() + bidi_rpc = bidi_async.AsyncBidiRpc(None) + bidi_rpc.call = AsyncClosedCall(error) + + with pytest.raises(ValueError): + await bidi_rpc.send(mock.sentinel.request) + + @pytest.mark.asyncio + async def test_recv(self): + bidi_rpc = bidi_async.AsyncBidiRpc(None) + bidi_rpc.call = mock.create_autospec(aio.Call, instance=True) + bidi_rpc.call.read = AsyncMock(return_value=mock.sentinel.response) + + response = await bidi_rpc.recv() + + assert response == mock.sentinel.response + + @pytest.mark.asyncio + async def test_recv_not_open(self): + bidi_rpc = bidi_async.AsyncBidiRpc(None) + + with pytest.raises(ValueError): + await bidi_rpc.recv() diff --git a/tests/asyncio/test_grpc_helpers_async.py b/tests/asyncio/test_grpc_helpers_async.py index 95242f6b8..43700f22b 100644 --- a/tests/asyncio/test_grpc_helpers_async.py +++ b/tests/asyncio/test_grpc_helpers_async.py @@ -12,7 +12,12 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-import mock +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER # noqa: F401 +except ImportError: # pragma: NO COVER + import mock # type: ignore +from ..helpers import warn_deprecated_credentials_file import pytest # noqa: I202 try: @@ -97,12 +102,40 @@ async def test_common_methods_in_wrapped_call(): assert mock_call.wait_for_connection.call_count == 1 +@pytest.mark.asyncio +@pytest.mark.parametrize( + "callable_type,expected_wrapper_type", + [ + (grpc.aio.UnaryStreamMultiCallable, grpc_helpers_async._WrappedUnaryStreamCall), + (grpc.aio.StreamUnaryMultiCallable, grpc_helpers_async._WrappedStreamUnaryCall), + ( + grpc.aio.StreamStreamMultiCallable, + grpc_helpers_async._WrappedStreamStreamCall, + ), + ], +) +async def test_wrap_errors_w_stream_type(callable_type, expected_wrapper_type): + class ConcreteMulticallable(callable_type): + def __call__(self, *args, **kwargs): + raise NotImplementedError("Should not be called") + + with mock.patch.object( + grpc_helpers_async, "_wrap_stream_errors" + ) as wrap_stream_errors: + callable_ = ConcreteMulticallable() + grpc_helpers_async.wrap_errors(callable_) + assert wrap_stream_errors.call_count == 1 + wrap_stream_errors.assert_called_once_with(callable_, expected_wrapper_type) + + @pytest.mark.asyncio async def test_wrap_stream_errors_unary_stream(): mock_call = mock.Mock(aio.UnaryStreamCall, autospec=True) multicallable = mock.Mock(return_value=mock_call) - wrapped_callable = grpc_helpers_async._wrap_stream_errors(multicallable) + wrapped_callable = grpc_helpers_async._wrap_stream_errors( + multicallable, grpc_helpers_async._WrappedUnaryStreamCall + ) await wrapped_callable(1, 2, three="four") multicallable.assert_called_once_with(1, 2, three="four") @@ -114,7 +147,9 @@ async def test_wrap_stream_errors_stream_unary(): mock_call = mock.Mock(aio.StreamUnaryCall, autospec=True) multicallable = mock.Mock(return_value=mock_call) - wrapped_callable = grpc_helpers_async._wrap_stream_errors(multicallable) + wrapped_callable = grpc_helpers_async._wrap_stream_errors( + multicallable, grpc_helpers_async._WrappedStreamUnaryCall + ) await wrapped_callable(1, 2, three="four") multicallable.assert_called_once_with(1, 2, three="four") @@ -126,24 +161,15 @@ async def test_wrap_stream_errors_stream_stream(): mock_call = mock.Mock(aio.StreamStreamCall, autospec=True) multicallable = mock.Mock(return_value=mock_call) - wrapped_callable = grpc_helpers_async._wrap_stream_errors(multicallable) + wrapped_callable = grpc_helpers_async._wrap_stream_errors( + multicallable, grpc_helpers_async._WrappedStreamStreamCall + ) await wrapped_callable(1, 2, three="four") multicallable.assert_called_once_with(1, 2, three="four") assert mock_call.wait_for_connection.call_count == 1 -@pytest.mark.asyncio -async def test_wrap_stream_errors_type_error(): - mock_call = mock.Mock() - multicallable = mock.Mock(return_value=mock_call) - - wrapped_callable = grpc_helpers_async._wrap_stream_errors(multicallable) - - with pytest.raises(TypeError): - await wrapped_callable() - - @pytest.mark.asyncio async def test_wrap_stream_errors_raised(): grpc_error = RpcErrorImpl(grpc.StatusCode.INVALID_ARGUMENT) @@ -151,7 +177,9 @@ async def test_wrap_stream_errors_raised(): mock_call.wait_for_connection = mock.AsyncMock(side_effect=[grpc_error]) multicallable = mock.Mock(return_value=mock_call) - wrapped_callable = grpc_helpers_async._wrap_stream_errors(multicallable) + wrapped_callable = grpc_helpers_async._wrap_stream_errors( + multicallable, 
grpc_helpers_async._WrappedStreamStreamCall + ) with pytest.raises(exceptions.InvalidArgument): await wrapped_callable() @@ -166,7 +194,9 @@ async def test_wrap_stream_errors_read(): mock_call.read = mock.AsyncMock(side_effect=grpc_error) multicallable = mock.Mock(return_value=mock_call) - wrapped_callable = grpc_helpers_async._wrap_stream_errors(multicallable) + wrapped_callable = grpc_helpers_async._wrap_stream_errors( + multicallable, grpc_helpers_async._WrappedStreamStreamCall + ) wrapped_call = await wrapped_callable(1, 2, three="four") multicallable.assert_called_once_with(1, 2, three="four") @@ -189,7 +219,9 @@ async def test_wrap_stream_errors_aiter(): mock_call.__aiter__ = mock.Mock(return_value=mocked_aiter) multicallable = mock.Mock(return_value=mock_call) - wrapped_callable = grpc_helpers_async._wrap_stream_errors(multicallable) + wrapped_callable = grpc_helpers_async._wrap_stream_errors( + multicallable, grpc_helpers_async._WrappedStreamStreamCall + ) wrapped_call = await wrapped_callable() with pytest.raises(exceptions.InvalidArgument) as exc_info: @@ -210,7 +242,9 @@ async def test_wrap_stream_errors_aiter_non_rpc_error(): mock_call.__aiter__ = mock.Mock(return_value=mocked_aiter) multicallable = mock.Mock(return_value=mock_call) - wrapped_callable = grpc_helpers_async._wrap_stream_errors(multicallable) + wrapped_callable = grpc_helpers_async._wrap_stream_errors( + multicallable, grpc_helpers_async._WrappedStreamStreamCall + ) wrapped_call = await wrapped_callable() with pytest.raises(TypeError) as exc_info: @@ -224,7 +258,9 @@ async def test_wrap_stream_errors_aiter_called_multiple_times(): mock_call = mock.Mock(aio.StreamStreamCall, autospec=True) multicallable = mock.Mock(return_value=mock_call) - wrapped_callable = grpc_helpers_async._wrap_stream_errors(multicallable) + wrapped_callable = grpc_helpers_async._wrap_stream_errors( + multicallable, grpc_helpers_async._WrappedStreamStreamCall + ) wrapped_call = await wrapped_callable() assert wrapped_call.__aiter__() == wrapped_call.__aiter__() @@ -239,7 +275,9 @@ async def test_wrap_stream_errors_write(): mock_call.done_writing = mock.AsyncMock(side_effect=[None, grpc_error]) multicallable = mock.Mock(return_value=mock_call) - wrapped_callable = grpc_helpers_async._wrap_stream_errors(multicallable) + wrapped_callable = grpc_helpers_async._wrap_stream_errors( + multicallable, grpc_helpers_async._WrappedStreamStreamCall + ) wrapped_call = await wrapped_callable() @@ -266,6 +304,28 @@ def test_wrap_errors_non_streaming(wrap_unary_errors): wrap_unary_errors.assert_called_once_with(callable_) +def test_grpc_async_stream(): + """ + GrpcAsyncStream type should be both an AsyncIterator and a grpc.aio.Call. + """ + instance = grpc_helpers_async.GrpcAsyncStream[int]() + assert isinstance(instance, grpc.aio.Call) + # should implement __aiter__ and __anext__ + assert hasattr(instance, "__aiter__") + it = instance.__aiter__() + assert hasattr(it, "__anext__") + + +def test_awaitable_grpc_call(): + """ + AwaitableGrpcCall type should be an Awaitable and a grpc.aio.Call. 
+ """ + instance = grpc_helpers_async.AwaitableGrpcCall() + assert isinstance(instance, grpc.aio.Call) + # should implement __await__ + assert hasattr(instance, "__await__") + + @mock.patch("google.api_core.grpc_helpers_async._wrap_stream_errors") def test_wrap_errors_streaming(wrap_stream_errors): callable_ = mock.create_autospec(aio.UnaryStreamMultiCallable) @@ -273,37 +333,67 @@ def test_wrap_errors_streaming(wrap_stream_errors): result = grpc_helpers_async.wrap_errors(callable_) assert result == wrap_stream_errors.return_value - wrap_stream_errors.assert_called_once_with(callable_) + wrap_stream_errors.assert_called_once_with( + callable_, grpc_helpers_async._WrappedUnaryStreamCall + ) -@mock.patch("grpc.composite_channel_credentials") +@pytest.mark.parametrize( + "attempt_direct_path,target,expected_target", + [ + (None, "example.com:443", "example.com:443"), + (False, "example.com:443", "example.com:443"), + (True, "example.com:443", "google-c2p:///example.com"), + (True, "dns:///example.com", "google-c2p:///example.com"), + (True, "another-c2p:///example.com", "another-c2p:///example.com"), + ], +) +@mock.patch("grpc.compute_engine_channel_credentials") @mock.patch( "google.auth.default", autospec=True, return_value=(mock.sentinel.credentials, mock.sentinel.project), ) @mock.patch("grpc.aio.secure_channel") -def test_create_channel_implicit(grpc_secure_channel, default, composite_creds_call): - target = "example.com:443" +def test_create_channel_implicit( + grpc_secure_channel, + google_auth_default, + composite_creds_call, + attempt_direct_path, + target, + expected_target, +): composite_creds = composite_creds_call.return_value - channel = grpc_helpers_async.create_channel(target) + channel = grpc_helpers_async.create_channel( + target, attempt_direct_path=attempt_direct_path + ) assert channel is grpc_secure_channel.return_value - default.assert_called_once_with(scopes=None, default_scopes=None) + google_auth_default.assert_called_once_with(scopes=None, default_scopes=None) grpc_secure_channel.assert_called_once_with( - target, composite_creds, compression=None + expected_target, composite_creds, compression=None ) +@pytest.mark.parametrize( + "attempt_direct_path,target, expected_target", + [ + (None, "example.com:443", "example.com:443"), + (False, "example.com:443", "example.com:443"), + (True, "example.com:443", "google-c2p:///example.com"), + (True, "dns:///example.com", "google-c2p:///example.com"), + (True, "another-c2p:///example.com", "another-c2p:///example.com"), + ], +) @mock.patch("google.auth.transport.grpc.AuthMetadataPlugin", autospec=True) @mock.patch( "google.auth.transport.requests.Request", autospec=True, return_value=mock.sentinel.Request, ) -@mock.patch("grpc.composite_channel_credentials") +@mock.patch("grpc.compute_engine_channel_credentials") @mock.patch( "google.auth.default", autospec=True, @@ -311,25 +401,40 @@ def test_create_channel_implicit(grpc_secure_channel, default, composite_creds_c ) @mock.patch("grpc.aio.secure_channel") def test_create_channel_implicit_with_default_host( - grpc_secure_channel, default, composite_creds_call, request, auth_metadata_plugin + grpc_secure_channel, + google_auth_default, + composite_creds_call, + request, + auth_metadata_plugin, + attempt_direct_path, + target, + expected_target, ): - target = "example.com:443" default_host = "example.com" composite_creds = composite_creds_call.return_value - channel = grpc_helpers_async.create_channel(target, default_host=default_host) + channel = 
grpc_helpers_async.create_channel( + target, default_host=default_host, attempt_direct_path=attempt_direct_path + ) assert channel is grpc_secure_channel.return_value - default.assert_called_once_with(scopes=None, default_scopes=None) + google_auth_default.assert_called_once_with(scopes=None, default_scopes=None) auth_metadata_plugin.assert_called_once_with( mock.sentinel.credentials, mock.sentinel.Request, default_host=default_host ) grpc_secure_channel.assert_called_once_with( - target, composite_creds, compression=None + expected_target, composite_creds, compression=None ) +@pytest.mark.parametrize( + "attempt_direct_path", + [ + None, + False, + ], +) @mock.patch("grpc.composite_channel_credentials") @mock.patch( "google.auth.default", @@ -337,13 +442,15 @@ def test_create_channel_implicit_with_default_host( ) @mock.patch("grpc.aio.secure_channel") def test_create_channel_implicit_with_ssl_creds( - grpc_secure_channel, default, composite_creds_call + grpc_secure_channel, default, composite_creds_call, attempt_direct_path ): target = "example.com:443" ssl_creds = grpc.ssl_channel_credentials() - grpc_helpers_async.create_channel(target, ssl_credentials=ssl_creds) + grpc_helpers_async.create_channel( + target, ssl_credentials=ssl_creds, attempt_direct_path=attempt_direct_path + ) default.assert_called_once_with(scopes=None, default_scopes=None) composite_creds_call.assert_called_once_with(ssl_creds, mock.ANY) @@ -353,7 +460,18 @@ def test_create_channel_implicit_with_ssl_creds( ) -@mock.patch("grpc.composite_channel_credentials") +def test_create_channel_implicit_with_ssl_creds_attempt_direct_path_true(): + target = "example.com:443" + ssl_creds = grpc.ssl_channel_credentials() + with pytest.raises( + ValueError, match="Using ssl_credentials with Direct Path is not supported" + ): + grpc_helpers_async.create_channel( + target, ssl_credentials=ssl_creds, attempt_direct_path=True + ) + + +@mock.patch("grpc.compute_engine_channel_credentials") @mock.patch( "google.auth.default", autospec=True, @@ -376,7 +494,7 @@ def test_create_channel_implicit_with_scopes( ) -@mock.patch("grpc.composite_channel_credentials") +@mock.patch("grpc.compute_engine_channel_credentials") @mock.patch( "google.auth.default", autospec=True, @@ -405,16 +523,17 @@ def test_create_channel_explicit_with_duplicate_credentials(): target = "example:443" with pytest.raises(exceptions.DuplicateCredentialArgs) as excinfo: - grpc_helpers_async.create_channel( - target, - credentials_file="credentials.json", - credentials=mock.sentinel.credentials, - ) + with warn_deprecated_credentials_file(): + grpc_helpers_async.create_channel( + target, + credentials_file="credentials.json", + credentials=mock.sentinel.credentials, + ) assert "mutually exclusive" in str(excinfo.value) -@mock.patch("grpc.composite_channel_credentials") +@mock.patch("grpc.compute_engine_channel_credentials") @mock.patch("google.auth.credentials.with_scopes_if_required", autospec=True) @mock.patch("grpc.aio.secure_channel") def test_create_channel_explicit(grpc_secure_channel, auth_creds, composite_creds_call): @@ -434,7 +553,7 @@ def test_create_channel_explicit(grpc_secure_channel, auth_creds, composite_cred ) -@mock.patch("grpc.composite_channel_credentials") +@mock.patch("grpc.compute_engine_channel_credentials") @mock.patch("grpc.aio.secure_channel") def test_create_channel_explicit_scoped(grpc_secure_channel, composite_creds_call): target = "example.com:443" @@ -458,7 +577,7 @@ def test_create_channel_explicit_scoped(grpc_secure_channel, 
composite_creds_cal ) -@mock.patch("grpc.composite_channel_credentials") +@mock.patch("grpc.compute_engine_channel_credentials") @mock.patch("grpc.aio.secure_channel") def test_create_channel_explicit_default_scopes( grpc_secure_channel, composite_creds_call @@ -486,7 +605,7 @@ def test_create_channel_explicit_default_scopes( ) -@mock.patch("grpc.composite_channel_credentials") +@mock.patch("grpc.compute_engine_channel_credentials") @mock.patch("grpc.aio.secure_channel") def test_create_channel_explicit_with_quota_project( grpc_secure_channel, composite_creds_call @@ -509,7 +628,7 @@ def test_create_channel_explicit_with_quota_project( ) -@mock.patch("grpc.composite_channel_credentials") +@mock.patch("grpc.compute_engine_channel_credentials") @mock.patch("grpc.aio.secure_channel") @mock.patch( "google.auth.load_credentials_from_file", @@ -524,9 +643,10 @@ def test_create_channel_with_credentials_file( credentials_file = "/path/to/credentials/file.json" composite_creds = composite_creds_call.return_value - channel = grpc_helpers_async.create_channel( - target, credentials_file=credentials_file - ) + with warn_deprecated_credentials_file(): + channel = grpc_helpers_async.create_channel( + target, credentials_file=credentials_file + ) google.auth.load_credentials_from_file.assert_called_once_with( credentials_file, scopes=None, default_scopes=None @@ -537,7 +657,7 @@ def test_create_channel_with_credentials_file( ) -@mock.patch("grpc.composite_channel_credentials") +@mock.patch("grpc.compute_engine_channel_credentials") @mock.patch("grpc.aio.secure_channel") @mock.patch( "google.auth.load_credentials_from_file", @@ -553,9 +673,10 @@ def test_create_channel_with_credentials_file_and_scopes( credentials_file = "/path/to/credentials/file.json" composite_creds = composite_creds_call.return_value - channel = grpc_helpers_async.create_channel( - target, credentials_file=credentials_file, scopes=scopes - ) + with warn_deprecated_credentials_file(): + channel = grpc_helpers_async.create_channel( + target, credentials_file=credentials_file, scopes=scopes + ) google.auth.load_credentials_from_file.assert_called_once_with( credentials_file, scopes=scopes, default_scopes=None @@ -566,7 +687,7 @@ def test_create_channel_with_credentials_file_and_scopes( ) -@mock.patch("grpc.composite_channel_credentials") +@mock.patch("grpc.compute_engine_channel_credentials") @mock.patch("grpc.aio.secure_channel") @mock.patch( "google.auth.load_credentials_from_file", @@ -582,9 +703,10 @@ def test_create_channel_with_credentials_file_and_default_scopes( credentials_file = "/path/to/credentials/file.json" composite_creds = composite_creds_call.return_value - channel = grpc_helpers_async.create_channel( - target, credentials_file=credentials_file, default_scopes=default_scopes - ) + with warn_deprecated_credentials_file(): + channel = grpc_helpers_async.create_channel( + target, credentials_file=credentials_file, default_scopes=default_scopes + ) google.auth.load_credentials_from_file.assert_called_once_with( credentials_file, scopes=None, default_scopes=default_scopes diff --git a/tests/asyncio/test_operation_async.py b/tests/asyncio/test_operation_async.py index 127ba6343..5b2f012bf 100644 --- a/tests/asyncio/test_operation_async.py +++ b/tests/asyncio/test_operation_async.py @@ -13,9 +13,14 @@ # limitations under the License. 
-import mock import pytest +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER # noqa: F401 +except ImportError: # pragma: NO COVER + import mock # type: ignore + try: import grpc # noqa: F401 except ImportError: # pragma: NO COVER @@ -79,7 +84,8 @@ async def test_constructor(): assert await future.running() -def test_metadata(): +@pytest.mark.asyncio +async def test_metadata(): expected_metadata = struct_pb2.Struct() future, _, _ = make_operation_future( [make_operation_proto(metadata=expected_metadata)] @@ -171,7 +177,8 @@ async def test_unexpected_result(unused_sleep): assert "Unexpected state" in "{!r}".format(exception) -def test_from_gapic(): +@pytest.mark.asyncio +async def test_from_gapic(): operation_proto = make_operation_proto(done=True) operations_client = mock.create_autospec( operations_v1.OperationsClient, instance=True diff --git a/tests/asyncio/test_page_iterator_async.py b/tests/asyncio/test_page_iterator_async.py index 75f9e1cf4..63e26d02e 100644 --- a/tests/asyncio/test_page_iterator_async.py +++ b/tests/asyncio/test_page_iterator_async.py @@ -14,7 +14,11 @@ import inspect -import mock +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER # noqa: F401 +except ImportError: # pragma: NO COVER + import mock # type: ignore import pytest from google.api_core import page_iterator_async @@ -106,6 +110,7 @@ async def test__page_aiter_increment(self): await page_aiter.__anext__() assert iterator.num_results == 1 + await page_aiter.aclose() @pytest.mark.asyncio async def test__page_aiter_no_increment(self): @@ -118,6 +123,7 @@ async def test__page_aiter_no_increment(self): # results should still be 0 after fetching a page. assert iterator.num_results == 0 + await page_aiter.aclose() @pytest.mark.asyncio async def test__items_aiter(self): diff --git a/tests/asyncio/test_rest_streaming_async.py b/tests/asyncio/test_rest_streaming_async.py new file mode 100644 index 000000000..13549c7f9 --- /dev/null +++ b/tests/asyncio/test_rest_streaming_async.py @@ -0,0 +1,376 @@ +# Copyright 2024 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# TODO: set random.seed explicitly in each test function. +# See related issue: https://github.com/googleapis/python-api-core/issues/689. 
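+#
+# Until then, a failing run can usually be reproduced locally by pinning the
+# module to the seed logged below, e.g. (hypothetical seed value):
+#
+#     random.seed(1712345678)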
+ +import datetime +import logging +import random +import time +from typing import List, AsyncIterator + +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER # noqa: F401 +except ImportError: # pragma: NO COVER + import mock # type: ignore + +import pytest # noqa: I202 + +import proto + +try: + from google.auth.aio.transport import Response +except ImportError: + pytest.skip( + "google-api-core[async_rest] is required to test asynchronous rest streaming.", + allow_module_level=True, + ) + +from google.api_core import rest_streaming_async +from google.api import http_pb2 +from google.api import httpbody_pb2 + + +from ..helpers import Composer, Song, EchoResponse, parse_responses + + +__protobuf__ = proto.module(package=__name__) +SEED = int(time.time()) +logging.info(f"Starting async rest streaming tests with random seed: {SEED}") +random.seed(SEED) + + +async def mock_async_gen(data, chunk_size=1): + for i in range(0, len(data)): # pragma: NO COVER + chunk = data[i : i + chunk_size] + yield chunk.encode("utf-8") + + +class ResponseMock(Response): + class _ResponseItr(AsyncIterator[bytes]): + def __init__(self, _response_bytes: bytes, random_split=False): + self._responses_bytes = _response_bytes + self._idx = 0 + self._random_split = random_split + + def __aiter__(self): + return self + + async def __anext__(self): + if self._idx >= len(self._responses_bytes): + raise StopAsyncIteration + if self._random_split: + n = random.randint(1, len(self._responses_bytes[self._idx :])) + else: + n = 1 + x = self._responses_bytes[self._idx : self._idx + n] + self._idx += n + return x + + def __init__( + self, + responses: List[proto.Message], + response_cls, + random_split=False, + ): + self._responses = responses + self._random_split = random_split + self._response_message_cls = response_cls + + def _parse_responses(self): + return parse_responses(self._response_message_cls, self._responses) + + @property + async def headers(self): + raise NotImplementedError() + + @property + async def status_code(self): + raise NotImplementedError() + + async def close(self): + raise NotImplementedError() + + async def content(self, chunk_size=None): + itr = self._ResponseItr( + self._parse_responses(), random_split=self._random_split + ) + async for chunk in itr: + yield chunk + + async def read(self): + raise NotImplementedError() + + +@pytest.mark.asyncio +@pytest.mark.parametrize( + "random_split,resp_message_is_proto_plus", + [(False, True), (False, False)], +) +async def test_next_simple(random_split, resp_message_is_proto_plus): + if resp_message_is_proto_plus: + response_type = EchoResponse + responses = [EchoResponse(content="hello world"), EchoResponse(content="yes")] + else: + response_type = httpbody_pb2.HttpBody + responses = [ + httpbody_pb2.HttpBody(content_type="hello world"), + httpbody_pb2.HttpBody(content_type="yes"), + ] + + resp = ResponseMock( + responses=responses, random_split=random_split, response_cls=response_type + ) + itr = rest_streaming_async.AsyncResponseIterator(resp, response_type) + idx = 0 + async for response in itr: + assert response == responses[idx] + idx += 1 + + +@pytest.mark.asyncio +@pytest.mark.parametrize( + "random_split,resp_message_is_proto_plus", + [ + (True, True), + (False, True), + (True, False), + (False, False), + ], +) +async def test_next_nested(random_split, resp_message_is_proto_plus): + if resp_message_is_proto_plus: + response_type = Song + responses = [ + Song(title="some song", composer=Composer(given_name="some 
name")), + Song(title="another song", date_added=datetime.datetime(2021, 12, 17)), + ] + else: + # Although `http_pb2.HttpRule`` is used in the response, any response message + # can be used which meets this criteria for the test of having a nested field. + response_type = http_pb2.HttpRule + responses = [ + http_pb2.HttpRule( + selector="some selector", + custom=http_pb2.CustomHttpPattern(kind="some kind"), + ), + http_pb2.HttpRule( + selector="another selector", + custom=http_pb2.CustomHttpPattern(path="some path"), + ), + ] + resp = ResponseMock( + responses=responses, random_split=random_split, response_cls=response_type + ) + itr = rest_streaming_async.AsyncResponseIterator(resp, response_type) + idx = 0 + async for response in itr: + assert response == responses[idx] + idx += 1 + assert idx == len(responses) + + +@pytest.mark.asyncio +@pytest.mark.parametrize( + "random_split,resp_message_is_proto_plus", + [ + (True, True), + (False, True), + (True, False), + (False, False), + ], +) +async def test_next_stress(random_split, resp_message_is_proto_plus): + n = 50 + if resp_message_is_proto_plus: + response_type = Song + responses = [ + Song(title="title_%d" % i, composer=Composer(given_name="name_%d" % i)) + for i in range(n) + ] + else: + response_type = http_pb2.HttpRule + responses = [ + http_pb2.HttpRule( + selector="selector_%d" % i, + custom=http_pb2.CustomHttpPattern(path="path_%d" % i), + ) + for i in range(n) + ] + resp = ResponseMock( + responses=responses, random_split=random_split, response_cls=response_type + ) + itr = rest_streaming_async.AsyncResponseIterator(resp, response_type) + idx = 0 + async for response in itr: + assert response == responses[idx] + idx += 1 + assert idx == n + + +@pytest.mark.asyncio +@pytest.mark.parametrize( + "random_split,resp_message_is_proto_plus", + [ + (True, True), + (False, True), + (True, False), + (False, False), + ], +) +async def test_next_escaped_characters_in_string( + random_split, resp_message_is_proto_plus +): + if resp_message_is_proto_plus: + response_type = Song + composer_with_relateds = Composer() + relateds = ["Artist A", "Artist B"] + composer_with_relateds.relateds = relateds + + responses = [ + Song( + title='ti"tle\nfoo\tbar{}', composer=Composer(given_name="name\n\n\n") + ), + Song( + title='{"this is weird": "totally"}', + composer=Composer(given_name="\\{}\\"), + ), + Song(title='\\{"key": ["value",]}\\', composer=composer_with_relateds), + ] + else: + response_type = http_pb2.Http + responses = [ + http_pb2.Http( + rules=[ + http_pb2.HttpRule( + selector='ti"tle\nfoo\tbar{}', + custom=http_pb2.CustomHttpPattern(kind="name\n\n\n"), + ) + ] + ), + http_pb2.Http( + rules=[ + http_pb2.HttpRule( + selector='{"this is weird": "totally"}', + custom=http_pb2.CustomHttpPattern(kind="\\{}\\"), + ) + ] + ), + http_pb2.Http( + rules=[ + http_pb2.HttpRule( + selector='\\{"key": ["value",]}\\', + custom=http_pb2.CustomHttpPattern(kind="\\{}\\"), + ) + ] + ), + ] + resp = ResponseMock( + responses=responses, random_split=random_split, response_cls=response_type + ) + itr = rest_streaming_async.AsyncResponseIterator(resp, response_type) + idx = 0 + async for response in itr: + assert response == responses[idx] + idx += 1 + assert idx == len(responses) + + +@pytest.mark.asyncio +@pytest.mark.parametrize("response_type", [EchoResponse, httpbody_pb2.HttpBody]) +async def test_next_not_array(response_type): + data = '{"hello": 0}' + with mock.patch.object( + ResponseMock, "content", return_value=mock_async_gen(data) + ) as 
+ resp = ResponseMock(responses=[], response_cls=response_type)
+ itr = rest_streaming_async.AsyncResponseIterator(resp, response_type)
+ with pytest.raises(ValueError):
+ await itr.__anext__()
+ mock_method.assert_called_once()
+
+
+@pytest.mark.asyncio
+@pytest.mark.parametrize("response_type", [EchoResponse, httpbody_pb2.HttpBody])
+async def test_cancel(response_type):
+ with mock.patch.object(
+ ResponseMock, "close", new_callable=mock.AsyncMock
+ ) as mock_method:
+ resp = ResponseMock(responses=[], response_cls=response_type)
+ itr = rest_streaming_async.AsyncResponseIterator(resp, response_type)
+ await itr.cancel()
+ mock_method.assert_called_once()
+
+
+@pytest.mark.asyncio
+@pytest.mark.parametrize("response_type", [EchoResponse, httpbody_pb2.HttpBody])
+async def test_iterator_as_context_manager(response_type):
+ with mock.patch.object(
+ ResponseMock, "close", new_callable=mock.AsyncMock
+ ) as mock_method:
+ resp = ResponseMock(responses=[], response_cls=response_type)
+ async with rest_streaming_async.AsyncResponseIterator(resp, response_type):
+ pass
+ mock_method.assert_called_once()
+
+
+@pytest.mark.asyncio
+@pytest.mark.parametrize(
+ "response_type,return_value",
+ [
+ (EchoResponse, bytes('[{"content": "hello"}, {', "utf-8")),
+ (httpbody_pb2.HttpBody, bytes('[{"content_type": "hello"}, {', "utf-8")),
+ ],
+)
+async def test_check_buffer(response_type, return_value):
+ with mock.patch.object(
+ ResponseMock,
+ "_parse_responses",
+ return_value=return_value,
+ ):
+ resp = ResponseMock(responses=[], response_cls=response_type)
+ itr = rest_streaming_async.AsyncResponseIterator(resp, response_type)
+ with pytest.raises(ValueError):
+ await itr.__anext__()
+ await itr.__anext__()
+
+
+@pytest.mark.asyncio
+@pytest.mark.parametrize("response_type", [EchoResponse, httpbody_pb2.HttpBody])
+async def test_next_html(response_type):
+ data = "<html><body>Hello!</body></html>"
+ with mock.patch.object(
+ ResponseMock, "content", return_value=mock_async_gen(data)
+ ) as mock_method:
+ resp = ResponseMock(responses=[], response_cls=response_type)
+
+ itr = rest_streaming_async.AsyncResponseIterator(resp, response_type)
+ with pytest.raises(ValueError):
+ await itr.__anext__()
+ mock_method.assert_called_once()
+
+
+@pytest.mark.asyncio
+async def test_invalid_response_class():
+ class SomeClass:
+ pass
+
+ resp = ResponseMock(responses=[], response_cls=SomeClass)
+ with pytest.raises(
+ ValueError,
+ match="Response message class must be a subclass of proto.Message or google.protobuf.message.Message",
+ ):
+ rest_streaming_async.AsyncResponseIterator(resp, SomeClass)
diff --git a/tests/helpers.py b/tests/helpers.py
new file mode 100644
index 000000000..4c7d5db3e
--- /dev/null
+++ b/tests/helpers.py
@@ -0,0 +1,82 @@
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
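+#
+# These helpers are shared across the async rest-streaming tests and the
+# operations client tests in this suite, which import them via
+# "from ..helpers import ..." / "from ...helpers import ...".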
+
+"""Helpers for tests"""
+
+import functools
+import logging
+import pytest # noqa: I202
+from typing import List
+
+import proto
+
+from google.protobuf import duration_pb2
+from google.protobuf import timestamp_pb2
+from google.protobuf.json_format import MessageToJson
+
+
+class Genre(proto.Enum):
+ GENRE_UNSPECIFIED = 0
+ CLASSICAL = 1
+ JAZZ = 2
+ ROCK = 3
+
+
+class Composer(proto.Message):
+ given_name = proto.Field(proto.STRING, number=1)
+ family_name = proto.Field(proto.STRING, number=2)
+ relateds = proto.RepeatedField(proto.STRING, number=3)
+ indices = proto.MapField(proto.STRING, proto.STRING, number=4)
+
+
+class Song(proto.Message):
+ composer = proto.Field(Composer, number=1)
+ title = proto.Field(proto.STRING, number=2)
+ lyrics = proto.Field(proto.STRING, number=3)
+ year = proto.Field(proto.INT32, number=4)
+ genre = proto.Field(Genre, number=5)
+ is_five_mins_longer = proto.Field(proto.BOOL, number=6)
+ score = proto.Field(proto.DOUBLE, number=7)
+ likes = proto.Field(proto.INT64, number=8)
+ duration = proto.Field(duration_pb2.Duration, number=9)
+ date_added = proto.Field(timestamp_pb2.Timestamp, number=10)
+
+
+class EchoResponse(proto.Message):
+ content = proto.Field(proto.STRING, number=1)
+
+
+def parse_responses(response_message_cls, all_responses: List[proto.Message]) -> bytes:
+ # The JSON serialization returns a string surrounded with quotes that must
+ # be stripped in order to be valid JSON.
+ json_responses = [
+ (
+ response_message_cls.to_json(response).strip('"')
+ if issubclass(response_message_cls, proto.Message)
+ else MessageToJson(response).strip('"')
+ )
+ for response in all_responses
+ ]
+ logging.info(f"Sending JSON stream: {json_responses}")
+ ret_val = "[{}]".format(",".join(json_responses))
+ return bytes(ret_val, "utf-8")
+
+
+warn_deprecated_credentials_file = functools.partial(
+ # This is used to test that the auth credentials file deprecation
+ # warning is emitted as expected.
+ pytest.warns,
+ DeprecationWarning,
+ match="argument is deprecated because of a potential security risk",
+)
diff --git a/tests/unit/future/test__helpers.py b/tests/unit/future/test__helpers.py
index 98afc599f..a37efdd40 100644
--- a/tests/unit/future/test__helpers.py
+++ b/tests/unit/future/test__helpers.py
@@ -12,7 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
-import mock
+from unittest import mock
 from google.api_core.future import _helpers
diff --git a/tests/unit/future/test_polling.py b/tests/unit/future/test_polling.py
index f5d9b4f10..2f66f2304 100644
--- a/tests/unit/future/test_polling.py
+++ b/tests/unit/future/test_polling.py
@@ -15,8 +15,8 @@
 import concurrent.futures
 import threading
 import time
+from unittest import mock
-import mock
 import pytest
 from google.api_core import exceptions, retry
diff --git a/tests/unit/gapic/test_method.py b/tests/unit/gapic/test_method.py
index d966f478f..29e8fc217 100644
--- a/tests/unit/gapic/test_method.py
+++ b/tests/unit/gapic/test_method.py
@@ -13,8 +13,8 @@
 # limitations under the License.
 import datetime
+from unittest import mock
-import mock
 import pytest
 try:
@@ -76,6 +76,7 @@ def test_wrap_method_with_custom_client_info():
 api_core_version=3,
 gapic_version=4,
 client_library_version=5,
+ protobuf_runtime_version=6,
 )
 method = mock.Mock(spec=["__call__"])
@@ -185,10 +186,13 @@ def test_wrap_method_with_overriding_retry_timeout_compression(unused_sleep):
 assert result == 42
 assert method.call_count == 2
 method.assert_called_with(
- timeout=22, compression=grpc.Compression.Deflate, metadata=mock.ANY
+ timeout=22,
+ compression=grpc.Compression.Deflate,
+ metadata=mock.ANY,
 )
+@pytest.mark.skip(reason="Known flaky due to floating point comparison. #866")
 def test_wrap_method_with_overriding_timeout_as_a_number():
 method = mock.Mock(spec=["__call__"], return_value=42)
 default_retry = retry.Retry()
@@ -197,10 +201,34 @@
 method, default_retry, default_timeout
 )
+ # Asserting that the method was called with exactly timeout=22 fails,
+ # since wrapped_method does floating point calculations that result in
+ # a value like 21.987... instead of 22; hence the pytest.approx below.
 result = wrapped_method(timeout=22)
 assert result == 42
- method.assert_called_once_with(timeout=22, metadata=mock.ANY)
+
+ actual_timeout = method.call_args[1]["timeout"]
+ metadata = method.call_args[1]["metadata"]
+ assert metadata == mock.ANY
+ assert actual_timeout == pytest.approx(22, abs=0.01)
+
+
+def test_wrap_method_with_overriding_constant_timeout():
+ method = mock.Mock(spec=["__call__"], return_value=42)
+ default_retry = retry.Retry()
+ default_timeout = timeout.ConstantTimeout(60)
+ wrapped_method = google.api_core.gapic_v1.method.wrap_method(
+ method, default_retry, default_timeout
+ )
+
+ result = wrapped_method(timeout=timeout.ConstantTimeout(22))
+
+ assert result == 42
+
+ actual_timeout = method.call_args[1]["timeout"]
+ metadata = method.call_args[1]["metadata"]
+ assert metadata == mock.ANY
+ assert actual_timeout == 22
 def test_wrap_method_with_call():
diff --git a/tests/unit/gapic/test_routing_header.py b/tests/unit/gapic/test_routing_header.py
index 2c8c75469..f0ec82ec6 100644
--- a/tests/unit/gapic/test_routing_header.py
+++ b/tests/unit/gapic/test_routing_header.py
@@ -90,8 +90,8 @@ def test__urlencode_param(key, value, expected):
 def test__urlencode_param_caching_performance():
 import time
- key = "key" * 100
- value = "value" * 100
+ key = "key" * 10000
+ value = "value" * 10000
 # time with empty cache
 start_time = time.perf_counter()
 routing_header._urlencode_param(key, value)
diff --git a/tests/unit/operations_v1/test_operations_rest_client.py b/tests/unit/operations_v1/test_operations_rest_client.py
index 4ab4f1f75..a3189cf5f 100644
--- a/tests/unit/operations_v1/test_operations_rest_client.py
+++ b/tests/unit/operations_v1/test_operations_rest_client.py
@@ -15,30 +15,51 @@
 #
 import os
-import mock
+try:
+ from unittest import mock
+ from unittest.mock import AsyncMock # pragma: NO COVER # noqa: F401
+except ImportError: # pragma: NO COVER
+ import mock # type: ignore
+
 import pytest
+from typing import Any, List
+from ...helpers import warn_deprecated_credentials_file
 try:
 import grpc # noqa: F401
 except ImportError: # pragma: NO COVER
 pytest.skip("No GRPC", allow_module_level=True)
 from requests import Response # noqa I201
-from requests.sessions import Session
+from google.auth.transport.requests import AuthorizedSession
 from google.api_core import client_options
 from google.api_core import exceptions as core_exceptions
 from google.api_core import gapic_v1
+from google.api_core 
import parse_version_to_tuple from google.api_core.operations_v1 import AbstractOperationsClient + +import google.auth from google.api_core.operations_v1 import pagers +from google.api_core.operations_v1 import pagers_async from google.api_core.operations_v1 import transports -import google.auth from google.auth import credentials as ga_credentials +from google.auth import __version__ as auth_version from google.auth.exceptions import MutualTLSChannelError from google.longrunning import operations_pb2 from google.oauth2 import service_account from google.protobuf import json_format # type: ignore from google.rpc import status_pb2 # type: ignore +try: + import aiohttp # noqa: F401 + import google.auth.aio.transport + from google.auth.aio.transport.sessions import AsyncAuthorizedSession + from google.api_core.operations_v1 import AsyncOperationsRestClient + from google.auth.aio import credentials as ga_credentials_async + + GOOGLE_AUTH_AIO_INSTALLED = True +except ImportError: + GOOGLE_AUTH_AIO_INSTALLED = False HTTP_OPTIONS = { "google.longrunning.Operations.CancelOperation": [ @@ -55,17 +76,62 @@ ], } +PYPARAM_CLIENT: List[Any] = [ + AbstractOperationsClient, +] +PYPARAM_CLIENT_TRANSPORT_NAME = [ + [AbstractOperationsClient, transports.OperationsRestTransport, "rest"], +] +PYPARAM_CLIENT_TRANSPORT_CREDENTIALS = [ + [ + AbstractOperationsClient, + transports.OperationsRestTransport, + ga_credentials.AnonymousCredentials(), + ], +] + +if GOOGLE_AUTH_AIO_INSTALLED: + PYPARAM_CLIENT.append(AsyncOperationsRestClient) + PYPARAM_CLIENT_TRANSPORT_NAME.append( + [ + AsyncOperationsRestClient, + transports.AsyncOperationsRestTransport, + "rest_asyncio", + ] + ) + PYPARAM_CLIENT_TRANSPORT_CREDENTIALS.append( + [ + AsyncOperationsRestClient, + transports.AsyncOperationsRestTransport, + ga_credentials_async.AnonymousCredentials(), + ] + ) + def client_cert_source_callback(): return b"cert bytes", b"key bytes" -def _get_operations_client(http_options=HTTP_OPTIONS): - transport = transports.rest.OperationsRestTransport( - credentials=ga_credentials.AnonymousCredentials(), http_options=http_options +def _get_session_type(is_async: bool): + return ( + AsyncAuthorizedSession + if is_async and GOOGLE_AUTH_AIO_INSTALLED + else AuthorizedSession ) - return AbstractOperationsClient(transport=transport) + +def _get_operations_client(is_async: bool, http_options=HTTP_OPTIONS): + if is_async and GOOGLE_AUTH_AIO_INSTALLED: + async_transport = transports.rest_asyncio.AsyncOperationsRestTransport( + credentials=ga_credentials_async.AnonymousCredentials(), + http_options=http_options, + ) + return AsyncOperationsRestClient(transport=async_transport) + else: + sync_transport = transports.rest.OperationsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), http_options=http_options + ) + return AbstractOperationsClient(transport=sync_transport) # If default endpoint is localhost, then default mtls endpoint will be the same. 
@@ -79,57 +145,69 @@ def modify_default_endpoint(client): ) -def test__get_default_mtls_endpoint(): +# TODO: Add support for mtls in async rest +@pytest.mark.parametrize( + "client_class", + [ + AbstractOperationsClient, + ], +) +def test__get_default_mtls_endpoint(client_class): api_endpoint = "example.googleapis.com" api_mtls_endpoint = "example.mtls.googleapis.com" sandbox_endpoint = "example.sandbox.googleapis.com" sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" non_googleapi = "api.example.com" - assert AbstractOperationsClient._get_default_mtls_endpoint(None) is None - assert ( - AbstractOperationsClient._get_default_mtls_endpoint(api_endpoint) - == api_mtls_endpoint - ) + assert client_class._get_default_mtls_endpoint(None) is None + assert client_class._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint assert ( - AbstractOperationsClient._get_default_mtls_endpoint(api_mtls_endpoint) - == api_mtls_endpoint + client_class._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint ) assert ( - AbstractOperationsClient._get_default_mtls_endpoint(sandbox_endpoint) + client_class._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint ) assert ( - AbstractOperationsClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + client_class._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint ) - assert ( - AbstractOperationsClient._get_default_mtls_endpoint(non_googleapi) - == non_googleapi - ) + assert client_class._get_default_mtls_endpoint(non_googleapi) == non_googleapi -@pytest.mark.parametrize("client_class", [AbstractOperationsClient]) +@pytest.mark.parametrize( + "client_class", + PYPARAM_CLIENT, +) def test_operations_client_from_service_account_info(client_class): creds = ga_credentials.AnonymousCredentials() - with mock.patch.object( - service_account.Credentials, "from_service_account_info" - ) as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info) - assert client.transport._credentials == creds - assert isinstance(client, client_class) + if "async" in str(client_class): + # TODO(): Add support for service account info to async REST transport. 
+ with pytest.raises(NotImplementedError): + info = {"valid": True} + client_class.from_service_account_info(info) + else: + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info) + assert client.transport._credentials == creds + assert isinstance(client, client_class) - assert client.transport._host == "https://longrunning.googleapis.com" + assert client.transport._host == "https://longrunning.googleapis.com" @pytest.mark.parametrize( - "transport_class,transport_name", [(transports.OperationsRestTransport, "rest")] + "transport_class", + [ + transports.OperationsRestTransport, + # TODO(https://github.com/googleapis/python-api-core/issues/706): Add support for + # service account credentials in transports.AsyncOperationsRestTransport + ], ) -def test_operations_client_service_account_always_use_jwt( - transport_class, transport_name -): +def test_operations_client_service_account_always_use_jwt(transport_class): with mock.patch.object( service_account.Credentials, "with_always_use_jwt_access", create=True ) as use_jwt: @@ -145,35 +223,52 @@ def test_operations_client_service_account_always_use_jwt( use_jwt.assert_not_called() -@pytest.mark.parametrize("client_class", [AbstractOperationsClient]) +@pytest.mark.parametrize( + "client_class", + PYPARAM_CLIENT, +) def test_operations_client_from_service_account_file(client_class): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object( - service_account.Credentials, "from_service_account_file" - ) as factory: - factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json") - assert client.transport._credentials == creds - assert isinstance(client, client_class) + if "async" in str(client_class): + # TODO(): Add support for service account creds to async REST transport. 
+ with pytest.raises(NotImplementedError): + client_class.from_service_account_file("dummy/file/path.json") + else: + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json") + assert client.transport._credentials == creds + assert isinstance(client, client_class) - client = client_class.from_service_account_json("dummy/file/path.json") - assert client.transport._credentials == creds - assert isinstance(client, client_class) + client = client_class.from_service_account_json("dummy/file/path.json") + assert client.transport._credentials == creds + assert isinstance(client, client_class) - assert client.transport._host == "https://longrunning.googleapis.com" + assert client.transport._host == "https://longrunning.googleapis.com" -def test_operations_client_get_transport_class(): - transport = AbstractOperationsClient.get_transport_class() +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + PYPARAM_CLIENT_TRANSPORT_NAME, +) +def test_operations_client_get_transport_class( + client_class, transport_class, transport_name +): + transport = client_class.get_transport_class() available_transports = [ transports.OperationsRestTransport, ] + if GOOGLE_AUTH_AIO_INSTALLED: + available_transports.append(transports.AsyncOperationsRestTransport) assert transport in available_transports - transport = AbstractOperationsClient.get_transport_class("rest") - assert transport == transports.OperationsRestTransport + transport = client_class.get_transport_class(transport_name) + assert transport == transport_class +# TODO(): Update this test case to include async REST once we have support for MTLS. @pytest.mark.parametrize( "client_class,transport_class,transport_name", [(AbstractOperationsClient, transports.OperationsRestTransport, "rest")], @@ -186,22 +281,21 @@ def test_operations_client_get_transport_class(): def test_operations_client_client_options( client_class, transport_class, transport_name ): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(AbstractOperationsClient, "get_transport_class") as gtc: - transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) - client = client_class(transport=transport) - gtc.assert_not_called() + # # Check that if channel is provided we won't create a new one. + # with mock.patch.object(AbstractOperationsBaseClient, "get_transport_class") as gtc: + # client = client_class(transport=transport_class()) + # gtc.assert_not_called() - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(AbstractOperationsClient, "get_transport_class") as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() + # # Check that if channel is provided via str we will create a new one. + # with mock.patch.object(AbstractOperationsBaseClient, "get_transport_class") as gtc: + # client = client_class(transport=transport_name) + # gtc.assert_called() # Check the case api_endpoint is provided. 
options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -218,7 +312,7 @@ def test_operations_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -235,7 +329,7 @@ def test_operations_client_client_options( with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class() + client = client_class(transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -253,18 +347,36 @@ def test_operations_client_client_options( with pytest.raises(MutualTLSChannelError): client = client_class() - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value with mock.patch.dict( os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} ): - with pytest.raises(ValueError): - client = client_class() + # Test behavior for google.auth versions < 2.43.0. + # These versions do not have the updated mtls.should_use_client_cert logic. + # Verify that a ValueError is raised when GOOGLE_API_USE_CLIENT_CERTIFICATE + # is set to an unsupported value, as expected in these older versions. + if parse_version_to_tuple(auth_version) < (2, 43, 0): + with pytest.raises(ValueError): + client = client_class() + # Test behavior for google.auth versions >= 2.43.0. + # In these versions, if GOOGLE_API_USE_CLIENT_CERTIFICATE is set to an + # unsupported value (e.g., not 'true' or 'false'), the expected behavior + # of the internal google.auth.mtls.should_use_client_cert() function + # is to return False. Expect should_use_client_cert to return False, so + # client creation should proceed without requiring a client certificate. 
+ else: + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) # Check the case quota_project_id is provided options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -276,7 +388,25 @@ def test_operations_client_client_options( always_use_jwt_access=True, ) + # Check the case credentials_file is provided + with warn_deprecated_credentials_file(): + options = client_options.ClientOptions(credentials_file="credentials.json") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + +# TODO: Add support for mtls in async REST @pytest.mark.parametrize( "client_class,transport_class,transport_name,use_client_cert_env", [ @@ -393,7 +523,7 @@ def fake_init(client_cert_source_for_mtls=None, **kwargs): @pytest.mark.parametrize( "client_class,transport_class,transport_name", - [(AbstractOperationsClient, transports.OperationsRestTransport, "rest")], + PYPARAM_CLIENT_TRANSPORT_NAME, ) def test_operations_client_client_options_scopes( client_class, transport_class, transport_name @@ -402,52 +532,84 @@ def test_operations_client_client_options_scopes( options = client_options.ClientOptions( scopes=["1", "2"], ) - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) + if "async" in str(client_class): + # TODO(): Add support for scopes to async REST transport. + with pytest.raises(core_exceptions.AsyncRestUnsupportedParameterError): + client_class(client_options=options, transport=transport_name) + else: + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) @pytest.mark.parametrize( "client_class,transport_class,transport_name", - [(AbstractOperationsClient, transports.OperationsRestTransport, "rest")], + PYPARAM_CLIENT_TRANSPORT_NAME, ) def test_operations_client_client_options_credentials_file( client_class, transport_class, transport_name ): # Check the case credentials file is provided. 
- options = client_options.ClientOptions(credentials_file="credentials.json") - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) + with warn_deprecated_credentials_file(): + options = client_options.ClientOptions(credentials_file="credentials.json") + if "async" in str(client_class): + # TODO(): Add support for credentials file to async REST transport. + with pytest.raises(core_exceptions.AsyncRestUnsupportedParameterError): + with warn_deprecated_credentials_file(): + client_class(client_options=options, transport=transport_name) + else: + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) -def test_list_operations_rest( - transport: str = "rest", request_type=operations_pb2.ListOperationsRequest -): - client = _get_operations_client() +@pytest.mark.parametrize( + "credentials_file", + [None, "credentials.json"], +) +@mock.patch( + "google.auth.default", + autospec=True, + return_value=(mock.sentinel.credentials, mock.sentinel.project), +) +def test_list_operations_rest(google_auth_default, credentials_file): + if credentials_file: + with warn_deprecated_credentials_file(): + sync_transport = transports.rest.OperationsRestTransport( + credentials_file=credentials_file, + http_options=HTTP_OPTIONS, + ) + else: + # no warning expected + sync_transport = transports.rest.OperationsRestTransport( + credentials_file=credentials_file, + http_options=HTTP_OPTIONS, + ) + + client = AbstractOperationsClient(transport=sync_transport) # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: + with mock.patch.object(_get_session_type(is_async=False), "request") as req: # Designate an appropriate value for the returned response. return_value = operations_pb2.ListOperationsResponse( next_page_token="next_page_token_value", @@ -477,10 +639,49 @@ def test_list_operations_rest( assert response.next_page_token == "next_page_token_value" +@pytest.mark.asyncio +async def test_list_operations_rest_async(): + if not GOOGLE_AUTH_AIO_INSTALLED: + pytest.skip("Skipped because google-api-core[async_rest] is not installed") + + client = _get_operations_client(is_async=True) + # Mock the http request call within the method and fake a response. + with mock.patch.object(_get_session_type(is_async=True), "request") as req: + # Designate an appropriate value for the returned response. 
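+        # Unlike the sync session mock, the async session exposes the payload
+        # via `await response.read()`, so `read` is provided below as an
+        # AsyncMock returning the JSON-encoded bytes.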
+ return_value = operations_pb2.ListOperationsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.read = mock.AsyncMock( + return_value=json_return_value.encode("UTF-8") + ) + req.return_value = response_value + response = await client.list_operations( + name="operations", filter_="my_filter", page_size=10, page_token="abc" + ) + + actual_args = req.call_args + assert actual_args.args[0] == "GET" + assert actual_args.args[1] == "https://longrunning.googleapis.com/v3/operations" + assert actual_args.kwargs["params"] == [ + ("filter", "my_filter"), + ("pageSize", 10), + ("pageToken", "abc"), + ] + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers_async.ListOperationsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + def test_list_operations_rest_failure(): - client = _get_operations_client(http_options=None) + client = _get_operations_client(is_async=False, http_options=None) - with mock.patch.object(Session, "request") as req: + with mock.patch.object(_get_session_type(is_async=False), "request") as req: response_value = Response() response_value.status_code = 400 mock_request = mock.MagicMock() @@ -492,13 +693,31 @@ def test_list_operations_rest_failure(): client.list_operations(name="operations") +@pytest.mark.asyncio +async def test_list_operations_rest_failure_async(): + if not GOOGLE_AUTH_AIO_INSTALLED: + pytest.skip("Skipped because google-api-core[async_rest] is not installed") + + client = _get_operations_client(is_async=True, http_options=None) + + with mock.patch.object(_get_session_type(is_async=True), "request") as req: + response_value = mock.Mock() + response_value.status_code = 400 + response_value.read = mock.AsyncMock(return_value=b"{}") + mock_request = mock.MagicMock() + mock_request.method = "GET" + mock_request.url = "https://longrunning.googleapis.com:443/v1/operations" + response_value.request = mock_request + req.return_value = response_value + with pytest.raises(core_exceptions.GoogleAPIError): + await client.list_operations(name="operations") + + def test_list_operations_rest_pager(): - client = AbstractOperationsClient( - credentials=ga_credentials.AnonymousCredentials(), - ) + client = _get_operations_client(is_async=False, http_options=None) # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: + with mock.patch.object(_get_session_type(is_async=False), "request") as req: # TODO(kbandes): remove this mock unless there's a good reason for it. # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages @@ -545,13 +764,80 @@ def test_list_operations_rest_pager(): assert page_.next_page_token == token -def test_get_operation_rest( - transport: str = "rest", request_type=operations_pb2.GetOperationRequest -): - client = _get_operations_client() +@pytest.mark.asyncio +async def test_list_operations_rest_pager_async(): + if not GOOGLE_AUTH_AIO_INSTALLED: + pytest.skip("Skipped because google-api-core[async_rest] is not installed") + client = _get_operations_client(is_async=True, http_options=None) # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: + with mock.patch.object(_get_session_type(is_async=True), "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + operations_pb2.ListOperationsResponse( + operations=[ + operations_pb2.Operation(), + operations_pb2.Operation(), + operations_pb2.Operation(), + ], + next_page_token="abc", + ), + operations_pb2.ListOperationsResponse( + operations=[], + next_page_token="def", + ), + operations_pb2.ListOperationsResponse( + operations=[operations_pb2.Operation()], + next_page_token="ghi", + ), + operations_pb2.ListOperationsResponse( + operations=[operations_pb2.Operation(), operations_pb2.Operation()], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(json_format.MessageToJson(x) for x in response) + return_values = tuple(mock.Mock() for i in response) + for return_val, response_val in zip(return_values, response): + return_val.read = mock.AsyncMock(return_value=response_val.encode("UTF-8")) + return_val.status_code = 200 + req.side_effect = return_values + + pager = await client.list_operations(name="operations") + + responses = [] + async for response in pager: + responses.append(response) + + results = list(responses) + assert len(results) == 6 + assert all(isinstance(i, operations_pb2.Operation) for i in results) + pager = await client.list_operations(name="operations") + + responses = [] + async for response in pager: + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, operations_pb2.Operation) for i in results) + + pages = [] + + async for page in pager.pages: + pages.append(page) + for page_, token in zip(pages, ["", "", "", "abc", "def", "ghi", ""]): + assert page_.next_page_token == token + + +def test_get_operation_rest(): + client = _get_operations_client(is_async=False) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(_get_session_type(is_async=False), "request") as req: # Designate an appropriate value for the returned response. return_value = operations_pb2.Operation( name="operations/sample1", @@ -580,10 +866,46 @@ def test_get_operation_rest( assert response.done is True +@pytest.mark.asyncio +async def test_get_operation_rest_async(): + if not GOOGLE_AUTH_AIO_INSTALLED: + pytest.skip("Skipped because google-api-core[async_rest] is not installed") + client = _get_operations_client(is_async=True) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(_get_session_type(is_async=True), "request") as req: + # Designate an appropriate value for the returned response. 
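+        # Note: an Operation can be `done=True` and still carry an `error`;
+        # in the long-running operations proto, `error` and `response` form
+        # the `result` oneof, so a finished-but-failed operation looks like this.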
+ return_value = operations_pb2.Operation( + name="operations/sample1", + done=True, + error=status_pb2.Status(code=411), + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.read = mock.AsyncMock(return_value=json_return_value) + req.return_value = response_value + response = await client.get_operation("operations/sample1") + + actual_args = req.call_args + assert actual_args.args[0] == "GET" + assert ( + actual_args.args[1] + == "https://longrunning.googleapis.com/v3/operations/sample1" + ) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + assert response.name == "operations/sample1" + assert response.done is True + + def test_get_operation_rest_failure(): - client = _get_operations_client(http_options=None) + client = _get_operations_client(is_async=False, http_options=None) - with mock.patch.object(Session, "request") as req: + with mock.patch.object(_get_session_type(is_async=False), "request") as req: response_value = Response() response_value.status_code = 400 mock_request = mock.MagicMock() @@ -595,13 +917,30 @@ def test_get_operation_rest_failure(): client.get_operation("sample0/operations/sample1") -def test_delete_operation_rest( - transport: str = "rest", request_type=operations_pb2.DeleteOperationRequest -): - client = _get_operations_client() +@pytest.mark.asyncio +async def test_get_operation_rest_failure_async(): + if not GOOGLE_AUTH_AIO_INSTALLED: + pytest.skip("Skipped because google-api-core[async_rest] is not installed") + client = _get_operations_client(is_async=True, http_options=None) + + with mock.patch.object(_get_session_type(is_async=True), "request") as req: + response_value = mock.Mock() + response_value.status_code = 400 + response_value.read = mock.AsyncMock(return_value=b"{}") + mock_request = mock.MagicMock() + mock_request.method = "GET" + mock_request.url = "https://longrunning.googleapis.com/v1/operations/sample1" + response_value.request = mock_request + req.return_value = response_value + with pytest.raises(core_exceptions.GoogleAPIError): + await client.get_operation("sample0/operations/sample1") + + +def test_delete_operation_rest(): + client = _get_operations_client(is_async=False) # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: + with mock.patch.object(_get_session_type(is_async=False), "request") as req: # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 @@ -618,10 +957,36 @@ def test_delete_operation_rest( ) +@pytest.mark.asyncio +async def test_delete_operation_rest_async(): + if not GOOGLE_AUTH_AIO_INSTALLED: + pytest.skip("Skipped because google-api-core[async_rest] is not installed") + client = _get_operations_client(is_async=True) + + # Mock the http request call within the method and fake a response. 
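+    # `_get_session_type` is a test helper defined earlier in this module,
+    # assumed to select the session class the transport talks through:
+    # roughly, the async authorized session from `google.auth.aio.transport`
+    # when `is_async=True`, else `requests.Session`. Patching its `request`
+    # method therefore intercepts the HTTP call for either transport.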
+ with mock.patch.object(_get_session_type(is_async=True), "request") as req: + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = "" + response_value.read = mock.AsyncMock( + return_value=json_return_value.encode("UTF-8") + ) + req.return_value = response_value + await client.delete_operation(name="operations/sample1") + assert req.call_count == 1 + actual_args = req.call_args + assert actual_args.args[0] == "DELETE" + assert ( + actual_args.args[1] + == "https://longrunning.googleapis.com/v3/operations/sample1" + ) + + def test_delete_operation_rest_failure(): - client = _get_operations_client(http_options=None) + client = _get_operations_client(is_async=False, http_options=None) - with mock.patch.object(Session, "request") as req: + with mock.patch.object(_get_session_type(is_async=False), "request") as req: response_value = Response() response_value.status_code = 400 mock_request = mock.MagicMock() @@ -633,11 +998,30 @@ def test_delete_operation_rest_failure(): client.delete_operation(name="sample0/operations/sample1") -def test_cancel_operation_rest(transport: str = "rest"): - client = _get_operations_client() +@pytest.mark.asyncio +async def test_delete_operation_rest_failure_async(): + if not GOOGLE_AUTH_AIO_INSTALLED: + pytest.skip("Skipped because google-api-core[async_rest] is not installed") + client = _get_operations_client(is_async=True, http_options=None) + + with mock.patch.object(_get_session_type(is_async=True), "request") as req: + response_value = mock.Mock() + response_value.status_code = 400 + response_value.read = mock.AsyncMock(return_value=b"{}") + mock_request = mock.MagicMock() + mock_request.method = "DELETE" + mock_request.url = "https://longrunning.googleapis.com/v1/operations/sample1" + response_value.request = mock_request + req.return_value = response_value + with pytest.raises(core_exceptions.GoogleAPIError): + await client.delete_operation(name="sample0/operations/sample1") + + +def test_cancel_operation_rest(): + client = _get_operations_client(is_async=False) # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: + with mock.patch.object(_get_session_type(is_async=False), "request") as req: # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 @@ -654,10 +1038,36 @@ def test_cancel_operation_rest(transport: str = "rest"): ) +@pytest.mark.asyncio +async def test_cancel_operation_rest_async(): + if not GOOGLE_AUTH_AIO_INSTALLED: + pytest.skip("Skipped because google-api-core[async_rest] is not installed") + client = _get_operations_client(is_async=True) + + # Mock the http request call within the method and fake a response. 
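+    # CancelOperation (like DeleteOperation) returns `google.protobuf.Empty`
+    # over REST, so the empty JSON body below is a valid success payload.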
+ with mock.patch.object(_get_session_type(is_async=True), "request") as req: + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = "" + response_value.read = mock.AsyncMock( + return_value=json_return_value.encode("UTF-8") + ) + req.return_value = response_value + await client.cancel_operation(name="operations/sample1") + assert req.call_count == 1 + actual_args = req.call_args + assert actual_args.args[0] == "POST" + assert ( + actual_args.args[1] + == "https://longrunning.googleapis.com/v3/operations/sample1:cancel" + ) + + def test_cancel_operation_rest_failure(): - client = _get_operations_client(http_options=None) + client = _get_operations_client(is_async=False, http_options=None) - with mock.patch.object(Session, "request") as req: + with mock.patch.object(_get_session_type(is_async=False), "request") as req: response_value = Response() response_value.status_code = 400 mock_request = mock.MagicMock() @@ -671,52 +1081,78 @@ def test_cancel_operation_rest_failure(): client.cancel_operation(name="sample0/operations/sample1") -def test_credentials_transport_error(): +@pytest.mark.asyncio +async def test_cancel_operation_rest_failure_async(): + if not GOOGLE_AUTH_AIO_INSTALLED: + pytest.skip("Skipped because google-api-core[async_rest] is not installed") + client = _get_operations_client(is_async=True, http_options=None) + + with mock.patch.object(_get_session_type(is_async=True), "request") as req: + response_value = mock.Mock() + response_value.status_code = 400 + response_value.read = mock.AsyncMock(return_value=b"{}") + mock_request = mock.MagicMock() + mock_request.method = "POST" + mock_request.url = ( + "https://longrunning.googleapis.com/v1/operations/sample1:cancel" + ) + response_value.request = mock_request + req.return_value = response_value + with pytest.raises(core_exceptions.GoogleAPIError): + await client.cancel_operation(name="sample0/operations/sample1") + + +@pytest.mark.parametrize( + "client_class,transport_class,credentials", + PYPARAM_CLIENT_TRANSPORT_CREDENTIALS, +) +def test_credentials_transport_error(client_class, transport_class, credentials): # It is an error to provide credentials and a transport instance. - transport = transports.OperationsRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) + transport = transport_class(credentials=credentials) with pytest.raises(ValueError): - AbstractOperationsClient( + client_class( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) # It is an error to provide a credentials file and a transport instance. - transport = transports.OperationsRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) + transport = transport_class(credentials=credentials) with pytest.raises(ValueError): - AbstractOperationsClient( + client_class( client_options={"credentials_file": "credentials.json"}, transport=transport, ) # It is an error to provide scopes and a transport instance. 
- transport = transports.OperationsRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) + transport = transport_class(credentials=credentials) with pytest.raises(ValueError): - AbstractOperationsClient( + client_class( client_options={"scopes": ["1", "2"]}, transport=transport, ) -def test_transport_instance(): +@pytest.mark.parametrize( + "client_class,transport_class,credentials", + PYPARAM_CLIENT_TRANSPORT_CREDENTIALS, +) +def test_transport_instance(client_class, transport_class, credentials): # A client may be instantiated with a custom transport instance. - transport = transports.OperationsRestTransport( - credentials=ga_credentials.AnonymousCredentials(), + transport = transport_class( + credentials=credentials, ) - client = AbstractOperationsClient(transport=transport) + client = client_class(transport=transport) assert client.transport is transport -@pytest.mark.parametrize("transport_class", [transports.OperationsRestTransport]) -def test_transport_adc(transport_class): +@pytest.mark.parametrize( + "client_class,transport_class,credentials", + PYPARAM_CLIENT_TRANSPORT_CREDENTIALS, +) +def test_transport_adc(client_class, transport_class, credentials): # Test default credentials are used if not provided. with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) + adc.return_value = (credentials, None) transport_class() adc.assert_called_once() @@ -724,10 +1160,11 @@ def test_transport_adc(transport_class): def test_operations_base_transport_error(): # Passing both a credentials object and credentials_file should raise an error with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transports.OperationsTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json", - ) + with warn_deprecated_credentials_file(): + transports.OperationsTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) def test_operations_base_transport(): @@ -765,10 +1202,11 @@ def test_operations_base_transport_with_credentials_file(): ) as Transport: Transport.return_value = None load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transports.OperationsTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) + with warn_deprecated_credentials_file(): + transports.OperationsTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) load_creds.assert_called_once_with( "credentials.json", scopes=None, @@ -788,32 +1226,59 @@ def test_operations_base_transport_with_adc(): adc.assert_called_once() -def test_operations_auth_adc(): +@pytest.mark.parametrize( + "client_class", + PYPARAM_CLIENT, +) +def test_operations_auth_adc(client_class): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(google.auth, "default", autospec=True) as adc: adc.return_value = (ga_credentials.AnonymousCredentials(), None) - AbstractOperationsClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=(), - quota_project_id=None, - ) + + if "async" in str(client_class).lower(): + # TODO(): Add support for adc to async REST transport. + # NOTE: Ideally, the logic for adc shouldn't be called if transport + # is set to async REST. If the user does not configure credentials + # of type `google.auth.aio.credentials.Credentials`, + # we should raise an exception to avoid the adc workflow. 
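+            # For illustration, a supported async configuration would pass
+            # async credentials explicitly (assuming google-auth's aio module
+            # is available), e.g.:
+            #     from google.auth.aio.credentials import AnonymousCredentials
+            #     client_class(credentials=AnonymousCredentials())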
+ with pytest.raises(google.auth.exceptions.InvalidType): + client_class() + else: + client_class() + adc.assert_called_once_with( + scopes=None, + default_scopes=(), + quota_project_id=None, + ) -def test_operations_http_transport_client_cert_source_for_mtls(): +# TODO(https://github.com/googleapis/python-api-core/issues/705): Add +# testing for `transports.AsyncOperationsRestTransport` once MTLS is supported +# in `google.auth.aio.transport`. +@pytest.mark.parametrize( + "transport_class", + [ + transports.OperationsRestTransport, + ], +) +def test_operations_http_transport_client_cert_source_for_mtls(transport_class): cred = ga_credentials.AnonymousCredentials() with mock.patch( "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" ) as mock_configure_mtls_channel: - transports.OperationsRestTransport( + transport_class( credentials=cred, client_cert_source_for_mtls=client_cert_source_callback ) mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) -def test_operations_host_no_port(): - client = AbstractOperationsClient( - credentials=ga_credentials.AnonymousCredentials(), +@pytest.mark.parametrize( + "client_class,transport_class,credentials", + PYPARAM_CLIENT_TRANSPORT_CREDENTIALS, +) +def test_operations_host_no_port(client_class, transport_class, credentials): + client = client_class( + credentials=credentials, client_options=client_options.ClientOptions( api_endpoint="longrunning.googleapis.com" ), @@ -821,9 +1286,13 @@ def test_operations_host_no_port(): assert client.transport._host == "https://longrunning.googleapis.com" -def test_operations_host_with_port(): - client = AbstractOperationsClient( - credentials=ga_credentials.AnonymousCredentials(), +@pytest.mark.parametrize( + "client_class,transport_class,credentials", + PYPARAM_CLIENT_TRANSPORT_CREDENTIALS, +) +def test_operations_host_with_port(client_class, transport_class, credentials): + client = client_class( + credentials=credentials, client_options=client_options.ClientOptions( api_endpoint="longrunning.googleapis.com:8000" ), @@ -831,127 +1300,165 @@ def test_operations_host_with_port(): assert client.transport._host == "https://longrunning.googleapis.com:8000" -def test_common_billing_account_path(): +@pytest.mark.parametrize( + "client_class", + PYPARAM_CLIENT, +) +def test_common_billing_account_path(client_class): billing_account = "squid" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) - actual = AbstractOperationsClient.common_billing_account_path(billing_account) + actual = client_class.common_billing_account_path(billing_account) assert expected == actual -def test_parse_common_billing_account_path(): +@pytest.mark.parametrize( + "client_class", + PYPARAM_CLIENT, +) +def test_parse_common_billing_account_path(client_class): expected = { "billing_account": "clam", } - path = AbstractOperationsClient.common_billing_account_path(**expected) + path = client_class.common_billing_account_path(**expected) # Check that the path construction is reversible. 
- actual = AbstractOperationsClient.parse_common_billing_account_path(path) + actual = client_class.parse_common_billing_account_path(path) assert expected == actual -def test_common_folder_path(): +@pytest.mark.parametrize( + "client_class", + PYPARAM_CLIENT, +) +def test_common_folder_path(client_class): folder = "whelk" expected = "folders/{folder}".format( folder=folder, ) - actual = AbstractOperationsClient.common_folder_path(folder) + actual = client_class.common_folder_path(folder) assert expected == actual -def test_parse_common_folder_path(): +@pytest.mark.parametrize( + "client_class", + PYPARAM_CLIENT, +) +def test_parse_common_folder_path(client_class): expected = { "folder": "octopus", } - path = AbstractOperationsClient.common_folder_path(**expected) + path = client_class.common_folder_path(**expected) # Check that the path construction is reversible. - actual = AbstractOperationsClient.parse_common_folder_path(path) + actual = client_class.parse_common_folder_path(path) assert expected == actual -def test_common_organization_path(): +@pytest.mark.parametrize( + "client_class", + PYPARAM_CLIENT, +) +def test_common_organization_path(client_class): organization = "oyster" expected = "organizations/{organization}".format( organization=organization, ) - actual = AbstractOperationsClient.common_organization_path(organization) + actual = client_class.common_organization_path(organization) assert expected == actual -def test_parse_common_organization_path(): +@pytest.mark.parametrize( + "client_class", + PYPARAM_CLIENT, +) +def test_parse_common_organization_path(client_class): expected = { "organization": "nudibranch", } - path = AbstractOperationsClient.common_organization_path(**expected) + path = client_class.common_organization_path(**expected) # Check that the path construction is reversible. - actual = AbstractOperationsClient.parse_common_organization_path(path) + actual = client_class.parse_common_organization_path(path) assert expected == actual -def test_common_project_path(): +@pytest.mark.parametrize( + "client_class", + PYPARAM_CLIENT, +) +def test_common_project_path(client_class): project = "cuttlefish" expected = "projects/{project}".format( project=project, ) - actual = AbstractOperationsClient.common_project_path(project) + actual = client_class.common_project_path(project) assert expected == actual -def test_parse_common_project_path(): +@pytest.mark.parametrize( + "client_class", + PYPARAM_CLIENT, +) +def test_parse_common_project_path(client_class): expected = { "project": "mussel", } - path = AbstractOperationsClient.common_project_path(**expected) + path = client_class.common_project_path(**expected) # Check that the path construction is reversible. 
- actual = AbstractOperationsClient.parse_common_project_path(path) + actual = client_class.parse_common_project_path(path) assert expected == actual -def test_common_location_path(): +@pytest.mark.parametrize( + "client_class", + PYPARAM_CLIENT, +) +def test_common_location_path(client_class): project = "winkle" location = "nautilus" expected = "projects/{project}/locations/{location}".format( project=project, location=location, ) - actual = AbstractOperationsClient.common_location_path(project, location) + actual = client_class.common_location_path(project, location) assert expected == actual -def test_parse_common_location_path(): +@pytest.mark.parametrize( + "client_class", + PYPARAM_CLIENT, +) +def test_parse_common_location_path(client_class): expected = { "project": "scallop", "location": "abalone", } - path = AbstractOperationsClient.common_location_path(**expected) + path = client_class.common_location_path(**expected) # Check that the path construction is reversible. - actual = AbstractOperationsClient.parse_common_location_path(path) + actual = client_class.parse_common_location_path(path) assert expected == actual -def test_client_withDEFAULT_CLIENT_INFO(): +@pytest.mark.parametrize( + "client_class,transport_class,credentials", + PYPARAM_CLIENT_TRANSPORT_CREDENTIALS, +) +def test_client_withDEFAULT_CLIENT_INFO(client_class, transport_class, credentials): client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object( - transports.OperationsTransport, "_prep_wrapped_messages" - ) as prep: - AbstractOperationsClient( - credentials=ga_credentials.AnonymousCredentials(), + with mock.patch.object(transport_class, "_prep_wrapped_messages") as prep: + client_class( + credentials=credentials, client_info=client_info, ) prep.assert_called_once_with(client_info) - with mock.patch.object( - transports.OperationsTransport, "_prep_wrapped_messages" - ) as prep: - transport_class = AbstractOperationsClient.get_transport_class() + with mock.patch.object(transport_class, "_prep_wrapped_messages") as prep: transport_class( - credentials=ga_credentials.AnonymousCredentials(), + credentials=credentials, client_info=client_info, ) prep.assert_called_once_with(client_info) diff --git a/tests/unit/retry/__init__.py b/tests/unit/retry/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/unit/retry/test_retry_base.py b/tests/unit/retry/test_retry_base.py new file mode 100644 index 000000000..212c4293b --- /dev/null +++ b/tests/unit/retry/test_retry_base.py @@ -0,0 +1,293 @@ +# Copyright 2017 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+
+import itertools
+import re
+from unittest import mock
+
+import pytest
+import requests.exceptions
+
+from google.api_core import exceptions
+from google.api_core import retry
+from google.auth import exceptions as auth_exceptions
+
+
+def test_if_exception_type():
+    predicate = retry.if_exception_type(ValueError)
+
+    assert predicate(ValueError())
+    assert not predicate(TypeError())
+
+
+def test_if_exception_type_multiple():
+    predicate = retry.if_exception_type(ValueError, TypeError)
+
+    assert predicate(ValueError())
+    assert predicate(TypeError())
+    assert not predicate(RuntimeError())
+
+
+def test_if_transient_error():
+    assert retry.if_transient_error(exceptions.InternalServerError(""))
+    assert retry.if_transient_error(exceptions.TooManyRequests(""))
+    assert retry.if_transient_error(exceptions.ServiceUnavailable(""))
+    assert retry.if_transient_error(requests.exceptions.ConnectionError(""))
+    assert retry.if_transient_error(requests.exceptions.ChunkedEncodingError(""))
+    assert retry.if_transient_error(auth_exceptions.TransportError(""))
+    assert not retry.if_transient_error(exceptions.InvalidArgument(""))
+
+
+# Make uniform return its maximum argument, which will be the calculated
+# sleep time.
+@mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n)
+def test_exponential_sleep_generator_base_2(uniform):
+    gen = retry.exponential_sleep_generator(1, 60, multiplier=2)
+
+    result = list(itertools.islice(gen, 8))
+    assert result == [1, 2, 4, 8, 16, 32, 60, 60]
+
+
+def test_build_retry_error_empty_list():
+    """
+    attempt to build a retry error with no errors encountered
+    should return a generic RetryError
+    """
+    from google.api_core.retry import build_retry_error
+    from google.api_core.retry import RetryFailureReason
+
+    reason = RetryFailureReason.NON_RETRYABLE_ERROR
+    src, cause = build_retry_error([], reason, 10)
+    assert isinstance(src, exceptions.RetryError)
+    assert cause is None
+    assert src.message == "Unknown error"
+
+
+def test_build_retry_error_timeout_message():
+    """
+    should provide helpful error message when timeout is reached
+    """
+    from google.api_core.retry import build_retry_error
+    from google.api_core.retry import RetryFailureReason
+
+    reason = RetryFailureReason.TIMEOUT
+    cause = RuntimeError("timeout")
+    src, found_cause = build_retry_error([ValueError(), cause], reason, 10)
+    assert isinstance(src, exceptions.RetryError)
+    assert src.message == "Timeout of 10.0s exceeded"
+    # should attach appropriate cause
+    assert found_cause is cause
+
+
+def test_build_retry_error_empty_timeout():
+    """
+    attempt to build a retry error when timeout is None
+    should return a generic timeout error message
+    """
+    from google.api_core.retry import build_retry_error
+    from google.api_core.retry import RetryFailureReason
+
+    reason = RetryFailureReason.TIMEOUT
+    src, _ = build_retry_error([], reason, None)
+    assert isinstance(src, exceptions.RetryError)
+    assert src.message == "Timeout exceeded"
+
+
+class Test_BaseRetry(object):
+    def _make_one(self, *args, **kwargs):
+        return retry.retry_base._BaseRetry(*args, **kwargs)
+
+    def test_constructor_defaults(self):
+        retry_ = self._make_one()
+        assert retry_._predicate == retry.if_transient_error
+        assert retry_._initial == 1
+        assert retry_._maximum == 60
+        assert retry_._multiplier == 2
+        assert retry_._timeout == 120
+        assert retry_._on_error is None
+        assert retry_.timeout == 120
+
+    def test_constructor_options(self):
+        _some_function = mock.Mock()
+
+        retry_ = self._make_one(
+            predicate=mock.sentinel.predicate,
+            initial=1,
+            maximum=2,
+            multiplier=3,
+            timeout=4,
+            on_error=_some_function,
+        )
+        assert retry_._predicate == mock.sentinel.predicate
+        assert retry_._initial == 1
+        assert retry_._maximum == 2
+        assert retry_._multiplier == 3
+        assert retry_._timeout == 4
+        assert retry_._on_error is _some_function
+
+    @pytest.mark.parametrize("use_deadline", [True, False])
+    @pytest.mark.parametrize("value", [None, 0, 1, 4, 42, 5.5])
+    def test_with_timeout(self, use_deadline, value):
+        retry_ = self._make_one(
+            predicate=mock.sentinel.predicate,
+            initial=1,
+            maximum=2,
+            multiplier=3,
+            timeout=4,
+            on_error=mock.sentinel.on_error,
+        )
+        new_retry = (
+            retry_.with_timeout(value)
+            if not use_deadline
+            else retry_.with_deadline(value)
+        )
+        assert retry_ is not new_retry
+        assert new_retry._timeout == value
+        assert (
+            new_retry.timeout == value
+            if not use_deadline
+            else new_retry.deadline == value
+        )
+
+        # the rest of the attributes should remain the same
+        assert new_retry._predicate is retry_._predicate
+        assert new_retry._initial == retry_._initial
+        assert new_retry._maximum == retry_._maximum
+        assert new_retry._multiplier == retry_._multiplier
+        assert new_retry._on_error is retry_._on_error
+
+    def test_with_predicate(self):
+        retry_ = self._make_one(
+            predicate=mock.sentinel.predicate,
+            initial=1,
+            maximum=2,
+            multiplier=3,
+            timeout=4,
+            on_error=mock.sentinel.on_error,
+        )
+        new_retry = retry_.with_predicate(mock.sentinel.predicate)
+        assert retry_ is not new_retry
+        assert new_retry._predicate == mock.sentinel.predicate
+
+        # the rest of the attributes should remain the same
+        assert new_retry._timeout == retry_._timeout
+        assert new_retry._initial == retry_._initial
+        assert new_retry._maximum == retry_._maximum
+        assert new_retry._multiplier == retry_._multiplier
+        assert new_retry._on_error is retry_._on_error
+
+    def test_with_delay_noop(self):
+        retry_ = self._make_one(
+            predicate=mock.sentinel.predicate,
+            initial=1,
+            maximum=2,
+            multiplier=3,
+            timeout=4,
+            on_error=mock.sentinel.on_error,
+        )
+        new_retry = retry_.with_delay()
+        assert retry_ is not new_retry
+        assert new_retry._initial == retry_._initial
+        assert new_retry._maximum == retry_._maximum
+        assert new_retry._multiplier == retry_._multiplier
+
+    @pytest.mark.parametrize(
+        "originals,updated,expected",
+        [
+            [(1, 2, 3), (4, 5, 6), (4, 5, 6)],
+            [(1, 2, 3), (0, 0, 0), (0, 0, 0)],
+            [(1, 2, 3), (None, None, None), (1, 2, 3)],
+            [(0, 0, 0), (None, None, None), (0, 0, 0)],
+            [(1, 2, 3), (None, 0.5, None), (1, 0.5, 3)],
+            [(1, 2, 3), (None, 0.5, 4), (1, 0.5, 4)],
+            [(1, 2, 3), (9, None, None), (9, 2, 3)],
+        ],
+    )
+    def test_with_delay(self, originals, updated, expected):
+        retry_ = self._make_one(
+            predicate=mock.sentinel.predicate,
+            initial=originals[0],
+            maximum=originals[1],
+            multiplier=originals[2],
+            timeout=14,
+            on_error=mock.sentinel.on_error,
+        )
+        new_retry = retry_.with_delay(
+            initial=updated[0], maximum=updated[1], multiplier=updated[2]
+        )
+        assert retry_ is not new_retry
+        assert new_retry._initial == expected[0]
+        assert new_retry._maximum == expected[1]
+        assert new_retry._multiplier == expected[2]
+
+        # the rest of the attributes should remain the same
+        assert new_retry._timeout == retry_._timeout
+        assert new_retry._predicate is retry_._predicate
+        assert new_retry._on_error is retry_._on_error
+
+    def test_with_delay_partial_options(self):
+        retry_ = self._make_one(
+            predicate=mock.sentinel.predicate,
+            initial=1,
+            maximum=2,
+            multiplier=3,
+            timeout=4,
+            on_error=mock.sentinel.on_error,
+        )
+        new_retry = retry_.with_delay(initial=4)
+        assert retry_ is not new_retry
+        assert new_retry._initial == 4
+        assert new_retry._maximum == 2
+        assert new_retry._multiplier == 3
+
+        new_retry = retry_.with_delay(maximum=4)
+        assert retry_ is not new_retry
+        assert new_retry._initial == 1
+        assert new_retry._maximum == 4
+        assert new_retry._multiplier == 3
+
+        new_retry = retry_.with_delay(multiplier=4)
+        assert retry_ is not new_retry
+        assert new_retry._initial == 1
+        assert new_retry._maximum == 2
+        assert new_retry._multiplier == 4
+
+        # the rest of the attributes should remain the same
+        assert new_retry._timeout == retry_._timeout
+        assert new_retry._predicate is retry_._predicate
+        assert new_retry._on_error is retry_._on_error
+
+    def test___str__(self):
+        def if_exception_type(exc):
+            return bool(exc)  # pragma: NO COVER
+
+        # Explicitly set all attributes as changed Retry defaults should not
+        # cause this test to start failing.
+        retry_ = self._make_one(
+            predicate=if_exception_type,
+            initial=1.0,
+            maximum=60.0,
+            multiplier=2.0,
+            timeout=120.0,
+            on_error=None,
+        )
+        assert re.match(
+            (
+                r"<_BaseRetry predicate=<function.*?if_exception_type.*?>, "
+                r"initial=1.0, maximum=60.0, multiplier=2.0, timeout=120.0, "
+                r"on_error=None>"
+            ),
+            str(retry_),
+        )
diff --git a/tests/unit/retry/test_retry_imports.py b/tests/unit/retry/test_retry_imports.py
new file mode 100644
index 000000000..597909fc2
--- /dev/null
+++ b/tests/unit/retry/test_retry_imports.py
@@ -0,0 +1,33 @@
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+def test_legacy_imports_retry_unary_sync():
+    # TODO: Delete this test when we revert these imports on the
+    # next major version release
+    # (https://github.com/googleapis/python-api-core/issues/576)
+    from google.api_core.retry import datetime_helpers  # noqa: F401
+    from google.api_core.retry import exceptions  # noqa: F401
+    from google.api_core.retry import auth_exceptions  # noqa: F401
+
+
+def test_legacy_imports_retry_unary_async():
+    # TODO: Delete this test when we revert these imports on the
+    # next major version release
+    # (https://github.com/googleapis/python-api-core/issues/576)
+    from google.api_core import retry_async  # noqa: F401
+
+    # See https://github.com/googleapis/python-api-core/issues/586
+    # for context on why we need to test this import explicitly.
+    from google.api_core.retry_async import AsyncRetry  # noqa: F401
diff --git a/tests/unit/retry/test_retry_streaming.py b/tests/unit/retry/test_retry_streaming.py
new file mode 100644
index 000000000..2499b2ae9
--- /dev/null
+++ b/tests/unit/retry/test_retry_streaming.py
@@ -0,0 +1,505 @@
+# Copyright 2017 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import re
+
+try:
+    from unittest import mock
+    from unittest.mock import AsyncMock  # pragma: NO COVER  # noqa: F401
+except ImportError:  # pragma: NO COVER
+    import mock  # type: ignore
+
+import pytest
+
+from google.api_core import exceptions
+from google.api_core import retry
+from google.api_core.retry import retry_streaming
+
+from .test_retry_base import Test_BaseRetry
+
+
+def test_retry_streaming_target_bad_sleep_generator():
+    with pytest.raises(
+        ValueError, match="Sleep generator stopped yielding sleep values"
+    ):
+        next(retry_streaming.retry_target_stream(None, lambda x: True, [], None))
+
+
+@mock.patch("time.sleep", autospec=True)
+def test_retry_streaming_target_dynamic_backoff(sleep):
+    """
+    sleep_generator should be iterated after on_error, to support dynamic backoff
+    """
+    from functools import partial
+
+    sleep.side_effect = RuntimeError("stop after sleep")
+    # start with empty sleep generator; values are added after exception in push_sleep_value
+    sleep_values = []
+    error_target = partial(TestStreamingRetry._generator_mock, error_on=0)
+    inserted_sleep = 99
+
+    def push_sleep_value(err):
+        sleep_values.append(inserted_sleep)
+
+    with pytest.raises(RuntimeError):
+        next(
+            retry_streaming.retry_target_stream(
+                error_target,
+                predicate=lambda x: True,
+                sleep_generator=sleep_values,
+                on_error=push_sleep_value,
+            )
+        )
+    assert sleep.call_count == 1
+    sleep.assert_called_once_with(inserted_sleep)
+
+
+class TestStreamingRetry(Test_BaseRetry):
+    def _make_one(self, *args, **kwargs):
+        return retry_streaming.StreamingRetry(*args, **kwargs)
+
+    def test___str__(self):
+        def if_exception_type(exc):
+            return bool(exc)  # pragma: NO COVER
+
+        # Explicitly set all attributes as changed Retry defaults should not
+        # cause this test to start failing.
+        retry_ = retry_streaming.StreamingRetry(
+            predicate=if_exception_type,
+            initial=1.0,
+            maximum=60.0,
+            multiplier=2.0,
+            timeout=120.0,
+            on_error=None,
+        )
+        assert re.match(
+            (
+                r"<StreamingRetry predicate=<function.*?if_exception_type.*?>, "
+                r"initial=1.0, maximum=60.0, multiplier=2.0, timeout=120.0, "
+                r"on_error=None>"
+            ),
+            str(retry_),
+        )
+
+    @staticmethod
+    def _generator_mock(
+        num=5,
+        error_on=None,
+        return_val=None,
+        exceptions_seen=None,
+    ):
+        """
+        Helper to create a mock generator that yields a number of values
+        Generator can optionally raise an exception on a specific iteration
+
+        Args:
+          - num (int): the number of values to yield. 
After this, the generator will return `return_val` + - error_on (int): if given, the generator will raise a ValueError on the specified iteration + - return_val (any): if given, the generator will return this value after yielding num values + - exceptions_seen (list): if given, the generator will append any exceptions to this list before raising + """ + try: + for i in range(num): + if error_on is not None and i == error_on: + raise ValueError("generator mock error") + yield i + return return_val + except (Exception, BaseException, GeneratorExit) as e: + # keep track of exceptions seen by generator + if exceptions_seen is not None: + exceptions_seen.append(e) + raise + + @mock.patch("time.sleep", autospec=True) + def test___call___success(self, sleep): + """ + Test that a retry-decorated generator yields values as expected + This test checks a generator with no issues + """ + import types + import collections + + retry_ = retry_streaming.StreamingRetry() + + decorated = retry_(self._generator_mock) + + num = 10 + result = decorated(num) + # check types + assert isinstance(decorated(num), collections.abc.Iterable) + assert isinstance(decorated(num), types.GeneratorType) + assert isinstance(self._generator_mock(num), collections.abc.Iterable) + assert isinstance(self._generator_mock(num), types.GeneratorType) + # check yield contents + unpacked = [i for i in result] + assert len(unpacked) == num + for a, b in zip(unpacked, self._generator_mock(num)): + assert a == b + sleep.assert_not_called() + + @mock.patch("time.sleep", autospec=True) + def test___call___retry(self, sleep): + """ + Tests that a retry-decorated generator will retry on errors + """ + on_error = mock.Mock(return_value=None) + retry_ = retry_streaming.StreamingRetry( + on_error=on_error, + predicate=retry.if_exception_type(ValueError), + timeout=None, + ) + result = retry_(self._generator_mock)(error_on=3) + # error thrown on 3 + # generator should contain 0, 1, 2 looping + unpacked = [next(result) for i in range(10)] + assert unpacked == [0, 1, 2, 0, 1, 2, 0, 1, 2, 0] + assert on_error.call_count == 3 + + @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n) + @mock.patch("time.sleep", autospec=True) + @pytest.mark.parametrize("use_deadline_arg", [True, False]) + def test___call___retry_hitting_timeout(self, sleep, uniform, use_deadline_arg): + """ + Tests that a retry-decorated generator will throw a RetryError + after using the time budget + """ + import time + + timeout_val = 30.9 + # support "deadline" as an alias for "timeout" + timeout_kwarg = ( + {"timeout": timeout_val} + if not use_deadline_arg + else {"deadline": timeout_val} + ) + + on_error = mock.Mock(return_value=None) + retry_ = retry_streaming.StreamingRetry( + predicate=retry.if_exception_type(ValueError), + initial=1.0, + maximum=1024.0, + multiplier=2.0, + **timeout_kwarg, + ) + + timenow = time.monotonic() + now_patcher = mock.patch( + "time.monotonic", + return_value=timenow, + ) + + decorated = retry_(self._generator_mock, on_error=on_error) + generator = decorated(error_on=1) + with now_patcher as patched_now: + # Make sure that calls to fake time.sleep() also advance the mocked + # time clock. 
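+            # With initial=1.0, multiplier=2.0, and `random.uniform` patched
+            # above to return its upper bound, the sleeps are 1 + 2 + 4 + 8
+            # = 15 seconds, which the last_wait/total_wait asserts verify.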
+ def increase_time(sleep_delay): + patched_now.return_value += sleep_delay + + sleep.side_effect = increase_time + with pytest.raises(exceptions.RetryError): + [i for i in generator] + + assert on_error.call_count == 5 + # check the delays + assert sleep.call_count == 4 # once between each successive target calls + last_wait = sleep.call_args.args[0] + total_wait = sum(call_args.args[0] for call_args in sleep.call_args_list) + assert last_wait == 8.0 + assert total_wait == 15.0 + + @mock.patch("time.sleep", autospec=True) + def test___call___with_generator_send(self, sleep): + """ + Send should be passed through retry into target generator + """ + + def _mock_send_gen(): + """ + always yield whatever was sent in + """ + in_ = yield + while True: + in_ = yield in_ + + retry_ = retry_streaming.StreamingRetry() + + decorated = retry_(_mock_send_gen) + + generator = decorated() + result = next(generator) + # first yield should be None + assert result is None + in_messages = ["test_1", "hello", "world"] + out_messages = [] + for msg in in_messages: + recv = generator.send(msg) + out_messages.append(recv) + assert in_messages == out_messages + + @mock.patch("time.sleep", autospec=True) + def test___call___with_generator_send_retry(self, sleep): + """ + Send should support retries like next + """ + on_error = mock.Mock(return_value=None) + retry_ = retry_streaming.StreamingRetry( + on_error=on_error, + predicate=retry.if_exception_type(ValueError), + timeout=None, + ) + result = retry_(self._generator_mock)(error_on=3) + with pytest.raises(TypeError) as exc_info: + # calling first send with non-None input should raise a TypeError + result.send("can not send to fresh generator") + assert exc_info.match("can't send non-None value") + # initiate iteration with None + result = retry_(self._generator_mock)(error_on=3) + assert result.send(None) == 0 + # error thrown on 3 + # generator should contain 0, 1, 2 looping + unpacked = [result.send(i) for i in range(10)] + assert unpacked == [1, 2, 0, 1, 2, 0, 1, 2, 0, 1] + assert on_error.call_count == 3 + + @mock.patch("time.sleep", autospec=True) + def test___call___with_iterable_send(self, sleep): + """ + send should raise attribute error if wrapped iterator does not support it + """ + retry_ = retry_streaming.StreamingRetry() + + def iterable_fn(n): + return iter(range(n)) + + decorated = retry_(iterable_fn) + generator = decorated(5) + # initialize + next(generator) + # call send + with pytest.raises(AttributeError): + generator.send("test") + + @mock.patch("time.sleep", autospec=True) + def test___call___with_iterable_close(self, sleep): + """ + close should be handled by wrapper if wrapped iterable does not support it + """ + retry_ = retry_streaming.StreamingRetry() + + def iterable_fn(n): + return iter(range(n)) + + decorated = retry_(iterable_fn) + + # try closing active generator + retryable = decorated(10) + assert next(retryable) == 0 + retryable.close() + with pytest.raises(StopIteration): + next(retryable) + + # try closing a new generator + retryable = decorated(10) + retryable.close() + with pytest.raises(StopIteration): + next(retryable) + + @mock.patch("time.sleep", autospec=True) + def test___call___with_iterable_throw(self, sleep): + """ + Throw should work even if the wrapped iterable does not support it + """ + predicate = retry.if_exception_type(ValueError) + retry_ = retry_streaming.StreamingRetry(predicate=predicate) + + def iterable_fn(n): + return iter(range(n)) + + decorated = retry_(iterable_fn) + + # try throwing with active 
generator + retryable = decorated(10) + assert next(retryable) == 0 + # should swallow errors in predicate + retryable.throw(ValueError) + assert next(retryable) == 1 + # should raise on other errors + with pytest.raises(TypeError): + retryable.throw(TypeError) + with pytest.raises(StopIteration): + next(retryable) + + # try throwing with a new generator + retryable = decorated(10) + with pytest.raises(ValueError): + retryable.throw(ValueError) + with pytest.raises(StopIteration): + next(retryable) + + @mock.patch("time.sleep", autospec=True) + def test___call___with_generator_return(self, sleep): + """ + Generator return value should be passed through retry decorator + """ + retry_ = retry_streaming.StreamingRetry() + + decorated = retry_(self._generator_mock) + + expected_value = "done" + generator = decorated(5, return_val=expected_value) + found_value = None + try: + while True: + next(generator) + except StopIteration as e: + found_value = e.value + assert found_value == expected_value + + @mock.patch("time.sleep", autospec=True) + def test___call___with_generator_close(self, sleep): + """ + Close should be passed through retry into target generator + """ + retry_ = retry_streaming.StreamingRetry() + + decorated = retry_(self._generator_mock) + + exception_list = [] + generator = decorated(10, exceptions_seen=exception_list) + for i in range(2): + next(generator) + generator.close() + assert isinstance(exception_list[0], GeneratorExit) + with pytest.raises(StopIteration): + # calling next on closed generator should raise error + next(generator) + + @mock.patch("time.sleep", autospec=True) + def test___call___with_generator_throw(self, sleep): + """ + Throw should be passed through retry into target generator + """ + retry_ = retry_streaming.StreamingRetry( + predicate=retry.if_exception_type(ValueError), + ) + decorated = retry_(self._generator_mock) + + exception_list = [] + generator = decorated(10, exceptions_seen=exception_list) + for i in range(2): + next(generator) + with pytest.raises(BufferError): + generator.throw(BufferError("test")) + assert isinstance(exception_list[0], BufferError) + with pytest.raises(StopIteration): + # calling next on closed generator should raise error + next(generator) + # should retry if throw retryable exception + exception_list = [] + generator = decorated(10, exceptions_seen=exception_list) + for i in range(2): + next(generator) + val = generator.throw(ValueError("test")) + assert val == 0 + assert isinstance(exception_list[0], ValueError) + # calling next on closed generator should not raise error + assert next(generator) == 1 + + def test_exc_factory_non_retryable_error(self): + """ + generator should give the option to override exception creation logic + test when non-retryable error is thrown + """ + from google.api_core.retry import RetryFailureReason + from google.api_core.retry.retry_streaming import retry_target_stream + + timeout = None + sent_errors = [ValueError("test"), ValueError("test2"), BufferError("test3")] + expected_final_err = RuntimeError("done") + expected_source_err = ZeroDivisionError("test4") + + def factory(*args, **kwargs): + assert len(kwargs) == 0 + assert args[0] == sent_errors + assert args[1] == RetryFailureReason.NON_RETRYABLE_ERROR + assert args[2] == timeout + return expected_final_err, expected_source_err + + generator = retry_target_stream( + self._generator_mock, + retry.if_exception_type(ValueError), + [0] * 3, + timeout=timeout, + exception_factory=factory, + ) + # initialize generator + next(generator) + # 
trigger some retryable errors + generator.throw(sent_errors[0]) + generator.throw(sent_errors[1]) + # trigger a non-retryable error + with pytest.raises(expected_final_err.__class__) as exc_info: + generator.throw(sent_errors[2]) + assert exc_info.value == expected_final_err + assert exc_info.value.__cause__ == expected_source_err + + def test_exc_factory_timeout(self): + """ + generator should give the option to override exception creation logic + test when timeout is exceeded + """ + import time + from google.api_core.retry import RetryFailureReason + from google.api_core.retry.retry_streaming import retry_target_stream + + timeout = 2 + time_now = time.monotonic() + now_patcher = mock.patch( + "time.monotonic", + return_value=time_now, + ) + + with now_patcher as patched_now: + timeout = 2 + sent_errors = [ValueError("test"), ValueError("test2"), ValueError("test3")] + expected_final_err = RuntimeError("done") + expected_source_err = ZeroDivisionError("test4") + + def factory(*args, **kwargs): + assert len(kwargs) == 0 + assert args[0] == sent_errors + assert args[1] == RetryFailureReason.TIMEOUT + assert args[2] == timeout + return expected_final_err, expected_source_err + + generator = retry_target_stream( + self._generator_mock, + retry.if_exception_type(ValueError), + [0] * 3, + timeout=timeout, + exception_factory=factory, + check_timeout_on_yield=True, + ) + # initialize generator + next(generator) + # trigger some retryable errors + generator.throw(sent_errors[0]) + generator.throw(sent_errors[1]) + # trigger a timeout + patched_now.return_value += timeout + 1 + with pytest.raises(expected_final_err.__class__) as exc_info: + generator.throw(sent_errors[2]) + assert exc_info.value == expected_final_err + assert exc_info.value.__cause__ == expected_source_err diff --git a/tests/unit/test_retry.py b/tests/unit/retry/test_retry_unary.py similarity index 56% rename from tests/unit/test_retry.py rename to tests/unit/retry/test_retry_unary.py index d770f7a2f..f5bbcff72 100644 --- a/tests/unit/test_retry.py +++ b/tests/unit/retry/test_retry_unary.py @@ -13,51 +13,19 @@ # limitations under the License. import datetime -import itertools +import pytest import re -import mock -import pytest -import requests.exceptions +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER # noqa: F401 +except ImportError: # pragma: NO COVER + import mock # type: ignore from google.api_core import exceptions from google.api_core import retry -from google.auth import exceptions as auth_exceptions - - -def test_if_exception_type(): - predicate = retry.if_exception_type(ValueError) - - assert predicate(ValueError()) - assert not predicate(TypeError()) - - -def test_if_exception_type_multiple(): - predicate = retry.if_exception_type(ValueError, TypeError) - - assert predicate(ValueError()) - assert predicate(TypeError()) - assert not predicate(RuntimeError()) - -def test_if_transient_error(): - assert retry.if_transient_error(exceptions.InternalServerError("")) - assert retry.if_transient_error(exceptions.TooManyRequests("")) - assert retry.if_transient_error(exceptions.ServiceUnavailable("")) - assert retry.if_transient_error(requests.exceptions.ConnectionError("")) - assert retry.if_transient_error(requests.exceptions.ChunkedEncodingError("")) - assert retry.if_transient_error(auth_exceptions.TransportError("")) - assert not retry.if_transient_error(exceptions.InvalidArgument("")) - - -# Make uniform return half of its maximum, which will be the calculated -# sleep time. 
-@mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n) -def test_exponential_sleep_generator_base_2(uniform): - gen = retry.exponential_sleep_generator(1, 60, multiplier=2) - - result = list(itertools.islice(gen, 8)) - assert result == [1, 2, 4, 8, 16, 32, 60, 60] +from .test_retry_base import Test_BaseRetry @mock.patch("time.sleep", autospec=True) @@ -145,30 +113,29 @@ async def test_retry_target_warning_for_retry(utcnow, sleep): retry.retry_target(target, predicate, range(10), None) assert len(exc_info) == 2 - assert str(exc_info[0].message) == retry._ASYNC_RETRY_WARNING + assert str(exc_info[0].message) == retry.retry_unary._ASYNC_RETRY_WARNING sleep.assert_not_called() @mock.patch("time.sleep", autospec=True) -@mock.patch("google.api_core.datetime_helpers.utcnow", autospec=True) -def test_retry_target_deadline_exceeded(utcnow, sleep): +@mock.patch("time.monotonic", autospec=True) +@pytest.mark.parametrize("use_deadline_arg", [True, False]) +def test_retry_target_timeout_exceeded(monotonic, sleep, use_deadline_arg): predicate = retry.if_exception_type(ValueError) exception = ValueError("meep") target = mock.Mock(side_effect=exception) # Setup the timeline so that the first call takes 5 seconds but the second - # call takes 6, which puts the retry over the deadline. - utcnow.side_effect = [ - # The first call to utcnow establishes the start of the timeline. - datetime.datetime.min, - datetime.datetime.min + datetime.timedelta(seconds=5), - datetime.datetime.min + datetime.timedelta(seconds=11), - ] + # call takes 6, which puts the retry over the timeout. + monotonic.side_effect = [0, 5, 11] + + # support "deadline" as an alias for "timeout" + kwargs = {"timeout": 10} if not use_deadline_arg else {"deadline": 10} with pytest.raises(exceptions.RetryError) as exc_info: - retry.retry_target(target, predicate, range(10), deadline=10) + retry.retry_target(target, predicate, range(10), **kwargs) assert exc_info.value.cause == exception - assert exc_info.match("Deadline of 10.0s exceeded") + assert exc_info.match("Timeout of 10.0s exceeded") assert exc_info.match("last exception: meep") assert target.call_count == 2 @@ -179,145 +146,38 @@ def test_retry_target_deadline_exceeded(utcnow, sleep): def test_retry_target_bad_sleep_generator(): with pytest.raises(ValueError, match="Sleep generator"): - retry.retry_target(mock.sentinel.target, mock.sentinel.predicate, [], None) - + retry.retry_target(mock.sentinel.target, lambda x: True, [], None) -class TestRetry(object): - def test_constructor_defaults(self): - retry_ = retry.Retry() - assert retry_._predicate == retry.if_transient_error - assert retry_._initial == 1 - assert retry_._maximum == 60 - assert retry_._multiplier == 2 - assert retry_._deadline == 120 - assert retry_._on_error is None - assert retry_.deadline == 120 - assert retry_.timeout == 120 - - def test_constructor_options(self): - _some_function = mock.Mock() - retry_ = retry.Retry( - predicate=mock.sentinel.predicate, - initial=1, - maximum=2, - multiplier=3, - deadline=4, - on_error=_some_function, +@mock.patch("time.sleep", autospec=True) +def test_retry_target_dynamic_backoff(sleep): + """ + sleep_generator should be iterated after on_error, to support dynamic backoff + """ + sleep.side_effect = RuntimeError("stop after sleep") + # start with empty sleep generator; values are added after exception in push_sleep_value + sleep_values = [] + exception = ValueError("trigger retry") + error_target = mock.Mock(side_effect=exception) + inserted_sleep = 99 + + def 
push_sleep_value(err): + sleep_values.append(inserted_sleep) + + with pytest.raises(RuntimeError): + retry.retry_target( + error_target, + predicate=lambda x: True, + sleep_generator=sleep_values, + on_error=push_sleep_value, ) - assert retry_._predicate == mock.sentinel.predicate - assert retry_._initial == 1 - assert retry_._maximum == 2 - assert retry_._multiplier == 3 - assert retry_._deadline == 4 - assert retry_._on_error is _some_function + assert sleep.call_count == 1 + sleep.assert_called_once_with(inserted_sleep) - def test_with_deadline(self): - retry_ = retry.Retry( - predicate=mock.sentinel.predicate, - initial=1, - maximum=2, - multiplier=3, - deadline=4, - on_error=mock.sentinel.on_error, - ) - new_retry = retry_.with_deadline(42) - assert retry_ is not new_retry - assert new_retry._deadline == 42 - - # the rest of the attributes should remain the same - assert new_retry._predicate is retry_._predicate - assert new_retry._initial == retry_._initial - assert new_retry._maximum == retry_._maximum - assert new_retry._multiplier == retry_._multiplier - assert new_retry._on_error is retry_._on_error - - def test_with_predicate(self): - retry_ = retry.Retry( - predicate=mock.sentinel.predicate, - initial=1, - maximum=2, - multiplier=3, - deadline=4, - on_error=mock.sentinel.on_error, - ) - new_retry = retry_.with_predicate(mock.sentinel.predicate) - assert retry_ is not new_retry - assert new_retry._predicate == mock.sentinel.predicate - - # the rest of the attributes should remain the same - assert new_retry._deadline == retry_._deadline - assert new_retry._initial == retry_._initial - assert new_retry._maximum == retry_._maximum - assert new_retry._multiplier == retry_._multiplier - assert new_retry._on_error is retry_._on_error - - def test_with_delay_noop(self): - retry_ = retry.Retry( - predicate=mock.sentinel.predicate, - initial=1, - maximum=2, - multiplier=3, - deadline=4, - on_error=mock.sentinel.on_error, - ) - new_retry = retry_.with_delay() - assert retry_ is not new_retry - assert new_retry._initial == retry_._initial - assert new_retry._maximum == retry_._maximum - assert new_retry._multiplier == retry_._multiplier - def test_with_delay(self): - retry_ = retry.Retry( - predicate=mock.sentinel.predicate, - initial=1, - maximum=2, - multiplier=3, - deadline=4, - on_error=mock.sentinel.on_error, - ) - new_retry = retry_.with_delay(initial=5, maximum=6, multiplier=7) - assert retry_ is not new_retry - assert new_retry._initial == 5 - assert new_retry._maximum == 6 - assert new_retry._multiplier == 7 - - # the rest of the attributes should remain the same - assert new_retry._deadline == retry_._deadline - assert new_retry._predicate is retry_._predicate - assert new_retry._on_error is retry_._on_error - - def test_with_delay_partial_options(self): - retry_ = retry.Retry( - predicate=mock.sentinel.predicate, - initial=1, - maximum=2, - multiplier=3, - deadline=4, - on_error=mock.sentinel.on_error, - ) - new_retry = retry_.with_delay(initial=4) - assert retry_ is not new_retry - assert new_retry._initial == 4 - assert new_retry._maximum == 2 - assert new_retry._multiplier == 3 - - new_retry = retry_.with_delay(maximum=4) - assert retry_ is not new_retry - assert new_retry._initial == 1 - assert new_retry._maximum == 4 - assert new_retry._multiplier == 3 - - new_retry = retry_.with_delay(multiplier=4) - assert retry_ is not new_retry - assert new_retry._initial == 1 - assert new_retry._maximum == 2 - assert new_retry._multiplier == 4 - - # the rest of the attributes should 
remain the same - assert new_retry._deadline == retry_._deadline - assert new_retry._predicate is retry_._predicate - assert new_retry._on_error is retry_._on_error +class TestRetry(Test_BaseRetry): + def _make_one(self, *args, **kwargs): + return retry.Retry(*args, **kwargs) def test___str__(self): def if_exception_type(exc): @@ -330,7 +190,7 @@ def if_exception_type(exc): initial=1.0, maximum=60.0, multiplier=2.0, - deadline=120.0, + timeout=120.0, on_error=None, ) assert re.match( @@ -361,7 +221,6 @@ def test___call___and_execute_success(self, sleep): @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n) @mock.patch("time.sleep", autospec=True) def test___call___and_execute_retry(self, sleep, uniform): - on_error = mock.Mock(spec=["__call__"], side_effect=[None]) retry_ = retry.Retry(predicate=retry.if_exception_type(ValueError)) @@ -382,21 +241,17 @@ def test___call___and_execute_retry(self, sleep, uniform): @mock.patch("random.uniform", autospec=True, side_effect=lambda m, n: n) @mock.patch("time.sleep", autospec=True) - def test___call___and_execute_retry_hitting_deadline(self, sleep, uniform): - + def test___call___and_execute_retry_hitting_timeout(self, sleep, uniform): on_error = mock.Mock(spec=["__call__"], side_effect=[None] * 10) retry_ = retry.Retry( predicate=retry.if_exception_type(ValueError), initial=1.0, maximum=1024.0, multiplier=2.0, - deadline=30.9, + timeout=30.9, ) - utcnow = datetime.datetime.utcnow() - utcnow_patcher = mock.patch( - "google.api_core.datetime_helpers.utcnow", return_value=utcnow - ) + monotonic_patcher = mock.patch("time.monotonic", return_value=0) target = mock.Mock(spec=["__call__"], side_effect=[ValueError()] * 10) # __name__ is needed by functools.partial. @@ -405,11 +260,11 @@ def test___call___and_execute_retry_hitting_deadline(self, sleep, uniform): decorated = retry_(target, on_error=on_error) target.assert_not_called() - with utcnow_patcher as patched_utcnow: + with monotonic_patcher as patched_monotonic: # Make sure that calls to fake time.sleep() also advance the mocked # time clock. def increase_time(sleep_delay): - patched_utcnow.return_value += datetime.timedelta(seconds=sleep_delay) + patched_monotonic.return_value += sleep_delay sleep.side_effect = increase_time @@ -429,7 +284,7 @@ def increase_time(sleep_delay): # Next attempt would be scheduled in 16 secs, 15 + 16 = 31 > 30.9, thus # we do not even wait for it to be scheduled (30.9 is configured timeout). # This changes the previous logic of shortening the last attempt to fit - # in the deadline. The previous logic was removed to make Python retry + # in the timeout. The previous logic was removed to make Python retry # logic consistent with the other languages and to not disrupt the # randomized retry delays distribution by artificially increasing a # probability of scheduling two (instead of one) last attempts with very diff --git a/tests/unit/test_bidi.py b/tests/unit/test_bidi.py index 84ac9dc52..4a8eb74fa 100644 --- a/tests/unit/test_bidi.py +++ b/tests/unit/test_bidi.py @@ -16,8 +16,14 @@ import logging import queue import threading +import time + +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER # noqa: F401 +except ImportError: # pragma: NO COVER + import mock # type: ignore -import mock import pytest try: @@ -291,11 +297,23 @@ def test_close(self): # ensure the request queue was signaled to stop. 
assert bidi_rpc.pending_requests == 1 assert bidi_rpc._request_queue.get() is None + # ensure request and callbacks are cleaned up + assert bidi_rpc._initial_request is None + assert not bidi_rpc._callbacks - def test_close_no_rpc(self): + def test_close_with_no_rpc(self): bidi_rpc = bidi.BidiRpc(None) bidi_rpc.close() + assert bidi_rpc.call is None + assert bidi_rpc.is_active is False + # ensure the request queue was signaled to stop. + assert bidi_rpc.pending_requests == 1 + assert bidi_rpc._request_queue.get() is None + # ensure request and callbacks are cleaned up + assert bidi_rpc._initial_request is None + assert not bidi_rpc._callbacks + def test_send(self): rpc, call = make_rpc() bidi_rpc = bidi.BidiRpc(rpc) @@ -618,6 +636,8 @@ def cancel_side_effect(): assert bidi_rpc.pending_requests == 1 assert bidi_rpc._request_queue.get() is None assert bidi_rpc._finalized + assert bidi_rpc._initial_request is None + assert not bidi_rpc._callbacks def test_reopen_failure_on_rpc_restart(self): error1 = ValueError("1") @@ -772,6 +792,7 @@ def on_response(response): consumer.stop() assert consumer.is_active is False + assert consumer._on_response is None def test_wake_on_error(self): should_continue = threading.Event() @@ -816,7 +837,13 @@ def test_rpc_callback_fires_when_consumer_start_fails(self): bidi_rpc._start_rpc.side_effect = expected_exception consumer = bidi.BackgroundConsumer(bidi_rpc, on_response=None) + consumer.start() + + # Wait for the consumer's thread to exit. + while consumer.is_active: + pass # pragma: NO COVER + assert callback.call_args.args[0] == grpc.StatusCode.INVALID_ARGUMENT def test_consumer_expected_error(self, caplog): @@ -879,6 +906,60 @@ def close_side_effect(): consumer.stop() assert consumer.is_active is False + assert consumer._on_response is None # calling stop twice should not result in an error. consumer.stop() + + def test_stop_error_logs(self, caplog): + """ + Closing the client should result in no internal error logs + + https://github.com/googleapis/python-api-core/issues/788 + """ + caplog.set_level(logging.DEBUG) + bidi_rpc = mock.create_autospec(bidi.BidiRpc, instance=True) + bidi_rpc.is_active = True + on_response = mock.Mock(spec=["__call__"]) + + consumer = bidi.BackgroundConsumer(bidi_rpc, on_response) + + consumer.start() + consumer.stop() + # let the background thread run for a while before exiting + time.sleep(0.1) + bidi_rpc.is_active = False + # running thread should not result in error logs + error_logs = [r.message for r in caplog.records if r.levelname == "ERROR"] + assert not error_logs, f"Found unexpected ERROR logs: {error_logs}" + bidi_rpc.is_active = False + + def test_fatal_exceptions_can_inform_consumer(self, caplog): + """ + https://github.com/googleapis/python-api-core/issues/820 + Exceptions thrown in the BackgroundConsumer not caught by `should_recover` / `should_terminate` + on the RPC should be bubbled back to the caller through `on_fatal_exception`, if passed. 
+ """ + caplog.set_level(logging.DEBUG) + + for fatal_exception in ( + ValueError("some non-api error"), + exceptions.PermissionDenied("some api error"), + ): + bidi_rpc = mock.create_autospec(bidi.ResumableBidiRpc, instance=True) + bidi_rpc.is_active = True + on_response = mock.Mock(spec=["__call__"]) + + on_fatal_exception = mock.Mock(spec=["__call__"]) + + bidi_rpc.open.side_effect = fatal_exception + + consumer = bidi.BackgroundConsumer( + bidi_rpc, on_response, on_fatal_exception + ) + + consumer.start() + # let the background thread run for a while before exiting + time.sleep(0.1) + + on_fatal_exception.assert_called_once_with(fatal_exception) diff --git a/tests/unit/test_client_info.py b/tests/unit/test_client_info.py index 3361fef6e..3eacabcaf 100644 --- a/tests/unit/test_client_info.py +++ b/tests/unit/test_client_info.py @@ -46,6 +46,7 @@ def test_constructor_options(): client_library_version="5", user_agent="6", rest_version="7", + protobuf_runtime_version="8", ) assert info.python_version == "1" @@ -55,11 +56,15 @@ def test_constructor_options(): assert info.client_library_version == "5" assert info.user_agent == "6" assert info.rest_version == "7" + assert info.protobuf_runtime_version == "8" def test_to_user_agent_minimal(): info = client_info.ClientInfo( - python_version="1", api_core_version="2", grpc_version=None + python_version="1", + api_core_version="2", + grpc_version=None, + protobuf_runtime_version=None, ) user_agent = info.to_user_agent() @@ -75,11 +80,12 @@ def test_to_user_agent_full(): gapic_version="4", client_library_version="5", user_agent="app-name/1.0", + protobuf_runtime_version="6", ) user_agent = info.to_user_agent() - assert user_agent == "app-name/1.0 gl-python/1 grpc/2 gax/3 gapic/4 gccl/5" + assert user_agent == "app-name/1.0 gl-python/1 grpc/2 gax/3 gapic/4 gccl/5 pb/6" def test_to_user_agent_rest(): diff --git a/tests/unit/test_client_logging.py b/tests/unit/test_client_logging.py new file mode 100644 index 000000000..c73b269fe --- /dev/null +++ b/tests/unit/test_client_logging.py @@ -0,0 +1,139 @@ +import json +import logging +from unittest import mock + +from google.api_core.client_logging import ( + setup_logging, + initialize_logging, + StructuredLogFormatter, +) + + +def reset_logger(scope): + logger = logging.getLogger(scope) + logger.handlers = [] + logger.setLevel(logging.NOTSET) + logger.propagate = True + + +def test_setup_logging_w_no_scopes(): + with mock.patch("google.api_core.client_logging._BASE_LOGGER_NAME", "foogle"): + setup_logging() + base_logger = logging.getLogger("foogle") + assert base_logger.handlers == [] + assert not base_logger.propagate + assert base_logger.level == logging.NOTSET + + reset_logger("foogle") + + +def test_setup_logging_w_base_scope(): + with mock.patch("google.api_core.client_logging._BASE_LOGGER_NAME", "foogle"): + setup_logging("foogle") + base_logger = logging.getLogger("foogle") + assert isinstance(base_logger.handlers[0], logging.StreamHandler) + assert not base_logger.propagate + assert base_logger.level == logging.DEBUG + + reset_logger("foogle") + + +def test_setup_logging_w_configured_scope(): + with mock.patch("google.api_core.client_logging._BASE_LOGGER_NAME", "foogle"): + base_logger = logging.getLogger("foogle") + base_logger.propagate = False + setup_logging("foogle") + assert base_logger.handlers == [] + assert not base_logger.propagate + assert base_logger.level == logging.NOTSET + + reset_logger("foogle") + + +def test_setup_logging_w_module_scope(): + with 
mock.patch("google.api_core.client_logging._BASE_LOGGER_NAME", "foogle"): + setup_logging("foogle.bar") + + base_logger = logging.getLogger("foogle") + assert base_logger.handlers == [] + assert not base_logger.propagate + assert base_logger.level == logging.NOTSET + + module_logger = logging.getLogger("foogle.bar") + assert isinstance(module_logger.handlers[0], logging.StreamHandler) + assert not module_logger.propagate + assert module_logger.level == logging.DEBUG + + reset_logger("foogle") + reset_logger("foogle.bar") + + +def test_setup_logging_w_incorrect_scope(): + with mock.patch("google.api_core.client_logging._BASE_LOGGER_NAME", "foogle"): + setup_logging("abc") + + base_logger = logging.getLogger("foogle") + assert base_logger.handlers == [] + assert not base_logger.propagate + assert base_logger.level == logging.NOTSET + + # TODO(https://github.com/googleapis/python-api-core/issues/759): update test once we add logic to ignore an incorrect scope. + logger = logging.getLogger("abc") + assert isinstance(logger.handlers[0], logging.StreamHandler) + assert not logger.propagate + assert logger.level == logging.DEBUG + + reset_logger("foogle") + reset_logger("abc") + + +def test_initialize_logging(): + with mock.patch("os.getenv", return_value="foogle.bar"): + with mock.patch("google.api_core.client_logging._BASE_LOGGER_NAME", "foogle"): + initialize_logging() + + base_logger = logging.getLogger("foogle") + assert base_logger.handlers == [] + assert not base_logger.propagate + assert base_logger.level == logging.NOTSET + + module_logger = logging.getLogger("foogle.bar") + assert isinstance(module_logger.handlers[0], logging.StreamHandler) + assert not module_logger.propagate + assert module_logger.level == logging.DEBUG + + # Check that `initialize_logging()` is a no-op after the first time by verifying that user-set configs are not modified: + base_logger.propagate = True + module_logger.propagate = True + + initialize_logging() + + assert base_logger.propagate + assert module_logger.propagate + + reset_logger("foogle") + reset_logger("foogle.bar") + + +def test_structured_log_formatter(): + # TODO(https://github.com/googleapis/python-api-core/issues/761): Test additional fields when implemented. + record = logging.LogRecord( + name="Appelation", + level=logging.DEBUG, + msg="This is a test message.", + pathname="some/path", + lineno=25, + args=None, + exc_info=None, + ) + + # Extra fields: + record.rpcName = "bar" + + formatted_msg = StructuredLogFormatter().format(record) + parsed_msg = json.loads(formatted_msg) + + assert parsed_msg["name"] == "Appelation" + assert parsed_msg["severity"] == "DEBUG" + assert parsed_msg["message"] == "This is a test message." 
+ assert parsed_msg["rpcName"] == "bar" diff --git a/tests/unit/test_client_options.py b/tests/unit/test_client_options.py index 336ceeabf..54558eea0 100644 --- a/tests/unit/test_client_options.py +++ b/tests/unit/test_client_options.py @@ -14,6 +14,7 @@ from re import match import pytest +from ..helpers import warn_deprecated_credentials_file from google.api_core import client_options @@ -27,18 +28,19 @@ def get_client_encrypted_cert(): def test_constructor(): - - options = client_options.ClientOptions( - api_endpoint="foo.googleapis.com", - client_cert_source=get_client_cert, - quota_project_id="quote-proj", - credentials_file="path/to/credentials.json", - scopes=[ - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/cloud-platform.read-only", - ], - api_audience="foo2.googleapis.com", - ) + with warn_deprecated_credentials_file(): + options = client_options.ClientOptions( + api_endpoint="foo.googleapis.com", + client_cert_source=get_client_cert, + quota_project_id="quote-proj", + credentials_file="path/to/credentials.json", + scopes=[ + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloud-platform.read-only", + ], + api_audience="foo2.googleapis.com", + universe_domain="googleapis.com", + ) assert options.api_endpoint == "foo.googleapis.com" assert options.client_cert_source() == (b"cert", b"key") @@ -49,10 +51,10 @@ def test_constructor(): "https://www.googleapis.com/auth/cloud-platform.read-only", ] assert options.api_audience == "foo2.googleapis.com" + assert options.universe_domain == "googleapis.com" def test_constructor_with_encrypted_cert_source(): - options = client_options.ClientOptions( api_endpoint="foo.googleapis.com", client_encrypted_cert_source=get_client_encrypted_cert, @@ -76,7 +78,6 @@ def test_constructor_with_both_cert_sources(): def test_constructor_with_api_key(): - options = client_options.ClientOptions( api_endpoint="foo.googleapis.com", client_cert_source=get_client_cert, @@ -100,16 +101,18 @@ def test_constructor_with_api_key(): def test_constructor_with_both_api_key_and_credentials_file(): with pytest.raises(ValueError): - client_options.ClientOptions( - api_key="api-key", - credentials_file="path/to/credentials.json", - ) + with warn_deprecated_credentials_file(): + client_options.ClientOptions( + api_key="api-key", + credentials_file="path/to/credentials.json", + ) def test_from_dict(): options = client_options.from_dict( { "api_endpoint": "foo.googleapis.com", + "universe_domain": "googleapis.com", "client_cert_source": get_client_cert, "quota_project_id": "quote-proj", "credentials_file": "path/to/credentials.json", @@ -122,6 +125,7 @@ def test_from_dict(): ) assert options.api_endpoint == "foo.googleapis.com" + assert options.universe_domain == "googleapis.com" assert options.client_cert_source() == (b"cert", b"key") assert options.quota_project_id == "quote-proj" assert options.credentials_file == "path/to/credentials.json" @@ -148,6 +152,7 @@ def test_repr(): expected_keys = set( [ "api_endpoint", + "universe_domain", "client_cert_source", "client_encrypted_cert_source", "quota_project_id", diff --git a/tests/unit/test_exceptions.py b/tests/unit/test_exceptions.py index 07a368175..e3f8f909a 100644 --- a/tests/unit/test_exceptions.py +++ b/tests/unit/test_exceptions.py @@ -14,8 +14,8 @@ import http.client import json +from unittest import mock -import mock import pytest import requests diff --git a/tests/unit/test_extended_operation.py b/tests/unit/test_extended_operation.py index 
53af52047..ab5506624 100644 --- a/tests/unit/test_extended_operation.py +++ b/tests/unit/test_extended_operation.py @@ -15,8 +15,8 @@ import dataclasses import enum import typing +from unittest import mock -import mock import pytest from google.api_core import exceptions diff --git a/tests/unit/test_general_helpers.py b/tests/unit/test_general_helpers.py deleted file mode 100644 index 82d6d4b64..000000000 --- a/tests/unit/test_general_helpers.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright 2022, Google LLC All rights reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. diff --git a/tests/unit/test_grpc_helpers.py b/tests/unit/test_grpc_helpers.py index 4eccbcaa8..ed4a92249 100644 --- a/tests/unit/test_grpc_helpers.py +++ b/tests/unit/test_grpc_helpers.py @@ -12,8 +12,10 @@ # See the License for the specific language governing permissions and # limitations under the License. -import mock +from unittest import mock + import pytest +from ..helpers import warn_deprecated_credentials_file try: import grpc @@ -195,6 +197,23 @@ def test_trailing_metadata(self): wrapped.trailing_metadata.assert_called_once_with() +class TestGrpcStream(Test_StreamingResponseIterator): + @staticmethod + def _make_one(wrapped, **kw): + return grpc_helpers.GrpcStream(wrapped, **kw) + + def test_grpc_stream_attributes(self): + """ + Should be both a grpc.Call and an iterable + """ + call = self._make_one(None) + assert isinstance(call, grpc.Call) + # should implement __iter__ + assert hasattr(call, "__iter__") + it = call.__iter__() + assert hasattr(it, "__next__") + + def test_wrap_stream_okay(): expected_responses = [1, 2, 3] callable_ = mock.Mock(spec=["__call__"], return_value=iter(expected_responses)) @@ -348,38 +367,72 @@ def test_wrap_errors_streaming(wrap_stream_errors): wrap_stream_errors.assert_called_once_with(callable_) -@mock.patch("grpc.composite_channel_credentials") +@pytest.mark.parametrize( + "attempt_direct_path,target,expected_target", + [ + (None, "example.com:443", "example.com:443"), + (False, "example.com:443", "example.com:443"), + (True, "example.com:443", "google-c2p:///example.com"), + (True, "dns:///example.com", "google-c2p:///example.com"), + (True, "another-c2p:///example.com", "another-c2p:///example.com"), + ], +) +@mock.patch("grpc.compute_engine_channel_credentials") @mock.patch( "google.auth.default", autospec=True, return_value=(mock.sentinel.credentials, mock.sentinel.project), ) @mock.patch("grpc.secure_channel") -def test_create_channel_implicit(grpc_secure_channel, default, composite_creds_call): - target = "example.com:443" +def test_create_channel_implicit( + grpc_secure_channel, + google_auth_default, + composite_creds_call, + attempt_direct_path, + target, + expected_target, +): composite_creds = composite_creds_call.return_value - channel = grpc_helpers.create_channel(target, compression=grpc.Compression.Gzip) + channel = grpc_helpers.create_channel( + target, + compression=grpc.Compression.Gzip, + attempt_direct_path=attempt_direct_path, + ) assert channel is 
grpc_secure_channel.return_value - default.assert_called_once_with(scopes=None, default_scopes=None) + google_auth_default.assert_called_once_with(scopes=None, default_scopes=None) if grpc_helpers.HAS_GRPC_GCP: # pragma: NO COVER - grpc_secure_channel.assert_called_once_with(target, composite_creds, None) + # The original target is the expected target + expected_target = target + grpc_secure_channel.assert_called_once_with( + expected_target, composite_creds, None + ) else: grpc_secure_channel.assert_called_once_with( - target, composite_creds, compression=grpc.Compression.Gzip + expected_target, composite_creds, compression=grpc.Compression.Gzip ) +@pytest.mark.parametrize( + "attempt_direct_path,target, expected_target", + [ + (None, "example.com:443", "example.com:443"), + (False, "example.com:443", "example.com:443"), + (True, "example.com:443", "google-c2p:///example.com"), + (True, "dns:///example.com", "google-c2p:///example.com"), + (True, "another-c2p:///example.com", "another-c2p:///example.com"), + ], +) @mock.patch("google.auth.transport.grpc.AuthMetadataPlugin", autospec=True) @mock.patch( "google.auth.transport.requests.Request", autospec=True, return_value=mock.sentinel.Request, ) -@mock.patch("grpc.composite_channel_credentials") +@mock.patch("grpc.compute_engine_channel_credentials") @mock.patch( "google.auth.default", autospec=True, @@ -387,29 +440,48 @@ def test_create_channel_implicit(grpc_secure_channel, default, composite_creds_c ) @mock.patch("grpc.secure_channel") def test_create_channel_implicit_with_default_host( - grpc_secure_channel, default, composite_creds_call, request, auth_metadata_plugin + grpc_secure_channel, + google_auth_default, + composite_creds_call, + request, + auth_metadata_plugin, + attempt_direct_path, + target, + expected_target, ): - target = "example.com:443" default_host = "example.com" composite_creds = composite_creds_call.return_value - channel = grpc_helpers.create_channel(target, default_host=default_host) + channel = grpc_helpers.create_channel( + target, default_host=default_host, attempt_direct_path=attempt_direct_path + ) assert channel is grpc_secure_channel.return_value - default.assert_called_once_with(scopes=None, default_scopes=None) + google_auth_default.assert_called_once_with(scopes=None, default_scopes=None) auth_metadata_plugin.assert_called_once_with( mock.sentinel.credentials, mock.sentinel.Request, default_host=default_host ) if grpc_helpers.HAS_GRPC_GCP: # pragma: NO COVER - grpc_secure_channel.assert_called_once_with(target, composite_creds, None) + # The original target is the expected target + expected_target = target + grpc_secure_channel.assert_called_once_with( + expected_target, composite_creds, None + ) else: grpc_secure_channel.assert_called_once_with( - target, composite_creds, compression=None + expected_target, composite_creds, compression=None ) +@pytest.mark.parametrize( + "attempt_direct_path", + [ + None, + False, + ], +) @mock.patch("grpc.composite_channel_credentials") @mock.patch( "google.auth.default", @@ -418,13 +490,15 @@ def test_create_channel_implicit_with_default_host( ) @mock.patch("grpc.secure_channel") def test_create_channel_implicit_with_ssl_creds( - grpc_secure_channel, default, composite_creds_call + grpc_secure_channel, default, composite_creds_call, attempt_direct_path ): target = "example.com:443" ssl_creds = grpc.ssl_channel_credentials() - grpc_helpers.create_channel(target, ssl_credentials=ssl_creds) + grpc_helpers.create_channel( + target, ssl_credentials=ssl_creds, 
attempt_direct_path=attempt_direct_path + ) default.assert_called_once_with(scopes=None, default_scopes=None) @@ -439,7 +513,18 @@ def test_create_channel_implicit_with_ssl_creds( ) -@mock.patch("grpc.composite_channel_credentials") +def test_create_channel_implicit_with_ssl_creds_attempt_direct_path_true(): + target = "example.com:443" + ssl_creds = grpc.ssl_channel_credentials() + with pytest.raises( + ValueError, match="Using ssl_credentials with Direct Path is not supported" + ): + grpc_helpers.create_channel( + target, ssl_credentials=ssl_creds, attempt_direct_path=True + ) + + +@mock.patch("grpc.compute_engine_channel_credentials") @mock.patch( "google.auth.default", autospec=True, @@ -466,7 +551,7 @@ def test_create_channel_implicit_with_scopes( ) -@mock.patch("grpc.composite_channel_credentials") +@mock.patch("grpc.compute_engine_channel_credentials") @mock.patch( "google.auth.default", autospec=True, @@ -497,14 +582,15 @@ def test_create_channel_explicit_with_duplicate_credentials(): target = "example.com:443" with pytest.raises(exceptions.DuplicateCredentialArgs): - grpc_helpers.create_channel( - target, - credentials_file="credentials.json", - credentials=mock.sentinel.credentials, - ) + with warn_deprecated_credentials_file(): + grpc_helpers.create_channel( + target, + credentials_file="credentials.json", + credentials=mock.sentinel.credentials, + ) -@mock.patch("grpc.composite_channel_credentials") +@mock.patch("grpc.compute_engine_channel_credentials") @mock.patch("google.auth.credentials.with_scopes_if_required", autospec=True) @mock.patch("grpc.secure_channel") def test_create_channel_explicit(grpc_secure_channel, auth_creds, composite_creds_call): @@ -527,7 +613,7 @@ def test_create_channel_explicit(grpc_secure_channel, auth_creds, composite_cred ) -@mock.patch("grpc.composite_channel_credentials") +@mock.patch("grpc.compute_engine_channel_credentials") @mock.patch("grpc.secure_channel") def test_create_channel_explicit_scoped(grpc_secure_channel, composite_creds_call): target = "example.com:443" @@ -553,7 +639,7 @@ def test_create_channel_explicit_scoped(grpc_secure_channel, composite_creds_cal ) -@mock.patch("grpc.composite_channel_credentials") +@mock.patch("grpc.compute_engine_channel_credentials") @mock.patch("grpc.secure_channel") def test_create_channel_explicit_default_scopes( grpc_secure_channel, composite_creds_call @@ -583,7 +669,7 @@ def test_create_channel_explicit_default_scopes( ) -@mock.patch("grpc.composite_channel_credentials") +@mock.patch("grpc.compute_engine_channel_credentials") @mock.patch("grpc.secure_channel") def test_create_channel_explicit_with_quota_project( grpc_secure_channel, composite_creds_call @@ -611,7 +697,7 @@ def test_create_channel_explicit_with_quota_project( ) -@mock.patch("grpc.composite_channel_credentials") +@mock.patch("grpc.compute_engine_channel_credentials") @mock.patch("grpc.secure_channel") @mock.patch( "google.auth.load_credentials_from_file", @@ -626,7 +712,8 @@ def test_create_channel_with_credentials_file( credentials_file = "/path/to/credentials/file.json" composite_creds = composite_creds_call.return_value - channel = grpc_helpers.create_channel(target, credentials_file=credentials_file) + with warn_deprecated_credentials_file(): + channel = grpc_helpers.create_channel(target, credentials_file=credentials_file) google.auth.load_credentials_from_file.assert_called_once_with( credentials_file, scopes=None, default_scopes=None @@ -642,7 +729,7 @@ def test_create_channel_with_credentials_file( ) 
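# A rough sketch (assumed helper name, not the library's actual function) of
# the target rewrite that the attempt_direct_path cases parametrized above
# expect: bare host:port and dns:/// targets are rewritten to the google-c2p
# scheme, while any other explicit scheme is left untouched.
def sketch_direct_path_target(target: str, attempt_direct_path: bool) -> str:
    if not attempt_direct_path:
        return target
    scheme_end = target.find("://")
    if scheme_end != -1:
        if target[:scheme_end] != "dns":
            return target  # e.g. "another-c2p:///example.com" is kept as-is
        target = target[scheme_end + 3 :].lstrip("/")
    return "google-c2p:///" + target.split(":")[0]  # strip any trailing port

assert sketch_direct_path_target("example.com:443", False) == "example.com:443"
assert sketch_direct_path_target("example.com:443", True) == "google-c2p:///example.com"
assert sketch_direct_path_target("dns:///example.com", True) == "google-c2p:///example.com"
assert sketch_direct_path_target("another-c2p:///example.com", True) == "another-c2p:///example.com"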
-@mock.patch("grpc.composite_channel_credentials") +@mock.patch("grpc.compute_engine_channel_credentials") @mock.patch("grpc.secure_channel") @mock.patch( "google.auth.load_credentials_from_file", @@ -658,9 +745,10 @@ def test_create_channel_with_credentials_file_and_scopes( credentials_file = "/path/to/credentials/file.json" composite_creds = composite_creds_call.return_value - channel = grpc_helpers.create_channel( - target, credentials_file=credentials_file, scopes=scopes - ) + with warn_deprecated_credentials_file(): + channel = grpc_helpers.create_channel( + target, credentials_file=credentials_file, scopes=scopes + ) google.auth.load_credentials_from_file.assert_called_once_with( credentials_file, scopes=scopes, default_scopes=None @@ -676,7 +764,7 @@ def test_create_channel_with_credentials_file_and_scopes( ) -@mock.patch("grpc.composite_channel_credentials") +@mock.patch("grpc.compute_engine_channel_credentials") @mock.patch("grpc.secure_channel") @mock.patch( "google.auth.load_credentials_from_file", @@ -692,9 +780,10 @@ def test_create_channel_with_credentials_file_and_default_scopes( credentials_file = "/path/to/credentials/file.json" composite_creds = composite_creds_call.return_value - channel = grpc_helpers.create_channel( - target, credentials_file=credentials_file, default_scopes=default_scopes - ) + with warn_deprecated_credentials_file(): + channel = grpc_helpers.create_channel( + target, credentials_file=credentials_file, default_scopes=default_scopes + ) load_credentials_from_file.assert_called_once_with( credentials_file, scopes=None, default_scopes=default_scopes diff --git a/tests/unit/test_iam.py b/tests/unit/test_iam.py index fbd242e56..3de152887 100644 --- a/tests/unit/test_iam.py +++ b/tests/unit/test_iam.py @@ -167,14 +167,15 @@ def test_owners_getter(self): assert policy.owners == expected def test_owners_setter(self): - import warnings from google.api_core.iam import OWNER_ROLE MEMBER = "user:phred@example.com" expected = set([MEMBER]) policy = self._make_one() - with warnings.catch_warnings(record=True) as warned: + with pytest.warns( + DeprecationWarning, match="Assigning to 'owners' is deprecated." + ) as warned: policy.owners = [MEMBER] (warning,) = warned @@ -191,14 +192,15 @@ def test_editors_getter(self): assert policy.editors == expected def test_editors_setter(self): - import warnings from google.api_core.iam import EDITOR_ROLE MEMBER = "user:phred@example.com" expected = set([MEMBER]) policy = self._make_one() - with warnings.catch_warnings(record=True) as warned: + with pytest.warns( + DeprecationWarning, match="Assigning to 'editors' is deprecated." + ) as warned: policy.editors = [MEMBER] (warning,) = warned @@ -215,14 +217,15 @@ def test_viewers_getter(self): assert policy.viewers == expected def test_viewers_setter(self): - import warnings from google.api_core.iam import VIEWER_ROLE MEMBER = "user:phred@example.com" expected = set([MEMBER]) policy = self._make_one() - with warnings.catch_warnings(record=True) as warned: + with pytest.warns( + DeprecationWarning, match="Assigning to 'viewers' is deprecated." 
+ ) as warned: policy.viewers = [MEMBER] (warning,) = warned @@ -337,12 +340,13 @@ def test_to_api_repr_binding_wo_members(self): assert policy.to_api_repr() == {} def test_to_api_repr_binding_w_duplicates(self): - import warnings from google.api_core.iam import OWNER_ROLE OWNER = "group:cloud-logs@google.com" policy = self._make_one() - with warnings.catch_warnings(record=True): + with pytest.warns( + DeprecationWarning, match="Assigning to 'owners' is deprecated." + ): policy.owners = [OWNER, OWNER] assert policy.to_api_repr() == { "bindings": [{"role": OWNER_ROLE, "members": [OWNER]}] diff --git a/tests/unit/test_operation.py b/tests/unit/test_operation.py index f029866c6..806807201 100644 --- a/tests/unit/test_operation.py +++ b/tests/unit/test_operation.py @@ -13,7 +13,8 @@ # limitations under the License. -import mock +from unittest import mock + import pytest try: diff --git a/tests/unit/test_packaging.py b/tests/unit/test_packaging.py new file mode 100644 index 000000000..8100a496b --- /dev/null +++ b/tests/unit/test_packaging.py @@ -0,0 +1,28 @@ +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os +import subprocess +import sys + + +def test_namespace_package_compat(tmp_path): + # The ``google`` namespace package should not be masked + # by the presence of ``google-api-core``. + google = tmp_path / "google" + google.mkdir() + google.joinpath("othermod.py").write_text("") + env = dict(os.environ, PYTHONPATH=str(tmp_path)) + cmd = [sys.executable, "-m", "google.othermod"] + subprocess.check_call(cmd, env=env) diff --git a/tests/unit/test_page_iterator.py b/tests/unit/test_page_iterator.py index cf43aedfb..ba0fbba4f 100644 --- a/tests/unit/test_page_iterator.py +++ b/tests/unit/test_page_iterator.py @@ -14,8 +14,8 @@ import math import types +from unittest import mock -import mock import pytest from google.api_core import page_iterator @@ -360,7 +360,6 @@ def test__has_next_page_w_max_results_not_done(self): assert iterator._has_next_page() def test__has_next_page_w_max_results_done(self): - iterator = page_iterator.HTTPIterator( mock.sentinel.client, mock.sentinel.api_request, diff --git a/tests/unit/test_path_template.py b/tests/unit/test_path_template.py index 808b36f34..c34dd0f32 100644 --- a/tests/unit/test_path_template.py +++ b/tests/unit/test_path_template.py @@ -13,8 +13,8 @@ # limitations under the License. from __future__ import unicode_literals +from unittest import mock -import mock import pytest from google.api import auth_pb2 diff --git a/tests/unit/test_protobuf_helpers.py b/tests/unit/test_protobuf_helpers.py index 3df45df5f..5678d3bc5 100644 --- a/tests/unit/test_protobuf_helpers.py +++ b/tests/unit/test_protobuf_helpers.py @@ -12,9 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-import sys - import pytest +import re from google.api import http_pb2 from google.api_core import protobuf_helpers @@ -67,7 +66,12 @@ def test_from_any_pb_failure(): in_message = any_pb2.Any() in_message.Pack(date_pb2.Date(year=1990)) - with pytest.raises(TypeError): + with pytest.raises( + TypeError, + match=re.escape( + "Could not convert `google.type.Date` with underlying type `google.protobuf.any_pb2.Any` to `google.type.TimeOfDay`" + ), + ): protobuf_helpers.from_any_pb(timeofday_pb2.TimeOfDay, in_message) @@ -476,11 +480,6 @@ def test_field_mask_different_level_diffs(): ] -@pytest.mark.skipif( - sys.version_info.major == 2, - reason="Field names with trailing underscores can only be created" - "through proto-plus, which is Python 3 only.", -) def test_field_mask_ignore_trailing_underscore(): import proto @@ -496,11 +495,6 @@ class Foo(proto.Message): ] -@pytest.mark.skipif( - sys.version_info.major == 2, - reason="Field names with trailing underscores can only be created" - "through proto-plus, which is Python 3 only.", -) def test_field_mask_ignore_trailing_underscore_with_nesting(): import proto diff --git a/tests/unit/test_python_package_support.py b/tests/unit/test_python_package_support.py new file mode 100644 index 000000000..6a93e7154 --- /dev/null +++ b/tests/unit/test_python_package_support.py @@ -0,0 +1,140 @@ +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
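# A rough sketch (assumed shape, returning a plain tuple where the module under
# test uses its DependencyVersion type) of the behaviour the new tests below pin
# down: read an installed distribution's version via importlib.metadata and fall
# back to a (None, "--") placeholder when the package cannot be found.
from importlib import metadata

def sketch_get_dependency_version(package_name):
    try:
        raw_version = metadata.version(package_name)
    except Exception:  # e.g. PackageNotFoundError / ImportError
        return (None, "--")
    # crude version-tuple parse; stops at the first non-numeric
    # segment ("1.2.3b1" -> (1, 2))
    parts = []
    for piece in raw_version.split("."):
        if not piece.isdigit():
            break
        parts.append(int(piece))
    return (tuple(parts), raw_version)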
+ +import sys +import warnings +from unittest.mock import patch + +import pytest + +from google.api_core._python_package_support import ( + parse_version_to_tuple, + get_dependency_version, + warn_deprecation_for_versions_less_than, + check_dependency_versions, + DependencyConstraint, + DependencyVersion, +) + + +@pytest.mark.parametrize("version_string_to_test", ["1.2.3", "1.2.3b1"]) +def test_get_dependency_version(mocker, version_string_to_test): + """Test get_dependency_version.""" + if sys.version_info >= (3, 8): + mock_importlib = mocker.patch( + "importlib.metadata.version", return_value=version_string_to_test + ) + else: + # TODO(https://github.com/googleapis/python-api-core/issues/835): Remove + # `importlib_metadata` once we drop support for Python 3.7 + mock_importlib = mocker.patch( + "importlib_metadata.version", return_value=version_string_to_test + ) + expected = DependencyVersion( + parse_version_to_tuple(version_string_to_test), version_string_to_test + ) + assert get_dependency_version("some-package") == expected + + mock_importlib.assert_called_once_with("some-package") + + # Test package not found + mock_importlib.side_effect = ImportError + assert get_dependency_version("not-a-package") == DependencyVersion(None, "--") + + +@patch("google.api_core._python_package_support._get_distribution_and_import_packages") +@patch("google.api_core._python_package_support.get_dependency_version") +def test_warn_deprecation_for_versions_less_than(mock_get_version, mock_get_packages): + """Test the deprecation warning logic.""" + # Mock the helper function to return predictable package strings + mock_get_packages.side_effect = [ + ("dep-package (dep.package)", "dep-package"), + ("my-package (my.package)", "my-package"), + ] + + mock_get_version.return_value = DependencyVersion( + parse_version_to_tuple("1.0.0"), "1.0.0" + ) + with pytest.warns(FutureWarning) as record: + warn_deprecation_for_versions_less_than("my.package", "dep.package", "2.0.0") + assert len(record) == 1 + assert ( + "DEPRECATION: Package my-package (my.package) depends on dep-package (dep.package)" + in str(record[0].message) + ) + + # Cases where no warning should be issued + with warnings.catch_warnings(record=True) as w: + warnings.simplefilter("always") # Capture all warnings + + # Case 2: Installed version is equal to required, should not warn. + mock_get_packages.reset_mock() + mock_get_version.return_value = DependencyVersion( + parse_version_to_tuple("2.0.0"), "2.0.0" + ) + warn_deprecation_for_versions_less_than("my.package", "dep.package", "2.0.0") + + # Case 3: Installed version is greater than required, should not warn. + mock_get_packages.reset_mock() + mock_get_version.return_value = DependencyVersion( + parse_version_to_tuple("3.0.0"), "3.0.0" + ) + warn_deprecation_for_versions_less_than("my.package", "dep.package", "2.0.0") + + # Case 4: Dependency not found, should not warn. + mock_get_packages.reset_mock() + mock_get_version.return_value = DependencyVersion(None, "--") + warn_deprecation_for_versions_less_than("my.package", "dep.package", "2.0.0") + + # Assert that no warnings were recorded + assert len(w) == 0 + + # Case 5: Custom message template. + mock_get_packages.reset_mock() + mock_get_packages.side_effect = [ + ("dep-package (dep.package)", "dep-package"), + ("my-package (my.package)", "my-package"), + ] + mock_get_version.return_value = DependencyVersion( + parse_version_to_tuple("1.0.0"), "1.0.0" + ) + template = "Custom warning for {dependency_package} used by {consumer_package}." 
+ with pytest.warns(FutureWarning) as record: + warn_deprecation_for_versions_less_than( + "my.package", "dep.package", "2.0.0", message_template=template + ) + assert len(record) == 1 + assert ( + "Custom warning for dep-package (dep.package) used by my-package (my.package)." + in str(record[0].message) + ) + + +@patch( + "google.api_core._python_package_support.warn_deprecation_for_versions_less_than" +) +def test_check_dependency_versions_with_custom_warnings(mock_warn): + """Test check_dependency_versions with custom warning parameters.""" + custom_warning1 = DependencyConstraint("pkg1", "1.0.0", "2.0.0") + custom_warning2 = DependencyConstraint("pkg2", "2.0.0", "3.0.0") + + check_dependency_versions("my-consumer", custom_warning1, custom_warning2) + + assert mock_warn.call_count == 2 + mock_warn.assert_any_call( + "my-consumer", "pkg1", "1.0.0", recommended_version="2.0.0" + ) + mock_warn.assert_any_call( + "my-consumer", "pkg2", "2.0.0", recommended_version="3.0.0" + ) diff --git a/tests/unit/test_python_version_support.py b/tests/unit/test_python_version_support.py new file mode 100644 index 000000000..76eb821e0 --- /dev/null +++ b/tests/unit/test_python_version_support.py @@ -0,0 +1,257 @@ +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import pytest +import datetime +import textwrap +import warnings +from collections import namedtuple + +from unittest.mock import patch + +# Code to be tested +from google.api_core._python_version_support import ( + _flatten_message, + check_python_version, + PythonVersionStatus, + PYTHON_VERSION_INFO, +) + +# Helper object for mocking sys.version_info +VersionInfoMock = namedtuple("VersionInfoMock", ["major", "minor"]) + + +def test_flatten_message(): + """Test that _flatten_message correctly dedents and flattens a string.""" + input_text = """ + This is a multi-line + string with some + indentation. + """ + expected_output = "This is a multi-line string with some indentation." + assert _flatten_message(input_text) == expected_output + + +def _create_failure_message( + expected, result, py_version, date, gapic_dep, py_eol, eol_warn, gapic_end +): + """Create a detailed failure message for a test.""" + return textwrap.dedent( # pragma: NO COVER + f""" + --- Test Failed --- + Expected status: {expected.name} + Received status: {result.name} + --------------------- + Context: + - Mocked Python Version: {py_version} + - Mocked Today's Date: {date} + Calculated Dates: + - gapic_deprecation: {gapic_dep} + - python_eol: {py_eol} + - eol_warning_starts: {eol_warn} + - gapic_end: {gapic_end} + """ + ) + + +def generate_tracked_version_test_cases(): + """ + Yields test parameters for all tracked versions and boundary conditions. 
+ """ + for version_tuple, version_info in PYTHON_VERSION_INFO.items(): + py_version_str = f"{version_tuple[0]}.{version_tuple[1]}" + gapic_dep = version_info.gapic_deprecation or ( + version_info.python_eol - datetime.timedelta(days=365) + ) + gapic_end = version_info.gapic_end or ( + version_info.python_eol + datetime.timedelta(weeks=1) + ) + eol_warning_starts = version_info.python_eol + datetime.timedelta(weeks=1) + + test_cases = { + "supported_before_deprecation_date": { + "date": gapic_dep - datetime.timedelta(days=1), + "expected": PythonVersionStatus.PYTHON_VERSION_SUPPORTED, + }, + "deprecated_on_deprecation_date": { + "date": gapic_dep, + "expected": PythonVersionStatus.PYTHON_VERSION_DEPRECATED, + }, + "deprecated_on_eol_date": { + "date": version_info.python_eol, + "expected": PythonVersionStatus.PYTHON_VERSION_DEPRECATED, + }, + "deprecated_before_eol_warning_starts": { + "date": eol_warning_starts - datetime.timedelta(days=1), + "expected": PythonVersionStatus.PYTHON_VERSION_DEPRECATED, + }, + "eol_on_eol_warning_date": { + "date": eol_warning_starts, + "expected": PythonVersionStatus.PYTHON_VERSION_EOL, + }, + "eol_on_gapic_end_date": { + "date": gapic_end, + "expected": PythonVersionStatus.PYTHON_VERSION_EOL, + }, + "unsupported_after_gapic_end_date": { + "date": gapic_end + datetime.timedelta(days=1), + "expected": PythonVersionStatus.PYTHON_VERSION_UNSUPPORTED, + }, + } + + for name, params in test_cases.items(): + yield pytest.param( + version_tuple, + params["date"], + params["expected"], + gapic_dep, + gapic_end, + eol_warning_starts, + id=f"{py_version_str}-{name}", + ) + + +@pytest.mark.parametrize( + "version_tuple, mock_date, expected_status, gapic_dep, gapic_end, eol_warning_starts", + generate_tracked_version_test_cases(), +) +def test_all_tracked_versions_and_date_scenarios( + version_tuple, mock_date, expected_status, gapic_dep, gapic_end, eol_warning_starts +): + """Test all outcomes for each tracked version using parametrization.""" + mock_py_v = VersionInfoMock(major=version_tuple[0], minor=version_tuple[1]) + + with patch("google.api_core._python_version_support.sys.version_info", mock_py_v): + # Supported versions should not issue warnings + if expected_status == PythonVersionStatus.PYTHON_VERSION_SUPPORTED: + with warnings.catch_warnings(record=True) as w: + warnings.simplefilter("always") + result = check_python_version(today=mock_date) + assert len(w) == 0 + # All other statuses should issue a warning + else: + with pytest.warns(FutureWarning) as record: + result = check_python_version(today=mock_date) + assert len(record) == 1 + + if result != expected_status: # pragma: NO COVER + py_version_str = f"{version_tuple[0]}.{version_tuple[1]}" + version_info = PYTHON_VERSION_INFO[version_tuple] + + fail_msg = _create_failure_message( + expected_status, + result, + py_version_str, + mock_date, + gapic_dep, + version_info.python_eol, + eol_warning_starts, + gapic_end, + ) + pytest.fail(fail_msg, pytrace=False) + + +def test_override_gapic_end_only(): + """Test behavior when only gapic_end is manually overridden.""" + version_tuple = (3, 9) + original_info = PYTHON_VERSION_INFO[version_tuple] + mock_py_version = VersionInfoMock(major=version_tuple[0], minor=version_tuple[1]) + + custom_gapic_end = original_info.python_eol + datetime.timedelta(days=212) + overridden_info = original_info._replace(gapic_end=custom_gapic_end) + + with patch( + "google.api_core._python_version_support.sys.version_info", mock_py_version + ): + with patch.dict( + 
"google.api_core._python_version_support.PYTHON_VERSION_INFO", + {version_tuple: overridden_info}, + ): + with pytest.warns(FutureWarning, match="past its end of life"): + result_before_boundary = check_python_version( + today=custom_gapic_end + datetime.timedelta(days=-1) + ) + assert result_before_boundary == PythonVersionStatus.PYTHON_VERSION_EOL + + with pytest.warns(FutureWarning, match="past its end of life"): + result_at_boundary = check_python_version(today=custom_gapic_end) + assert result_at_boundary == PythonVersionStatus.PYTHON_VERSION_EOL + + with pytest.warns(FutureWarning, match="non-supported Python version"): + result_after_boundary = check_python_version( + today=custom_gapic_end + datetime.timedelta(days=1) + ) + assert ( + result_after_boundary == PythonVersionStatus.PYTHON_VERSION_UNSUPPORTED + ) + + +def test_override_gapic_deprecation_only(): + """Test behavior when only gapic_deprecation is manually overridden.""" + version_tuple = (3, 9) + original_info = PYTHON_VERSION_INFO[version_tuple] + mock_py_version = VersionInfoMock(major=version_tuple[0], minor=version_tuple[1]) + + custom_gapic_dep = original_info.python_eol - datetime.timedelta(days=120) + overridden_info = original_info._replace(gapic_deprecation=custom_gapic_dep) + + with patch( + "google.api_core._python_version_support.sys.version_info", mock_py_version + ): + with patch.dict( + "google.api_core._python_version_support.PYTHON_VERSION_INFO", + {version_tuple: overridden_info}, + ): + result_before_boundary = check_python_version( + today=custom_gapic_dep - datetime.timedelta(days=1) + ) + assert ( + result_before_boundary == PythonVersionStatus.PYTHON_VERSION_SUPPORTED + ) + + with pytest.warns(FutureWarning, match="Google will stop supporting"): + result_at_boundary = check_python_version(today=custom_gapic_dep) + assert result_at_boundary == PythonVersionStatus.PYTHON_VERSION_DEPRECATED + + +def test_untracked_older_version_is_unsupported(): + """Test that an old, untracked version is unsupported and logs.""" + mock_py_version = VersionInfoMock(major=3, minor=6) + + with patch( + "google.api_core._python_version_support.sys.version_info", mock_py_version + ): + with pytest.warns(FutureWarning) as record: + mock_date = datetime.date(2025, 1, 15) + result = check_python_version(today=mock_date) + + assert result == PythonVersionStatus.PYTHON_VERSION_UNSUPPORTED + assert len(record) == 1 + assert "non-supported" in str(record[0].message) + + +def test_untracked_newer_version_is_supported(): + """Test that a new, untracked version is supported and does not log.""" + mock_py_version = VersionInfoMock(major=40, minor=0) + + with patch( + "google.api_core._python_version_support.sys.version_info", mock_py_version + ): + with warnings.catch_warnings(record=True) as w: + warnings.simplefilter("always") + mock_date = datetime.date(2025, 1, 15) + result = check_python_version(today=mock_date) + + assert result == PythonVersionStatus.PYTHON_VERSION_SUPPORTED + assert len(w) == 0 diff --git a/tests/unit/test_rest_streaming.py b/tests/unit/test_rest_streaming.py index a44c83c0a..0f998dfe7 100644 --- a/tests/unit/test_rest_streaming.py +++ b/tests/unit/test_rest_streaming.py @@ -24,47 +24,18 @@ import requests from google.api_core import rest_streaming -from google.protobuf import duration_pb2 -from google.protobuf import timestamp_pb2 +from google.api import http_pb2 +from google.api import httpbody_pb2 + +from ..helpers import Composer, Song, EchoResponse, parse_responses __protobuf__ = 
proto.module(package=__name__) SEED = int(time.time()) -logging.info(f"Starting rest streaming tests with random seed: {SEED}") +logging.info(f"Starting sync rest streaming tests with random seed: {SEED}") random.seed(SEED) -class Genre(proto.Enum): - GENRE_UNSPECIFIED = 0 - CLASSICAL = 1 - JAZZ = 2 - ROCK = 3 - - -class Composer(proto.Message): - given_name = proto.Field(proto.STRING, number=1) - family_name = proto.Field(proto.STRING, number=2) - relateds = proto.RepeatedField(proto.STRING, number=3) - indices = proto.MapField(proto.STRING, proto.STRING, number=4) - - -class Song(proto.Message): - composer = proto.Field(Composer, number=1) - title = proto.Field(proto.STRING, number=2) - lyrics = proto.Field(proto.STRING, number=3) - year = proto.Field(proto.INT32, number=4) - genre = proto.Field(Genre, number=5) - is_five_mins_longer = proto.Field(proto.BOOL, number=6) - score = proto.Field(proto.DOUBLE, number=7) - likes = proto.Field(proto.INT64, number=8) - duration = proto.Field(duration_pb2.Duration, number=9) - date_added = proto.Field(timestamp_pb2.Timestamp, number=10) - - -class EchoResponse(proto.Message): - content = proto.Field(proto.STRING, number=1) - - class ResponseMock(requests.Response): class _ResponseItr: def __init__(self, _response_bytes: bytes, random_split=False): @@ -94,123 +65,232 @@ def __init__( self._random_split = random_split self._response_message_cls = response_cls - def _parse_responses(self, responses: List[proto.Message]) -> bytes: - # json.dumps returns a string surrounded with quotes that need to be stripped - # in order to be an actual JSON. - json_responses = [ - self._response_message_cls.to_json(r).strip('"') for r in responses - ] - logging.info(f"Sending JSON stream: {json_responses}") - ret_val = "[{}]".format(",".join(json_responses)) - return bytes(ret_val, "utf-8") + def _parse_responses(self): + return parse_responses(self._response_message_cls, self._responses) def close(self): raise NotImplementedError() def iter_content(self, *args, **kwargs): return self._ResponseItr( - self._parse_responses(self._responses), + self._parse_responses(), random_split=self._random_split, ) -@pytest.mark.parametrize("random_split", [False]) -def test_next_simple(random_split): - responses = [EchoResponse(content="hello world"), EchoResponse(content="yes")] +@pytest.mark.parametrize( + "random_split,resp_message_is_proto_plus", + [(False, True), (False, False)], +) +def test_next_simple(random_split, resp_message_is_proto_plus): + if resp_message_is_proto_plus: + response_type = EchoResponse + responses = [EchoResponse(content="hello world"), EchoResponse(content="yes")] + else: + response_type = httpbody_pb2.HttpBody + responses = [ + httpbody_pb2.HttpBody(content_type="hello world"), + httpbody_pb2.HttpBody(content_type="yes"), + ] + resp = ResponseMock( - responses=responses, random_split=random_split, response_cls=EchoResponse + responses=responses, random_split=random_split, response_cls=response_type ) - itr = rest_streaming.ResponseIterator(resp, EchoResponse) + itr = rest_streaming.ResponseIterator(resp, response_type) assert list(itr) == responses -@pytest.mark.parametrize("random_split", [True, False]) -def test_next_nested(random_split): - responses = [ - Song(title="some song", composer=Composer(given_name="some name")), - Song(title="another song", date_added=datetime.datetime(2021, 12, 17)), - ] +@pytest.mark.parametrize( + "random_split,resp_message_is_proto_plus", + [ + (True, True), + (False, True), + (True, False), + (False, False), + ], 
+)
+def test_next_nested(random_split, resp_message_is_proto_plus):
+    if resp_message_is_proto_plus:
+        response_type = Song
+        responses = [
+            Song(title="some song", composer=Composer(given_name="some name")),
+            Song(title="another song", date_added=datetime.datetime(2021, 12, 17)),
+        ]
+    else:
+        # Although `http_pb2.HttpRule` is used in the response, any response
+        # message with a nested field would satisfy this test.
+        response_type = http_pb2.HttpRule
+        responses = [
+            http_pb2.HttpRule(
+                selector="some selector",
+                custom=http_pb2.CustomHttpPattern(kind="some kind"),
+            ),
+            http_pb2.HttpRule(
+                selector="another selector",
+                custom=http_pb2.CustomHttpPattern(path="some path"),
+            ),
+        ]
     resp = ResponseMock(
-        responses=responses, random_split=random_split, response_cls=Song
+        responses=responses, random_split=random_split, response_cls=response_type
     )
-    itr = rest_streaming.ResponseIterator(resp, Song)
+    itr = rest_streaming.ResponseIterator(resp, response_type)
     assert list(itr) == responses
 
 
-@pytest.mark.parametrize("random_split", [True, False])
-def test_next_stress(random_split):
+@pytest.mark.parametrize(
+    "random_split,resp_message_is_proto_plus",
+    [
+        (True, True),
+        (False, True),
+        (True, False),
+        (False, False),
+    ],
+)
+def test_next_stress(random_split, resp_message_is_proto_plus):
     n = 50
-    responses = [
-        Song(title="title_%d" % i, composer=Composer(given_name="name_%d" % i))
-        for i in range(n)
-    ]
+    if resp_message_is_proto_plus:
+        response_type = Song
+        responses = [
+            Song(title="title_%d" % i, composer=Composer(given_name="name_%d" % i))
+            for i in range(n)
+        ]
+    else:
+        response_type = http_pb2.HttpRule
+        responses = [
+            http_pb2.HttpRule(
+                selector="selector_%d" % i,
+                custom=http_pb2.CustomHttpPattern(path="path_%d" % i),
+            )
+            for i in range(n)
+        ]
     resp = ResponseMock(
-        responses=responses, random_split=random_split, response_cls=Song
+        responses=responses, random_split=random_split, response_cls=response_type
     )
-    itr = rest_streaming.ResponseIterator(resp, Song)
+    itr = rest_streaming.ResponseIterator(resp, response_type)
     assert list(itr) == responses
 
 
-@pytest.mark.parametrize("random_split", [True, False])
-def test_next_escaped_characters_in_string(random_split):
-    composer_with_relateds = Composer()
-    relateds = ["Artist A", "Artist B"]
-    composer_with_relateds.relateds = relateds
-
-    responses = [
-        Song(title='ti"tle\nfoo\tbar{}', composer=Composer(given_name="name\n\n\n")),
-        Song(
-            title='{"this is weird": "totally"}', composer=Composer(given_name="\\{}\\")
-        ),
-        Song(title='\\{"key": ["value",]}\\', composer=composer_with_relateds),
-    ]
+@pytest.mark.parametrize(
+    "random_split,resp_message_is_proto_plus",
+    [
+        (True, True),
+        (False, True),
+        (True, False),
+        (False, False),
+    ],
+)
+def test_next_escaped_characters_in_string(random_split, resp_message_is_proto_plus):
+    if resp_message_is_proto_plus:
+        response_type = Song
+        composer_with_relateds = Composer()
+        relateds = ["Artist A", "Artist B"]
+        composer_with_relateds.relateds = relateds
+
+        responses = [
+            Song(
+                title='ti"tle\nfoo\tbar{}', composer=Composer(given_name="name\n\n\n")
+            ),
+            Song(
+                title='{"this is weird": "totally"}',
+                composer=Composer(given_name="\\{}\\"),
+            ),
+            Song(title='\\{"key": ["value",]}\\', composer=composer_with_relateds),
+        ]
+    else:
+        response_type = http_pb2.Http
+        responses = [
+            http_pb2.Http(
+                rules=[
+                    http_pb2.HttpRule(
+                        selector='ti"tle\nfoo\tbar{}',
+                        custom=http_pb2.CustomHttpPattern(kind="name\n\n\n"),
+                    )
+                ]
+            ),
+            http_pb2.Http(
+                rules=[
+                    http_pb2.HttpRule(
+                        selector='{"this is weird": "totally"}',
+                        custom=http_pb2.CustomHttpPattern(kind="\\{}\\"),
+                    )
+                ]
+            ),
+            http_pb2.Http(
+                rules=[
+                    http_pb2.HttpRule(
+                        selector='\\{"key": ["value",]}\\',
+                        custom=http_pb2.CustomHttpPattern(kind="\\{}\\"),
+                    )
+                ]
+            ),
+        ]
     resp = ResponseMock(
-        responses=responses, random_split=random_split, response_cls=Song
+        responses=responses, random_split=random_split, response_cls=response_type
     )
-    itr = rest_streaming.ResponseIterator(resp, Song)
+    itr = rest_streaming.ResponseIterator(resp, response_type)
     assert list(itr) == responses
 
 
-def test_next_not_array():
+@pytest.mark.parametrize("response_type", [EchoResponse, httpbody_pb2.HttpBody])
+def test_next_not_array(response_type):
     with patch.object(
         ResponseMock, "iter_content", return_value=iter('{"hello": 0}')
     ) as mock_method:
-
-        resp = ResponseMock(responses=[], response_cls=EchoResponse)
-        itr = rest_streaming.ResponseIterator(resp, EchoResponse)
+        resp = ResponseMock(responses=[], response_cls=response_type)
+        itr = rest_streaming.ResponseIterator(resp, response_type)
         with pytest.raises(ValueError):
             next(itr)
         mock_method.assert_called_once()
 
 
-def test_cancel():
+@pytest.mark.parametrize("response_type", [EchoResponse, httpbody_pb2.HttpBody])
+def test_cancel(response_type):
     with patch.object(ResponseMock, "close", return_value=None) as mock_method:
-        resp = ResponseMock(responses=[], response_cls=EchoResponse)
-        itr = rest_streaming.ResponseIterator(resp, EchoResponse)
+        resp = ResponseMock(responses=[], response_cls=response_type)
+        itr = rest_streaming.ResponseIterator(resp, response_type)
         itr.cancel()
         mock_method.assert_called_once()
 
 
-def test_check_buffer():
+@pytest.mark.parametrize(
+    "response_type,return_value",
+    [
+        (EchoResponse, bytes('[{"content": "hello"}, {', "utf-8")),
+        (httpbody_pb2.HttpBody, bytes('[{"content_type": "hello"}, {', "utf-8")),
+    ],
+)
+def test_check_buffer(response_type, return_value):
     with patch.object(
         ResponseMock,
         "_parse_responses",
-        return_value=bytes('[{"content": "hello"}, {', "utf-8"),
+        return_value=return_value,
     ):
-        resp = ResponseMock(responses=[], response_cls=EchoResponse)
-        itr = rest_streaming.ResponseIterator(resp, EchoResponse)
+        resp = ResponseMock(responses=[], response_cls=response_type)
+        itr = rest_streaming.ResponseIterator(resp, response_type)
         with pytest.raises(ValueError):
             next(itr)
             next(itr)
 
 
-def test_next_html():
+@pytest.mark.parametrize("response_type", [EchoResponse, httpbody_pb2.HttpBody])
+def test_next_html(response_type):
     with patch.object(
         ResponseMock, "iter_content", return_value=iter("")
     ) as mock_method:
-
-        resp = ResponseMock(responses=[], response_cls=EchoResponse)
-        itr = rest_streaming.ResponseIterator(resp, EchoResponse)
+        resp = ResponseMock(responses=[], response_cls=response_type)
+        itr = rest_streaming.ResponseIterator(resp, response_type)
         with pytest.raises(ValueError):
             next(itr)
         mock_method.assert_called_once()
+
+
+def test_invalid_response_class():
+    class SomeClass:
+        pass
+
+    resp = ResponseMock(responses=[], response_cls=SomeClass)
+    with pytest.raises(
+        ValueError,
+        match="Response message class must be a subclass of proto.Message or google.protobuf.message.Message",
+    ):
+        rest_streaming.ResponseIterator(resp, SomeClass)
diff --git a/tests/unit/test_timeout.py b/tests/unit/test_timeout.py
index a83a2ecb0..8ce143f35 100644
--- a/tests/unit/test_timeout.py
+++ b/tests/unit/test_timeout.py
@@ -14,8 +14,8 @@
 
 import datetime
 import itertools
-
-import mock
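+# unittest.mock has shipped with the standard library since Python 3.3,
+# so the standalone "mock" package is no longer needed here.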
+import pytest
+from unittest import mock
 
 from google.api_core import timeout as timeouts
 
@@ -58,10 +58,10 @@ def test___str__(self):
 
     def test_apply(self):
         target = mock.Mock(spec=["__call__", "__name__"], __name__="target")
-        datetime.datetime.utcnow()
+        datetime.datetime.now(tz=datetime.timezone.utc)
         datetime.timedelta(seconds=1)
 
-        now = datetime.datetime.utcnow()
+        now = datetime.datetime.now(tz=datetime.timezone.utc)
 
         times = [
             now,
@@ -85,17 +85,17 @@ def _clock():
         wrapped()
         target.assert_called_with(timeout=3.0)
         wrapped()
-        target.assert_called_with(timeout=0.0)
+        target.assert_called_with(timeout=42.0)
         wrapped()
-        target.assert_called_with(timeout=0.0)
+        target.assert_called_with(timeout=42.0)
 
     def test_apply_no_timeout(self):
         target = mock.Mock(spec=["__call__", "__name__"], __name__="target")
-        datetime.datetime.utcnow()
+        datetime.datetime.now(tz=datetime.timezone.utc)
        datetime.timedelta(seconds=1)
 
-        now = datetime.datetime.utcnow()
+        now = datetime.datetime.now(tz=datetime.timezone.utc)
 
         times = [
             now,
@@ -122,7 +122,15 @@ def test_apply_passthrough(self):
 
         wrapped(1, 2, meep="moop")
 
-        target.assert_called_once_with(1, 2, meep="moop", timeout=42.0)
+        actual_arg_0 = target.call_args[0][0]
+        actual_arg_1 = target.call_args[0][1]
+        actual_arg_meep = target.call_args[1]["meep"]
+        actual_arg_timeout = target.call_args[1]["timeout"]
+
+        assert actual_arg_0 == 1
+        assert actual_arg_1 == 2
+        assert actual_arg_meep == "moop"
+        assert actual_arg_timeout == pytest.approx(42.0, abs=0.01)
 
 
 class TestConstantTimeout(object):
diff --git a/tests/unit/test_universe.py b/tests/unit/test_universe.py
new file mode 100644
index 000000000..214e00acc
--- /dev/null
+++ b/tests/unit/test_universe.py
@@ -0,0 +1,63 @@
+# Copyright 2024 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
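+
+# Tests for the universe-domain helpers in google.api_core.universe.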
+
+import pytest
+from google.api_core import universe
+
+
+class _Fake_Credentials:
+    def __init__(self, universe_domain=None):
+        if universe_domain:
+            self.universe_domain = universe_domain
+
+
+def test_determine_domain():
+    domain_client = "foo.com"
+    domain_env = "bar.com"
+
+    assert universe.determine_domain(domain_client, domain_env) == domain_client
+    assert universe.determine_domain(None, domain_env) == domain_env
+    assert universe.determine_domain(domain_client, None) == domain_client
+    assert universe.determine_domain(None, None) == universe.DEFAULT_UNIVERSE
+
+    with pytest.raises(universe.EmptyUniverseError):
+        universe.determine_domain("", None)
+
+    with pytest.raises(universe.EmptyUniverseError):
+        universe.determine_domain(None, "")
+
+
+def test_compare_domains():
+    fake_domain = "foo.com"
+    another_fake_domain = "bar.com"
+
+    assert universe.compare_domains(universe.DEFAULT_UNIVERSE, _Fake_Credentials())
+    assert universe.compare_domains(fake_domain, _Fake_Credentials(fake_domain))
+
+    with pytest.raises(universe.UniverseMismatchError) as excinfo:
+        universe.compare_domains(
+            universe.DEFAULT_UNIVERSE, _Fake_Credentials(fake_domain)
+        )
+    assert str(excinfo.value).find(universe.DEFAULT_UNIVERSE) >= 0
+    assert str(excinfo.value).find(fake_domain) >= 0
+
+    with pytest.raises(universe.UniverseMismatchError) as excinfo:
+        universe.compare_domains(fake_domain, _Fake_Credentials())
+    assert str(excinfo.value).find(fake_domain) >= 0
+    assert str(excinfo.value).find(universe.DEFAULT_UNIVERSE) >= 0
+
+    with pytest.raises(universe.UniverseMismatchError) as excinfo:
+        universe.compare_domains(fake_domain, _Fake_Credentials(another_fake_domain))
+    assert str(excinfo.value).find(fake_domain) >= 0
+    assert str(excinfo.value).find(another_fake_domain) >= 0
diff --git a/.github/.OwlBot.lock.yaml b/tests/unit/test_version_header.py
similarity index 60%
rename from .github/.OwlBot.lock.yaml
rename to tests/unit/test_version_header.py
index 453b540c1..ea7028e27 100644
--- a/.github/.OwlBot.lock.yaml
+++ b/tests/unit/test_version_header.py
@@ -1,4 +1,4 @@
-# Copyright 2023 Google LLC
+# Copyright 2024 Google LLC
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -11,7 +11,13 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-docker:
-  image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest
-  digest: sha256:caffe0a9277daeccc4d1de5c9b55ebba0901b57c2f713ec9c876b0d4ec064f61
-# created: 2023-11-08T19:46:45.022803742Z
+
+import pytest
+
+from google.api_core import version_header
+
+
+@pytest.mark.parametrize("version_identifier", ["some_value", ""])
+def test_to_api_version_header(version_identifier):
+    value = version_header.to_api_version_header(version_identifier)
+    assert value == (version_header.API_VERSION_METADATA_KEY, version_identifier)