diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 108063d4..b668c04d 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -1,3 +1,17 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:4ee57a76a176ede9087c14330c625a71553cf9c72828b2c0ca12f5338171ba60 + digest: sha256:ed1f9983d5a935a89fe8085e8bb97d94e41015252c5b6c9771257cf8624367e6 + diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 6da5f48a..6116b837 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -3,8 +3,10 @@ # # For syntax help see: # https://help.github.com/en/github/creating-cloning-and-archiving-repositories/about-code-owners#codeowners-syntax +# Note: This file is autogenerated. To make changes to the codeowner team, please update .repo-metadata.json. +# @googleapis/yoshi-python @googleapis/firestore-dpe are the default owners for changes in this repo +* @googleapis/yoshi-python @googleapis/firestore-dpe -# The firestore-dpe team is the default owner for anything not -# explicitly taken by someone else. 
-* @googleapis/firestore-dpe @googleapis/yoshi-python +# @googleapis/python-samples-reviewers @googleapis/firestore-dpe are the default owners for samples changes +/samples/ @googleapis/python-samples-reviewers @googleapis/firestore-dpe diff --git a/.github/release-please.yml b/.github/release-please.yml index 4507ad05..466597e5 100644 --- a/.github/release-please.yml +++ b/.github/release-please.yml @@ -1 +1,2 @@ releaseType: python +handleGHRelease: true diff --git a/.github/release-trigger.yml b/.github/release-trigger.yml new file mode 100644 index 00000000..d4ca9418 --- /dev/null +++ b/.github/release-trigger.yml @@ -0,0 +1 @@ +enabled: true diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml new file mode 100644 index 00000000..f7b8344c --- /dev/null +++ b/.github/workflows/docs.yml @@ -0,0 +1,38 @@ +on: + pull_request: + branches: + - main +name: docs +jobs: + docs: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v2 + - name: Setup Python + uses: actions/setup-python@v2 + with: + python-version: "3.10" + - name: Install nox + run: | + python -m pip install --upgrade setuptools pip wheel + python -m pip install nox + - name: Run docs + run: | + nox -s docs + docfx: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v2 + - name: Setup Python + uses: actions/setup-python@v2 + with: + python-version: "3.10" + - name: Install nox + run: | + python -m pip install --upgrade setuptools pip wheel + python -m pip install nox + - name: Run docfx + run: | + nox -s docfx diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml new file mode 100644 index 00000000..1e8b05c3 --- /dev/null +++ b/.github/workflows/lint.yml @@ -0,0 +1,25 @@ +on: + pull_request: + branches: + - main +name: lint +jobs: + lint: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v2 + - name: Setup Python + uses: actions/setup-python@v2 + with: + python-version: "3.10" + - name: Install 
nox + run: | + python -m pip install --upgrade setuptools pip wheel + python -m pip install nox + - name: Run lint + run: | + nox -s lint + - name: Run lint_setup_py + run: | + nox -s lint_setup_py diff --git a/.github/workflows/mypy.yml b/.github/workflows/mypy.yml new file mode 100644 index 00000000..5a0f0e09 --- /dev/null +++ b/.github/workflows/mypy.yml @@ -0,0 +1,22 @@ +on: + pull_request: + branches: + - main +name: mypy +jobs: + mypy: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v2 + - name: Setup Python + uses: actions/setup-python@v2 + with: + python-version: "3.8" + - name: Install nox + run: | + python -m pip install --upgrade setuptools pip wheel + python -m pip install nox + - name: Run mypy + run: | + nox -s mypy diff --git a/.github/workflows/unittest.yml b/.github/workflows/unittest.yml new file mode 100644 index 00000000..074ee250 --- /dev/null +++ b/.github/workflows/unittest.yml @@ -0,0 +1,57 @@ +on: + pull_request: + branches: + - main +name: unittest +jobs: + unit: + runs-on: ubuntu-latest + strategy: + matrix: + python: ['3.6', '3.7', '3.8', '3.9', '3.10'] + steps: + - name: Checkout + uses: actions/checkout@v2 + - name: Setup Python + uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python }} + - name: Install nox + run: | + python -m pip install --upgrade setuptools pip wheel + python -m pip install nox + - name: Run unit tests + env: + COVERAGE_FILE: .coverage-${{ matrix.python }} + run: | + nox -s unit-${{ matrix.python }} + - name: Upload coverage results + uses: actions/upload-artifact@v2 + with: + name: coverage-artifacts + path: .coverage-${{ matrix.python }} + + cover: + runs-on: ubuntu-latest + needs: + - unit + steps: + - name: Checkout + uses: actions/checkout@v2 + - name: Setup Python + uses: actions/setup-python@v2 + with: + python-version: "3.10" + - name: Install coverage + run: | + python -m pip install --upgrade setuptools pip wheel + python -m pip install coverage + - name: 
Download coverage results + uses: actions/download-artifact@v2 + with: + name: coverage-artifacts + path: .coverage-results/ + - name: Report coverage results + run: | + coverage combine .coverage-results/.coverage* + coverage report --show-missing --fail-under=100 diff --git a/.kokoro/release.sh b/.kokoro/release.sh index c593dd9d..fc6f0802 100755 --- a/.kokoro/release.sh +++ b/.kokoro/release.sh @@ -26,7 +26,7 @@ python3 -m pip install --upgrade twine wheel setuptools export PYTHONUNBUFFERED=1 # Move into the package, build the distribution and upload. -TWINE_PASSWORD=$(cat "${KOKORO_GFILE_DIR}/secret_manager/google-cloud-pypi-token") +TWINE_PASSWORD=$(cat "${KOKORO_KEYSTORE_DIR}/73713_google-cloud-pypi-token-keystore-1") cd github/python-datastore python3 setup.py sdist bdist_wheel twine upload --username __token__ --password "${TWINE_PASSWORD}" dist/* diff --git a/.kokoro/release/common.cfg b/.kokoro/release/common.cfg index 8571f251..6f858df9 100644 --- a/.kokoro/release/common.cfg +++ b/.kokoro/release/common.cfg @@ -23,8 +23,18 @@ env_vars: { value: "github/python-datastore/.kokoro/release.sh" } +# Fetch PyPI password +before_action { + fetch_keystore { + keystore_resource { + keystore_config_id: 73713 + keyname: "google-cloud-pypi-token-keystore-1" + } + } +} + # Tokens needed to report release status back to GitHub env_vars: { key: "SECRET_MANAGER_KEYS" - value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem,google-cloud-pypi-token" + value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem" } diff --git a/.repo-metadata.json b/.repo-metadata.json index 5f4bae89..a5bf20b2 100644 --- a/.repo-metadata.json +++ b/.repo-metadata.json @@ -1,13 +1,16 @@ { - "name": "datastore", - "name_pretty": "Google Cloud Datastore", - "product_documentation": "https://cloud.google.com/datastore", - "client_documentation": 
"https://googleapis.dev/python/datastore/latest", - "issue_tracker": "https://issuetracker.google.com/savedsearches/559768", - "release_level": "ga", - "language": "python", - "library_type": "GAPIC_COMBO", - "repo": "googleapis/python-datastore", - "distribution_name": "google-cloud-datastore", - "api_id": "datastore.googleapis.com" -} \ No newline at end of file + "name": "datastore", + "name_pretty": "Google Cloud Datastore", + "product_documentation": "https://cloud.google.com/datastore", + "client_documentation": "https://cloud.google.com/python/docs/reference/datastore/latest", + "issue_tracker": "https://issuetracker.google.com/savedsearches/559768", + "release_level": "stable", + "language": "python", + "library_type": "GAPIC_COMBO", + "repo": "googleapis/python-datastore", + "distribution_name": "google-cloud-datastore", + "api_id": "datastore.googleapis.com", + "default_version": "v1", + "codeowner_team": "@googleapis/firestore-dpe", + "api_shortname": "datastore" +} diff --git a/CHANGELOG.md b/CHANGELOG.md index 089d3abc..0acb94a4 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,25 @@ [1]: https://pypi.org/project/google-cloud-datastore/#history +## [2.5.0](https://github.com/googleapis/python-datastore/compare/v2.4.0...v2.5.0) (2022-02-26) + + +### Features + +* add api key support ([e166d7b](https://github.com/googleapis/python-datastore/commit/e166d7b3bc5b70d668df19e4b3a6d63b7c9c6599)) +* define Datastore -> Firestore in Datastore mode migration long running operation metadata ([#270](https://github.com/googleapis/python-datastore/issues/270)) ([e166d7b](https://github.com/googleapis/python-datastore/commit/e166d7b3bc5b70d668df19e4b3a6d63b7c9c6599)) + + +### Bug Fixes + +* **deps:** move libcst to extras ([#271](https://github.com/googleapis/python-datastore/issues/271)) ([d53fcce](https://github.com/googleapis/python-datastore/commit/d53fcce361d1585be9b0793fb6cc7fc4b27b07a7)) +* resolve DuplicateCredentialArgs error when using credentials_file 
([e166d7b](https://github.com/googleapis/python-datastore/commit/e166d7b3bc5b70d668df19e4b3a6d63b7c9c6599)) + + +### Documentation + +* add generated snippets ([e166d7b](https://github.com/googleapis/python-datastore/commit/e166d7b3bc5b70d668df19e4b3a6d63b7c9c6599)) + ## [2.4.0](https://www.github.com/googleapis/python-datastore/compare/v2.3.0...v2.4.0) (2021-11-08) diff --git a/README.rst b/README.rst index b142bc22..2315c84a 100644 --- a/README.rst +++ b/README.rst @@ -20,7 +20,7 @@ all other queries. :target: https://pypi.org/project/google-cloud-datastore/ .. _Google Cloud Datastore API: https://cloud.google.com/datastore/docs .. _Product Documentation: https://cloud.google.com/datastore/docs -.. _Client Library Documentation: https://googleapis.dev/python/datastore/latest +.. _Client Library Documentation: https://cloud.google.com/python/docs/reference/datastore/latest Quick Start ----------- diff --git a/UPGRADING.md b/UPGRADING.md index 517c39ad..8d8ec3bc 100644 --- a/UPGRADING.md +++ b/UPGRADING.md @@ -20,10 +20,10 @@ If you previously were using modules or functions under the namespace To assist with this, we have included some helpful scripts to make some of the code modifications required to use 2.0.0. -* Install the library +* Install the library with `libcst`. 
```py -python3 -m pip install google-cloud-datastore +python3 -m pip install google-cloud-datastore[libcst] ``` * The scripts `fixup_datastore_v1_keywords.py` and `fixup_datastore_admin_v1_keywords.py` diff --git a/google/__init__.py b/google/__init__.py index 0e1bc513..4755e2b0 100644 --- a/google/__init__.py +++ b/google/__init__.py @@ -19,4 +19,4 @@ except ImportError: import pkgutil - __path__ = pkgutil.extend_path(__path__, __name__) + __path__ = pkgutil.extend_path(__path__, __name__) # type: ignore diff --git a/google/cloud/__init__.py b/google/cloud/__init__.py index 0e1bc513..4755e2b0 100644 --- a/google/cloud/__init__.py +++ b/google/cloud/__init__.py @@ -19,4 +19,4 @@ except ImportError: import pkgutil - __path__ = pkgutil.extend_path(__path__, __name__) + __path__ = pkgutil.extend_path(__path__, __name__) # type: ignore diff --git a/google/cloud/datastore/_http.py b/google/cloud/datastore/_http.py index 9ea5aac8..f92c76f0 100644 --- a/google/cloud/datastore/_http.py +++ b/google/cloud/datastore/_http.py @@ -17,7 +17,7 @@ from google.rpc import status_pb2 # type: ignore from google.cloud import _http as connection_module -from google.cloud import exceptions # type: ignore +from google.cloud import exceptions from google.cloud.datastore_v1.types import datastore as _datastore_pb2 diff --git a/google/cloud/datastore/client.py b/google/cloud/datastore/client.py index 207759cc..03829ce0 100644 --- a/google/cloud/datastore/client.py +++ b/google/cloud/datastore/client.py @@ -18,9 +18,9 @@ import google.api_core.client_options from google.auth.credentials import AnonymousCredentials # type: ignore -from google.cloud._helpers import _LocalStack # type: ignore -from google.cloud._helpers import _determine_default_project as _base_default_project # type: ignore -from google.cloud.client import ClientWithProject # type: ignore +from google.cloud._helpers import _LocalStack +from google.cloud._helpers import _determine_default_project as _base_default_project +from 
google.cloud.client import ClientWithProject from google.cloud.datastore.version import __version__ from google.cloud.datastore import helpers from google.cloud.datastore._http import HTTPDatastoreAPI diff --git a/google/cloud/datastore/version.py b/google/cloud/datastore/version.py index fe11624d..5836d805 100644 --- a/google/cloud/datastore/version.py +++ b/google/cloud/datastore/version.py @@ -12,4 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = "2.4.0" +__version__ = "2.5.0" diff --git a/google/cloud/datastore_admin_v1/__init__.py b/google/cloud/datastore_admin_v1/__init__.py index 70a79c07..4d0164cf 100644 --- a/google/cloud/datastore_admin_v1/__init__.py +++ b/google/cloud/datastore_admin_v1/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -19,6 +19,7 @@ from .types.datastore_admin import CommonMetadata from .types.datastore_admin import CreateIndexRequest +from .types.datastore_admin import DatastoreFirestoreMigrationMetadata from .types.datastore_admin import DeleteIndexRequest from .types.datastore_admin import EntityFilter from .types.datastore_admin import ExportEntitiesMetadata @@ -33,12 +34,17 @@ from .types.datastore_admin import Progress from .types.datastore_admin import OperationType from .types.index import Index +from .types.migration import MigrationProgressEvent +from .types.migration import MigrationStateEvent +from .types.migration import MigrationState +from .types.migration import MigrationStep __all__ = ( "DatastoreAdminAsyncClient", "CommonMetadata", "CreateIndexRequest", "DatastoreAdminClient", + "DatastoreFirestoreMigrationMetadata", "DeleteIndexRequest", "EntityFilter", "ExportEntitiesMetadata", @@ -51,6 +57,10 @@ "IndexOperationMetadata", "ListIndexesRequest", "ListIndexesResponse", + "MigrationProgressEvent", + "MigrationState", + "MigrationStateEvent", + "MigrationStep", "OperationType", "Progress", ) diff --git a/google/cloud/datastore_admin_v1/services/__init__.py b/google/cloud/datastore_admin_v1/services/__init__.py index 4de65971..e8e1c384 100644 --- a/google/cloud/datastore_admin_v1/services/__init__.py +++ b/google/cloud/datastore_admin_v1/services/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/google/cloud/datastore_admin_v1/services/datastore_admin/__init__.py b/google/cloud/datastore_admin_v1/services/datastore_admin/__init__.py index 951a69a9..6e5bb3d1 100644 --- a/google/cloud/datastore_admin_v1/services/datastore_admin/__init__.py +++ b/google/cloud/datastore_admin_v1/services/datastore_admin/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py b/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py index e1d24d16..ebac62bd 100644 --- a/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py +++ b/google/cloud/datastore_admin_v1/services/datastore_admin/async_client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -16,17 +16,20 @@ from collections import OrderedDict import functools import re -from typing import Dict, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core.client_options import ClientOptions # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core.client_options import ClientOptions +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore -OptionalRetry = Union[retries.Retry, object] +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore @@ -163,6 +166,42 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): from_service_account_json = from_service_account_file + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. 
+ (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variabel is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return DatastoreAdminClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + @property def transport(self) -> DatastoreAdminTransport: """Returns the transport used by the client instance. @@ -228,7 +267,7 @@ async def export_entities( request: Union[datastore_admin.ExportEntitiesRequest, dict] = None, *, project_id: str = None, - labels: Sequence[datastore_admin.ExportEntitiesRequest.LabelsEntry] = None, + labels: Dict[str, str] = None, entity_filter: datastore_admin.EntityFilter = None, output_url_prefix: str = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, @@ -246,6 +285,31 @@ async def export_entities( before completion it may leave partial data behind in Google Cloud Storage. + + .. 
code-block:: python + + from google.cloud import datastore_admin_v1 + + def sample_export_entities(): + # Create a client + client = datastore_admin_v1.DatastoreAdminClient() + + # Initialize request argument(s) + request = datastore_admin_v1.ExportEntitiesRequest( + project_id="project_id_value", + output_url_prefix="output_url_prefix_value", + ) + + # Make the request + operation = client.export_entities(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + Args: request (Union[google.cloud.datastore_admin_v1.types.ExportEntitiesRequest, dict]): The request object. The request for @@ -257,7 +321,7 @@ async def export_entities( This corresponds to the ``project_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - labels (:class:`Sequence[google.cloud.datastore_admin_v1.types.ExportEntitiesRequest.LabelsEntry]`): + labels (:class:`Dict[str, str]`): Client-assigned labels. This corresponds to the ``labels`` field on the ``request`` instance; if ``request`` is provided, this @@ -313,7 +377,7 @@ async def export_entities( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any( [project_id, labels, entity_filter, output_url_prefix] @@ -365,7 +429,7 @@ async def import_entities( request: Union[datastore_admin.ImportEntitiesRequest, dict] = None, *, project_id: str = None, - labels: Sequence[datastore_admin.ImportEntitiesRequest.LabelsEntry] = None, + labels: Dict[str, str] = None, input_url: str = None, entity_filter: datastore_admin.EntityFilter = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, @@ -380,6 +444,31 @@ async def import_entities( is possible that a subset of the data has already been imported to Cloud Datastore. 
+ + .. code-block:: python + + from google.cloud import datastore_admin_v1 + + def sample_import_entities(): + # Create a client + client = datastore_admin_v1.DatastoreAdminClient() + + # Initialize request argument(s) + request = datastore_admin_v1.ImportEntitiesRequest( + project_id="project_id_value", + input_url="input_url_value", + ) + + # Make the request + operation = client.import_entities(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + Args: request (Union[google.cloud.datastore_admin_v1.types.ImportEntitiesRequest, dict]): The request object. The request for @@ -391,7 +480,7 @@ async def import_entities( This corresponds to the ``project_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - labels (:class:`Sequence[google.cloud.datastore_admin_v1.types.ImportEntitiesRequest.LabelsEntry]`): + labels (:class:`Dict[str, str]`): Client-assigned labels. This corresponds to the ``labels`` field on the ``request`` instance; if ``request`` is provided, this @@ -453,7 +542,7 @@ async def import_entities( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([project_id, labels, input_url, entity_filter]) if request is not None and has_flattened_params: @@ -522,6 +611,29 @@ async def create_index( Indexes with a single property cannot be created. + + .. 
code-block:: python + + from google.cloud import datastore_admin_v1 + + def sample_create_index(): + # Create a client + client = datastore_admin_v1.DatastoreAdminClient() + + # Initialize request argument(s) + request = datastore_admin_v1.CreateIndexRequest( + ) + + # Make the request + operation = client.create_index(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + Args: request (Union[google.cloud.datastore_admin_v1.types.CreateIndexRequest, dict]): The request object. The request for @@ -589,6 +701,29 @@ async def delete_index( [delete][google.datastore.admin.v1.DatastoreAdmin.DeleteIndex] again. + + .. code-block:: python + + from google.cloud import datastore_admin_v1 + + def sample_delete_index(): + # Create a client + client = datastore_admin_v1.DatastoreAdminClient() + + # Initialize request argument(s) + request = datastore_admin_v1.DeleteIndexRequest( + ) + + # Make the request + operation = client.delete_index(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + Args: request (Union[google.cloud.datastore_admin_v1.types.DeleteIndexRequest, dict]): The request object. The request for @@ -643,6 +778,24 @@ async def get_index( ) -> index.Index: r"""Gets an index. + .. code-block:: python + + from google.cloud import datastore_admin_v1 + + def sample_get_index(): + # Create a client + client = datastore_admin_v1.DatastoreAdminClient() + + # Initialize request argument(s) + request = datastore_admin_v1.GetIndexRequest( + ) + + # Make the request + response = client.get_index(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.datastore_admin_v1.types.GetIndexRequest, dict]): The request object. The request for @@ -697,6 +850,26 @@ async def list_indexes( the list of indexes and may occasionally return stale results. + + .. 
code-block:: python + + from google.cloud import datastore_admin_v1 + + def sample_list_indexes(): + # Create a client + client = datastore_admin_v1.DatastoreAdminClient() + + # Initialize request argument(s) + request = datastore_admin_v1.ListIndexesRequest( + ) + + # Make the request + page_result = client.list_indexes(request=request) + + # Handle the response + for response in page_result: + print(response) + Args: request (Union[google.cloud.datastore_admin_v1.types.ListIndexesRequest, dict]): The request object. The request for diff --git a/google/cloud/datastore_admin_v1/services/datastore_admin/client.py b/google/cloud/datastore_admin_v1/services/datastore_admin/client.py index b8ca70c4..4f4f9211 100644 --- a/google/cloud/datastore_admin_v1/services/datastore_admin/client.py +++ b/google/cloud/datastore_admin_v1/services/datastore_admin/client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -14,23 +14,25 @@ # limitations under the License. 
# from collections import OrderedDict -from distutils import util import os import re from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore -OptionalRetry = Union[retries.Retry, object] +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore from google.api_core import operation # type: ignore from google.api_core import operation_async # type: ignore @@ -275,6 +277,73 @@ def parse_common_location_path(path: str) -> Dict[str, str]: m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) return m.groupdict() if m else {} + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. 
+ (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variabel is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. 
+ client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + def __init__( self, *, @@ -325,50 +394,22 @@ def __init__( if client_options is None: client_options = client_options_lib.ClientOptions() - # Create SSL credentials for mutual TLS if needed. - use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options ) - client_cert_source_func = None - is_mtls = False - if use_client_cert: - if client_options.client_cert_source: - is_mtls = True - client_cert_source_func = client_options.client_cert_source - else: - is_mtls = mtls.has_default_client_cert_source() - if is_mtls: - client_cert_source_func = mtls.default_client_cert_source() - else: - client_cert_source_func = None - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - else: - use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_mtls_env == "never": - api_endpoint = self.DEFAULT_ENDPOINT - elif use_mtls_env == "always": - api_endpoint = self.DEFAULT_MTLS_ENDPOINT - elif use_mtls_env == "auto": - if is_mtls: - api_endpoint = self.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = self.DEFAULT_ENDPOINT - else: - raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. 
Accepted " - "values: never, auto, always" - ) + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) # Save or instantiate the transport. # Ordinarily, we provide the transport, but allowing a custom transport # instance provides an extensibility point for unusual situations. if isinstance(transport, DatastoreAdminTransport): # transport is a DatastoreAdminTransport instance. - if credentials or client_options.credentials_file: + if credentials or client_options.credentials_file or api_key_value: raise ValueError( "When providing a transport instance, " "provide its credentials directly." @@ -380,6 +421,15 @@ def __init__( ) self._transport = transport else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + Transport = type(self).get_transport_class(transport) self._transport = Transport( credentials=credentials, @@ -397,7 +447,7 @@ def export_entities( request: Union[datastore_admin.ExportEntitiesRequest, dict] = None, *, project_id: str = None, - labels: Sequence[datastore_admin.ExportEntitiesRequest.LabelsEntry] = None, + labels: Dict[str, str] = None, entity_filter: datastore_admin.EntityFilter = None, output_url_prefix: str = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, @@ -415,6 +465,31 @@ def export_entities( before completion it may leave partial data behind in Google Cloud Storage. + + .. 
code-block:: python + + from google.cloud import datastore_admin_v1 + + def sample_export_entities(): + # Create a client + client = datastore_admin_v1.DatastoreAdminClient() + + # Initialize request argument(s) + request = datastore_admin_v1.ExportEntitiesRequest( + project_id="project_id_value", + output_url_prefix="output_url_prefix_value", + ) + + # Make the request + operation = client.export_entities(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + Args: request (Union[google.cloud.datastore_admin_v1.types.ExportEntitiesRequest, dict]): The request object. The request for @@ -426,7 +501,7 @@ def export_entities( This corresponds to the ``project_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - labels (Sequence[google.cloud.datastore_admin_v1.types.ExportEntitiesRequest.LabelsEntry]): + labels (Dict[str, str]): Client-assigned labels. This corresponds to the ``labels`` field on the ``request`` instance; if ``request`` is provided, this @@ -482,7 +557,7 @@ def export_entities( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any( [project_id, labels, entity_filter, output_url_prefix] @@ -533,7 +608,7 @@ def import_entities( request: Union[datastore_admin.ImportEntitiesRequest, dict] = None, *, project_id: str = None, - labels: Sequence[datastore_admin.ImportEntitiesRequest.LabelsEntry] = None, + labels: Dict[str, str] = None, input_url: str = None, entity_filter: datastore_admin.EntityFilter = None, retry: OptionalRetry = gapic_v1.method.DEFAULT, @@ -548,6 +623,31 @@ def import_entities( is possible that a subset of the data has already been imported to Cloud Datastore. + + .. 
code-block:: python + + from google.cloud import datastore_admin_v1 + + def sample_import_entities(): + # Create a client + client = datastore_admin_v1.DatastoreAdminClient() + + # Initialize request argument(s) + request = datastore_admin_v1.ImportEntitiesRequest( + project_id="project_id_value", + input_url="input_url_value", + ) + + # Make the request + operation = client.import_entities(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + Args: request (Union[google.cloud.datastore_admin_v1.types.ImportEntitiesRequest, dict]): The request object. The request for @@ -559,7 +659,7 @@ def import_entities( This corresponds to the ``project_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - labels (Sequence[google.cloud.datastore_admin_v1.types.ImportEntitiesRequest.LabelsEntry]): + labels (Dict[str, str]): Client-assigned labels. This corresponds to the ``labels`` field on the ``request`` instance; if ``request`` is provided, this @@ -621,7 +721,7 @@ def import_entities( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([project_id, labels, input_url, entity_filter]) if request is not None and has_flattened_params: @@ -689,6 +789,29 @@ def create_index( Indexes with a single property cannot be created. + + .. 
code-block:: python + + from google.cloud import datastore_admin_v1 + + def sample_create_index(): + # Create a client + client = datastore_admin_v1.DatastoreAdminClient() + + # Initialize request argument(s) + request = datastore_admin_v1.CreateIndexRequest( + ) + + # Make the request + operation = client.create_index(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + Args: request (Union[google.cloud.datastore_admin_v1.types.CreateIndexRequest, dict]): The request object. The request for @@ -757,6 +880,29 @@ def delete_index( [delete][google.datastore.admin.v1.DatastoreAdmin.DeleteIndex] again. + + .. code-block:: python + + from google.cloud import datastore_admin_v1 + + def sample_delete_index(): + # Create a client + client = datastore_admin_v1.DatastoreAdminClient() + + # Initialize request argument(s) + request = datastore_admin_v1.DeleteIndexRequest( + ) + + # Make the request + operation = client.delete_index(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + Args: request (Union[google.cloud.datastore_admin_v1.types.DeleteIndexRequest, dict]): The request object. The request for @@ -812,6 +958,24 @@ def get_index( ) -> index.Index: r"""Gets an index. + .. code-block:: python + + from google.cloud import datastore_admin_v1 + + def sample_get_index(): + # Create a client + client = datastore_admin_v1.DatastoreAdminClient() + + # Initialize request argument(s) + request = datastore_admin_v1.GetIndexRequest( + ) + + # Make the request + response = client.get_index(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.datastore_admin_v1.types.GetIndexRequest, dict]): The request object. The request for @@ -857,6 +1021,26 @@ def list_indexes( the list of indexes and may occasionally return stale results. + + .. 
code-block:: python + + from google.cloud import datastore_admin_v1 + + def sample_list_indexes(): + # Create a client + client = datastore_admin_v1.DatastoreAdminClient() + + # Initialize request argument(s) + request = datastore_admin_v1.ListIndexesRequest( + ) + + # Make the request + page_result = client.list_indexes(request=request) + + # Handle the response + for response in page_result: + print(response) + Args: request (Union[google.cloud.datastore_admin_v1.types.ListIndexesRequest, dict]): The request object. The request for diff --git a/google/cloud/datastore_admin_v1/services/datastore_admin/pagers.py b/google/cloud/datastore_admin_v1/services/datastore_admin/pagers.py index a2f14858..9a2d05ad 100644 --- a/google/cloud/datastore_admin_v1/services/datastore_admin/pagers.py +++ b/google/cloud/datastore_admin_v1/services/datastore_admin/pagers.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/datastore_admin_v1/services/datastore_admin/transports/__init__.py b/google/cloud/datastore_admin_v1/services/datastore_admin/transports/__init__.py index 376bbfa1..7d7ea9d4 100644 --- a/google/cloud/datastore_admin_v1/services/datastore_admin/transports/__init__.py +++ b/google/cloud/datastore_admin_v1/services/datastore_admin/transports/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py b/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py index 8fc75028..1b47ae2b 100644 --- a/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py +++ b/google/cloud/datastore_admin_v1/services/datastore_admin/transports/base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -18,11 +18,11 @@ import pkg_resources import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore -from google.api_core import operations_v1 # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import operations_v1 from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore @@ -107,7 +107,6 @@ def __init__( credentials, _ = google.auth.load_credentials_from_file( credentials_file, **scopes_kwargs, quota_project_id=quota_project_id ) - elif credentials is None: credentials, _ = google.auth.default( **scopes_kwargs, quota_project_id=quota_project_id diff --git a/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py b/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py index 07db8479..e27734f8 100644 --- a/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py +++ b/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google 
LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -16,9 +16,9 @@ import warnings from typing import Callable, Dict, Optional, Sequence, Tuple, Union -from google.api_core import grpc_helpers # type: ignore -from google.api_core import operations_v1 # type: ignore -from google.api_core import gapic_v1 # type: ignore +from google.api_core import grpc_helpers +from google.api_core import operations_v1 +from google.api_core import gapic_v1 import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore @@ -219,8 +219,11 @@ def __init__( if not self._grpc_channel: self._grpc_channel = type(self).create_channel( self._host, + # use the credentials which are saved credentials=self._credentials, - credentials_file=credentials_file, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, scopes=self._scopes, ssl_credentials=self._ssl_channel_credentials, quota_project_id=quota_project_id, @@ -293,7 +296,7 @@ def operations_client(self) -> operations_v1.OperationsClient: This property caches on the instance; repeated calls return the same client. """ - # Sanity check: Only create a new client if we do not already have one. + # Quick check: Only create a new client if we do not already have one. 
if self._operations_client is None: self._operations_client = operations_v1.OperationsClient(self.grpc_channel) diff --git a/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py b/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py index 8a1f1a54..46f84887 100644 --- a/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py +++ b/google/cloud/datastore_admin_v1/services/datastore_admin/transports/grpc_asyncio.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -16,9 +16,9 @@ import warnings from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union -from google.api_core import gapic_v1 # type: ignore -from google.api_core import grpc_helpers_async # type: ignore -from google.api_core import operations_v1 # type: ignore +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers_async +from google.api_core import operations_v1 from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore @@ -264,8 +264,11 @@ def __init__( if not self._grpc_channel: self._grpc_channel = type(self).create_channel( self._host, + # use the credentials which are saved credentials=self._credentials, - credentials_file=credentials_file, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, scopes=self._scopes, ssl_credentials=self._ssl_channel_credentials, quota_project_id=quota_project_id, @@ -295,7 +298,7 @@ def operations_client(self) -> operations_v1.OperationsAsyncClient: This property caches on the instance; repeated calls return the same client. 
""" - # Sanity check: Only create a new client if we do not already have one. + # Quick check: Only create a new client if we do not already have one. if self._operations_client is None: self._operations_client = operations_v1.OperationsAsyncClient( self.grpc_channel diff --git a/google/cloud/datastore_admin_v1/types/__init__.py b/google/cloud/datastore_admin_v1/types/__init__.py index ac4ff905..fbc4f65f 100644 --- a/google/cloud/datastore_admin_v1/types/__init__.py +++ b/google/cloud/datastore_admin_v1/types/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -16,6 +16,7 @@ from .datastore_admin import ( CommonMetadata, CreateIndexRequest, + DatastoreFirestoreMigrationMetadata, DeleteIndexRequest, EntityFilter, ExportEntitiesMetadata, @@ -31,10 +32,17 @@ OperationType, ) from .index import Index +from .migration import ( + MigrationProgressEvent, + MigrationStateEvent, + MigrationState, + MigrationStep, +) __all__ = ( "CommonMetadata", "CreateIndexRequest", + "DatastoreFirestoreMigrationMetadata", "DeleteIndexRequest", "EntityFilter", "ExportEntitiesMetadata", @@ -49,4 +57,8 @@ "Progress", "OperationType", "Index", + "MigrationProgressEvent", + "MigrationStateEvent", + "MigrationState", + "MigrationStep", ) diff --git a/google/cloud/datastore_admin_v1/types/datastore_admin.py b/google/cloud/datastore_admin_v1/types/datastore_admin.py index 0f4546fd..4e5ad0da 100644 --- a/google/cloud/datastore_admin_v1/types/datastore_admin.py +++ b/google/cloud/datastore_admin_v1/types/datastore_admin.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -16,6 +16,7 @@ import proto # type: ignore from google.cloud.datastore_admin_v1.types import index as gda_index +from google.cloud.datastore_admin_v1.types import migration from google.protobuf import timestamp_pb2 # type: ignore @@ -37,6 +38,7 @@ "ListIndexesRequest", "ListIndexesResponse", "IndexOperationMetadata", + "DatastoreFirestoreMigrationMetadata", }, ) @@ -62,7 +64,7 @@ class CommonMetadata(proto.Message): operation_type (google.cloud.datastore_admin_v1.types.OperationType): The type of the operation. Can be used as a filter in ListOperationsRequest. - labels (Sequence[google.cloud.datastore_admin_v1.types.CommonMetadata.LabelsEntry]): + labels (Dict[str, str]): The client-assigned labels which were provided when the operation was created. May also include additional labels. @@ -113,7 +115,7 @@ class ExportEntitiesRequest(proto.Message): project_id (str): Required. Project ID against which to make the request. - labels (Sequence[google.cloud.datastore_admin_v1.types.ExportEntitiesRequest.LabelsEntry]): + labels (Dict[str, str]): Client-assigned labels. entity_filter (google.cloud.datastore_admin_v1.types.EntityFilter): Description of what data from the project is @@ -157,7 +159,7 @@ class ImportEntitiesRequest(proto.Message): project_id (str): Required. Project ID against which to make the request. - labels (Sequence[google.cloud.datastore_admin_v1.types.ImportEntitiesRequest.LabelsEntry]): + labels (Dict[str, str]): Client-assigned labels. input_url (str): Required. The full resource URL of the external storage @@ -410,4 +412,27 @@ class IndexOperationMetadata(proto.Message): index_id = proto.Field(proto.STRING, number=3,) +class DatastoreFirestoreMigrationMetadata(proto.Message): + r"""Metadata for Datastore to Firestore migration operations. + + The DatastoreFirestoreMigration operation is not started by the + end-user via an explicit "creation" method. This is an intentional + deviation from the LRO design pattern. 
+ + This singleton resource can be accessed at: + ``projects/{project_id}/datastore-firestore-migration`` + + Attributes: + migration_state (google.cloud.datastore_admin_v1.types.MigrationState): + The current state of migration from Cloud + Datastore to Cloud Firestore in Datastore mode. + migration_step (google.cloud.datastore_admin_v1.types.MigrationStep): + The current step of migration from Cloud + Datastore to Cloud Firestore in Datastore mode. + """ + + migration_state = proto.Field(proto.ENUM, number=1, enum=migration.MigrationState,) + migration_step = proto.Field(proto.ENUM, number=2, enum=migration.MigrationStep,) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/datastore_admin_v1/types/index.py b/google/cloud/datastore_admin_v1/types/index.py index b372cccf..8d50f03a 100644 --- a/google/cloud/datastore_admin_v1/types/index.py +++ b/google/cloud/datastore_admin_v1/types/index.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/datastore_admin_v1/types/migration.py b/google/cloud/datastore_admin_v1/types/migration.py new file mode 100644 index 00000000..18cdd8d6 --- /dev/null +++ b/google/cloud/datastore_admin_v1/types/migration.py @@ -0,0 +1,135 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +import proto # type: ignore + + +__protobuf__ = proto.module( + package="google.datastore.admin.v1", + manifest={ + "MigrationState", + "MigrationStep", + "MigrationStateEvent", + "MigrationProgressEvent", + }, +) + + +class MigrationState(proto.Enum): + r"""States for a migration.""" + MIGRATION_STATE_UNSPECIFIED = 0 + RUNNING = 1 + PAUSED = 2 + COMPLETE = 3 + + +class MigrationStep(proto.Enum): + r"""Steps in a migration.""" + MIGRATION_STEP_UNSPECIFIED = 0 + PREPARE = 6 + START = 1 + APPLY_WRITES_SYNCHRONOUSLY = 7 + COPY_AND_VERIFY = 2 + REDIRECT_EVENTUALLY_CONSISTENT_READS = 3 + REDIRECT_STRONGLY_CONSISTENT_READS = 4 + REDIRECT_WRITES = 5 + + +class MigrationStateEvent(proto.Message): + r"""An event signifying a change in state of a `migration from Cloud + Datastore to Cloud Firestore in Datastore + mode `__. + + Attributes: + state (google.cloud.datastore_admin_v1.types.MigrationState): + The new state of the migration. + """ + + state = proto.Field(proto.ENUM, number=1, enum="MigrationState",) + + +class MigrationProgressEvent(proto.Message): + r"""An event signifying the start of a new step in a `migration from + Cloud Datastore to Cloud Firestore in Datastore + mode `__. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + step (google.cloud.datastore_admin_v1.types.MigrationStep): + The step that is starting. + + An event with step set to ``START`` indicates that the + migration has been reverted back to the initial + pre-migration state. 
+        prepare_step_details (google.cloud.datastore_admin_v1.types.MigrationProgressEvent.PrepareStepDetails):
+            Details for the ``PREPARE`` step.
+
+            This field is a member of `oneof`_ ``step_details``.
+        redirect_writes_step_details (google.cloud.datastore_admin_v1.types.MigrationProgressEvent.RedirectWritesStepDetails):
+            Details for the ``REDIRECT_WRITES`` step.
+
+            This field is a member of `oneof`_ ``step_details``.
+    """
+
+    class ConcurrencyMode(proto.Enum):
+        r"""Concurrency modes for transactions in Cloud Firestore."""
+        CONCURRENCY_MODE_UNSPECIFIED = 0
+        PESSIMISTIC = 1
+        OPTIMISTIC = 2
+
+    class PrepareStepDetails(proto.Message):
+        r"""Details for the ``PREPARE`` step.
+
+        Attributes:
+            concurrency_mode (google.cloud.datastore_admin_v1.types.MigrationProgressEvent.ConcurrencyMode):
+                The concurrency mode this database will use when it reaches
+                the ``REDIRECT_WRITES`` step.
+        """
+
+        concurrency_mode = proto.Field(
+            proto.ENUM, number=1, enum="MigrationProgressEvent.ConcurrencyMode",
+        )
+
+    class RedirectWritesStepDetails(proto.Message):
+        r"""Details for the ``REDIRECT_WRITES`` step.
+
+        Attributes:
+            concurrency_mode (google.cloud.datastore_admin_v1.types.MigrationProgressEvent.ConcurrencyMode):
+                The concurrency mode for this database.
+ """ + + concurrency_mode = proto.Field( + proto.ENUM, number=1, enum="MigrationProgressEvent.ConcurrencyMode", + ) + + step = proto.Field(proto.ENUM, number=1, enum="MigrationStep",) + prepare_step_details = proto.Field( + proto.MESSAGE, number=2, oneof="step_details", message=PrepareStepDetails, + ) + redirect_writes_step_details = proto.Field( + proto.MESSAGE, + number=3, + oneof="step_details", + message=RedirectWritesStepDetails, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/datastore_v1/__init__.py b/google/cloud/datastore_v1/__init__.py index 247eec15..881df4ca 100644 --- a/google/cloud/datastore_v1/__init__.py +++ b/google/cloud/datastore_v1/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/datastore_v1/services/__init__.py b/google/cloud/datastore_v1/services/__init__.py index 4de65971..e8e1c384 100644 --- a/google/cloud/datastore_v1/services/__init__.py +++ b/google/cloud/datastore_v1/services/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/datastore_v1/services/datastore/__init__.py b/google/cloud/datastore_v1/services/datastore/__init__.py index 611f280b..66d6560d 100644 --- a/google/cloud/datastore_v1/services/datastore/__init__.py +++ b/google/cloud/datastore_v1/services/datastore/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/google/cloud/datastore_v1/services/datastore/async_client.py b/google/cloud/datastore_v1/services/datastore/async_client.py index ca6beef2..c6f8431b 100644 --- a/google/cloud/datastore_v1/services/datastore/async_client.py +++ b/google/cloud/datastore_v1/services/datastore/async_client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -16,17 +16,20 @@ from collections import OrderedDict import functools import re -from typing import Dict, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core.client_options import ClientOptions # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core.client_options import ClientOptions +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore -OptionalRetry = Union[retries.Retry, object] +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore from google.cloud.datastore_v1.types import datastore from google.cloud.datastore_v1.types import entity @@ -103,6 +106,42 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): from_service_account_json = from_service_account_file + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for 
mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variabel is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return DatastoreClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + @property def transport(self) -> DatastoreTransport: """Returns the transport used by the client instance. @@ -176,6 +215,25 @@ async def lookup( ) -> datastore.LookupResponse: r"""Looks up entities by key. + .. 
code-block:: python + + from google.cloud import datastore_v1 + + def sample_lookup(): + # Create a client + client = datastore_v1.DatastoreClient() + + # Initialize request argument(s) + request = datastore_v1.LookupRequest( + project_id="project_id_value", + ) + + # Make the request + response = client.lookup(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.datastore_v1.types.LookupRequest, dict]): The request object. The request for @@ -212,7 +270,7 @@ async def lookup( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([project_id, read_options, keys]) if request is not None and has_flattened_params: @@ -266,6 +324,25 @@ async def run_query( ) -> datastore.RunQueryResponse: r"""Queries for entities. + .. code-block:: python + + from google.cloud import datastore_v1 + + def sample_run_query(): + # Create a client + client = datastore_v1.DatastoreClient() + + # Initialize request argument(s) + request = datastore_v1.RunQueryRequest( + project_id="project_id_value", + ) + + # Make the request + response = client.run_query(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.datastore_v1.types.RunQueryRequest, dict]): The request object. The request for @@ -320,6 +397,25 @@ async def begin_transaction( ) -> datastore.BeginTransactionResponse: r"""Begins a new transaction. + .. 
code-block:: python + + from google.cloud import datastore_v1 + + def sample_begin_transaction(): + # Create a client + client = datastore_v1.DatastoreClient() + + # Initialize request argument(s) + request = datastore_v1.BeginTransactionRequest( + project_id="project_id_value", + ) + + # Make the request + response = client.begin_transaction(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.datastore_v1.types.BeginTransactionRequest, dict]): The request object. The request for @@ -344,7 +440,7 @@ async def begin_transaction( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([project_id]) if request is not None and has_flattened_params: @@ -389,6 +485,27 @@ async def commit( r"""Commits a transaction, optionally creating, deleting or modifying some entities. + + .. code-block:: python + + from google.cloud import datastore_v1 + + def sample_commit(): + # Create a client + client = datastore_v1.DatastoreClient() + + # Initialize request argument(s) + request = datastore_v1.CommitRequest( + transaction=b'transaction_blob', + project_id="project_id_value", + ) + + # Make the request + response = client.commit(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.datastore_v1.types.CommitRequest, dict]): The request object. The request for @@ -448,7 +565,7 @@ async def commit( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project_id, mode, transaction, mutations]) if request is not None and has_flattened_params: @@ -496,6 +613,26 @@ async def rollback( ) -> datastore.RollbackResponse: r"""Rolls back a transaction. + .. code-block:: python + + from google.cloud import datastore_v1 + + def sample_rollback(): + # Create a client + client = datastore_v1.DatastoreClient() + + # Initialize request argument(s) + request = datastore_v1.RollbackRequest( + project_id="project_id_value", + transaction=b'transaction_blob', + ) + + # Make the request + response = client.rollback(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.datastore_v1.types.RollbackRequest, dict]): The request object. The request for @@ -528,7 +665,7 @@ async def rollback( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([project_id, transaction]) if request is not None and has_flattened_params: @@ -573,6 +710,26 @@ async def allocate_ids( r"""Allocates IDs for the given keys, which is useful for referencing an entity before it is inserted. + + .. code-block:: python + + from google.cloud import datastore_v1 + + def sample_allocate_ids(): + # Create a client + client = datastore_v1.DatastoreClient() + + # Initialize request argument(s) + request = datastore_v1.AllocateIdsRequest( + project_id="project_id_value", + ) + + # Make the request + response = client.allocate_ids(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.datastore_v1.types.AllocateIdsRequest, dict]): The request object. The request for @@ -606,7 +763,7 @@ async def allocate_ids( """ # Create or coerce a protobuf request object. 
- # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([project_id, keys]) if request is not None and has_flattened_params: @@ -648,8 +805,28 @@ async def reserve_ids( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> datastore.ReserveIdsResponse: - r"""Prevents the supplied keys' IDs from being auto- - llocated by Cloud Datastore. + r"""Prevents the supplied keys' IDs from being + auto-allocated by Cloud Datastore. + + + .. code-block:: python + + from google.cloud import datastore_v1 + + def sample_reserve_ids(): + # Create a client + client = datastore_v1.DatastoreClient() + + # Initialize request argument(s) + request = datastore_v1.ReserveIdsRequest( + project_id="project_id_value", + ) + + # Make the request + response = client.reserve_ids(request=request) + + # Handle the response + print(response) Args: request (Union[google.cloud.datastore_v1.types.ReserveIdsRequest, dict]): @@ -683,7 +860,7 @@ async def reserve_ids( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([project_id, keys]) if request is not None and has_flattened_params: diff --git a/google/cloud/datastore_v1/services/datastore/client.py b/google/cloud/datastore_v1/services/datastore/client.py index 4c53cc1f..49c741de 100644 --- a/google/cloud/datastore_v1/services/datastore/client.py +++ b/google/cloud/datastore_v1/services/datastore/client.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -14,23 +14,25 @@ # limitations under the License. # from collections import OrderedDict -from distutils import util import os import re from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore -OptionalRetry = Union[retries.Retry, object] +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore from google.cloud.datastore_v1.types import datastore from google.cloud.datastore_v1.types import entity @@ -222,6 +224,73 @@ def parse_common_location_path(path: str) -> Dict[str, str]: m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) return m.groupdict() if m else {} + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. 
+ (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` is provided, use the provided one. + (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. 
+ client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + def __init__( self, *, @@ -272,50 +341,22 @@ def __init__( if client_options is None: client_options = client_options_lib.ClientOptions() - # Create SSL credentials for mutual TLS if needed. - use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options ) - client_cert_source_func = None - is_mtls = False - if use_client_cert: - if client_options.client_cert_source: - is_mtls = True - client_cert_source_func = client_options.client_cert_source - else: - is_mtls = mtls.has_default_client_cert_source() - if is_mtls: - client_cert_source_func = mtls.default_client_cert_source() - else: - client_cert_source_func = None - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - else: - use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_mtls_env == "never": - api_endpoint = self.DEFAULT_ENDPOINT - elif use_mtls_env == "always": - api_endpoint = self.DEFAULT_MTLS_ENDPOINT - elif use_mtls_env == "auto": - if is_mtls: - api_endpoint = self.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = self.DEFAULT_ENDPOINT - else: - raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. 
Accepted " - "values: never, auto, always" - ) + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) # Save or instantiate the transport. # Ordinarily, we provide the transport, but allowing a custom transport # instance provides an extensibility point for unusual situations. if isinstance(transport, DatastoreTransport): # transport is a DatastoreTransport instance. - if credentials or client_options.credentials_file: + if credentials or client_options.credentials_file or api_key_value: raise ValueError( "When providing a transport instance, " "provide its credentials directly." @@ -327,6 +368,15 @@ def __init__( ) self._transport = transport else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + Transport = type(self).get_transport_class(transport) self._transport = Transport( credentials=credentials, @@ -352,6 +402,25 @@ def lookup( ) -> datastore.LookupResponse: r"""Looks up entities by key. + .. code-block:: python + + from google.cloud import datastore_v1 + + def sample_lookup(): + # Create a client + client = datastore_v1.DatastoreClient() + + # Initialize request argument(s) + request = datastore_v1.LookupRequest( + project_id="project_id_value", + ) + + # Make the request + response = client.lookup(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.datastore_v1.types.LookupRequest, dict]): The request object. The request for @@ -388,7 +457,7 @@ def lookup( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project_id, read_options, keys]) if request is not None and has_flattened_params: @@ -432,6 +501,25 @@ def run_query( ) -> datastore.RunQueryResponse: r"""Queries for entities. + .. code-block:: python + + from google.cloud import datastore_v1 + + def sample_run_query(): + # Create a client + client = datastore_v1.DatastoreClient() + + # Initialize request argument(s) + request = datastore_v1.RunQueryRequest( + project_id="project_id_value", + ) + + # Make the request + response = client.run_query(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.datastore_v1.types.RunQueryRequest, dict]): The request object. The request for @@ -477,6 +565,25 @@ def begin_transaction( ) -> datastore.BeginTransactionResponse: r"""Begins a new transaction. + .. code-block:: python + + from google.cloud import datastore_v1 + + def sample_begin_transaction(): + # Create a client + client = datastore_v1.DatastoreClient() + + # Initialize request argument(s) + request = datastore_v1.BeginTransactionRequest( + project_id="project_id_value", + ) + + # Make the request + response = client.begin_transaction(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.datastore_v1.types.BeginTransactionRequest, dict]): The request object. The request for @@ -501,7 +608,7 @@ def begin_transaction( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([project_id]) if request is not None and has_flattened_params: @@ -546,6 +653,27 @@ def commit( r"""Commits a transaction, optionally creating, deleting or modifying some entities. + + .. 
code-block:: python + + from google.cloud import datastore_v1 + + def sample_commit(): + # Create a client + client = datastore_v1.DatastoreClient() + + # Initialize request argument(s) + request = datastore_v1.CommitRequest( + transaction=b'transaction_blob', + project_id="project_id_value", + ) + + # Make the request + response = client.commit(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.datastore_v1.types.CommitRequest, dict]): The request object. The request for @@ -605,7 +733,7 @@ def commit( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([project_id, mode, transaction, mutations]) if request is not None and has_flattened_params: @@ -653,6 +781,26 @@ def rollback( ) -> datastore.RollbackResponse: r"""Rolls back a transaction. + .. code-block:: python + + from google.cloud import datastore_v1 + + def sample_rollback(): + # Create a client + client = datastore_v1.DatastoreClient() + + # Initialize request argument(s) + request = datastore_v1.RollbackRequest( + project_id="project_id_value", + transaction=b'transaction_blob', + ) + + # Make the request + response = client.rollback(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.datastore_v1.types.RollbackRequest, dict]): The request object. The request for @@ -685,7 +833,7 @@ def rollback( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
has_flattened_params = any([project_id, transaction]) if request is not None and has_flattened_params: @@ -730,6 +878,26 @@ def allocate_ids( r"""Allocates IDs for the given keys, which is useful for referencing an entity before it is inserted. + + .. code-block:: python + + from google.cloud import datastore_v1 + + def sample_allocate_ids(): + # Create a client + client = datastore_v1.DatastoreClient() + + # Initialize request argument(s) + request = datastore_v1.AllocateIdsRequest( + project_id="project_id_value", + ) + + # Make the request + response = client.allocate_ids(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.datastore_v1.types.AllocateIdsRequest, dict]): The request object. The request for @@ -763,7 +931,7 @@ def allocate_ids( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([project_id, keys]) if request is not None and has_flattened_params: @@ -805,8 +973,28 @@ def reserve_ids( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> datastore.ReserveIdsResponse: - r"""Prevents the supplied keys' IDs from being auto- - llocated by Cloud Datastore. + r"""Prevents the supplied keys' IDs from being + auto-allocated by Cloud Datastore. + + + .. 
code-block:: python + + from google.cloud import datastore_v1 + + def sample_reserve_ids(): + # Create a client + client = datastore_v1.DatastoreClient() + + # Initialize request argument(s) + request = datastore_v1.ReserveIdsRequest( + project_id="project_id_value", + ) + + # Make the request + response = client.reserve_ids(request=request) + + # Handle the response + print(response) Args: request (Union[google.cloud.datastore_v1.types.ReserveIdsRequest, dict]): @@ -840,7 +1028,7 @@ def reserve_ids( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([project_id, keys]) if request is not None and has_flattened_params: diff --git a/google/cloud/datastore_v1/services/datastore/transports/__init__.py b/google/cloud/datastore_v1/services/datastore/transports/__init__.py index 41074a07..b7d617f6 100644 --- a/google/cloud/datastore_v1/services/datastore/transports/__init__.py +++ b/google/cloud/datastore_v1/services/datastore/transports/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/datastore_v1/services/datastore/transports/base.py b/google/cloud/datastore_v1/services/datastore/transports/base.py index 7959b72e..487a1a45 100644 --- a/google/cloud/datastore_v1/services/datastore/transports/base.py +++ b/google/cloud/datastore_v1/services/datastore/transports/base.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -18,10 +18,10 @@ import pkg_resources import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore @@ -102,7 +102,6 @@ def __init__( credentials, _ = google.auth.load_credentials_from_file( credentials_file, **scopes_kwargs, quota_project_id=quota_project_id ) - elif credentials is None: credentials, _ = google.auth.default( **scopes_kwargs, quota_project_id=quota_project_id diff --git a/google/cloud/datastore_v1/services/datastore/transports/grpc.py b/google/cloud/datastore_v1/services/datastore/transports/grpc.py index afcc6a15..410aa89d 100644 --- a/google/cloud/datastore_v1/services/datastore/transports/grpc.py +++ b/google/cloud/datastore_v1/services/datastore/transports/grpc.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -16,8 +16,8 @@ import warnings from typing import Callable, Dict, Optional, Sequence, Tuple, Union -from google.api_core import grpc_helpers # type: ignore -from google.api_core import gapic_v1 # type: ignore +from google.api_core import grpc_helpers +from google.api_core import gapic_v1 import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore @@ -164,8 +164,11 @@ def __init__( if not self._grpc_channel: self._grpc_channel = type(self).create_channel( self._host, + # use the credentials which are saved credentials=self._credentials, - credentials_file=credentials_file, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, scopes=self._scopes, ssl_credentials=self._ssl_channel_credentials, quota_project_id=quota_project_id, @@ -393,8 +396,8 @@ def reserve_ids( ) -> Callable[[datastore.ReserveIdsRequest], datastore.ReserveIdsResponse]: r"""Return a callable for the reserve ids method over gRPC. - Prevents the supplied keys' IDs from being auto- - llocated by Cloud Datastore. + Prevents the supplied keys' IDs from being + auto-allocated by Cloud Datastore. Returns: Callable[[~.ReserveIdsRequest], diff --git a/google/cloud/datastore_v1/services/datastore/transports/grpc_asyncio.py b/google/cloud/datastore_v1/services/datastore/transports/grpc_asyncio.py index 20c51f7c..f539e84f 100644 --- a/google/cloud/datastore_v1/services/datastore/transports/grpc_asyncio.py +++ b/google/cloud/datastore_v1/services/datastore/transports/grpc_asyncio.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
@@ -16,8 +16,8 @@ import warnings from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union -from google.api_core import gapic_v1 # type: ignore -from google.api_core import grpc_helpers_async # type: ignore +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers_async from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore @@ -209,8 +209,11 @@ def __init__( if not self._grpc_channel: self._grpc_channel = type(self).create_channel( self._host, + # use the credentials which are saved credentials=self._credentials, - credentials_file=credentials_file, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, scopes=self._scopes, ssl_credentials=self._ssl_channel_credentials, quota_project_id=quota_project_id, @@ -404,8 +407,8 @@ def reserve_ids( ]: r"""Return a callable for the reserve ids method over gRPC. - Prevents the supplied keys' IDs from being auto- - llocated by Cloud Datastore. + Prevents the supplied keys' IDs from being + auto-allocated by Cloud Datastore. Returns: Callable[[~.ReserveIdsRequest], diff --git a/google/cloud/datastore_v1/types/__init__.py b/google/cloud/datastore_v1/types/__init__.py index 7553ac77..eb4fc8c2 100644 --- a/google/cloud/datastore_v1/types/__init__.py +++ b/google/cloud/datastore_v1/types/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/google/cloud/datastore_v1/types/datastore.py b/google/cloud/datastore_v1/types/datastore.py index a36a7293..e77ad1e9 100644 --- a/google/cloud/datastore_v1/types/datastore.py +++ b/google/cloud/datastore_v1/types/datastore.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -115,9 +115,11 @@ class RunQueryRequest(proto.Message): The options for this query. query (google.cloud.datastore_v1.types.Query): The query to run. + This field is a member of `oneof`_ ``query_type``. gql_query (google.cloud.datastore_v1.types.GqlQuery): The GQL query to run. + This field is a member of `oneof`_ ``query_type``. """ @@ -221,6 +223,7 @@ class CommitRequest(proto.Message): The identifier of the transaction associated with the commit. A transaction identifier is returned by a call to [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. + This field is a member of `oneof`_ ``transaction_selector``. mutations (Sequence[google.cloud.datastore_v1.types.Mutation]): The mutations to perform. @@ -316,8 +319,8 @@ class ReserveIdsRequest(proto.Message): which to make the request. keys (Sequence[google.cloud.datastore_v1.types.Key]): Required. A list of keys with complete key - paths whose numeric IDs should not be auto- - allocated. + paths whose numeric IDs should not be + auto-allocated. """ project_id = proto.Field(proto.STRING, number=8,) @@ -347,27 +350,32 @@ class Mutation(proto.Message): The entity to insert. The entity must not already exist. The entity key's final path element may be incomplete. + This field is a member of `oneof`_ ``operation``. update (google.cloud.datastore_v1.types.Entity): The entity to update. The entity must already exist. Must have a complete key path. + This field is a member of `oneof`_ ``operation``. 
upsert (google.cloud.datastore_v1.types.Entity): The entity to upsert. The entity may or may not already exist. The entity key's final path element may be incomplete. + This field is a member of `oneof`_ ``operation``. delete (google.cloud.datastore_v1.types.Key): The key of the entity to delete. The entity may or may not already exist. Must have a - complete key path and must not be reserved/read- - only. + complete key path and must not be + reserved/read-only. + This field is a member of `oneof`_ ``operation``. base_version (int): The version of the entity that this mutation is being applied to. If this does not match the current version on the server, the mutation conflicts. + This field is a member of `oneof`_ ``conflict_detection_strategy``. """ @@ -429,11 +437,13 @@ class ReadOptions(proto.Message): read_consistency (google.cloud.datastore_v1.types.ReadOptions.ReadConsistency): The non-transactional read consistency to use. Cannot be set to ``STRONG`` for global queries. + This field is a member of `oneof`_ ``consistency_type``. transaction (bytes): The identifier of the transaction in which to read. A transaction identifier is returned by a call to [Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction]. + This field is a member of `oneof`_ ``consistency_type``. """ @@ -469,9 +479,11 @@ class TransactionOptions(proto.Message): read_write (google.cloud.datastore_v1.types.TransactionOptions.ReadWrite): The transaction should allow both reads and writes. + This field is a member of `oneof`_ ``mode``. read_only (google.cloud.datastore_v1.types.TransactionOptions.ReadOnly): The transaction should only allow reads. + This field is a member of `oneof`_ ``mode``. 
""" diff --git a/google/cloud/datastore_v1/types/entity.py b/google/cloud/datastore_v1/types/entity.py index 8ff844f7..1c432ee6 100644 --- a/google/cloud/datastore_v1/types/entity.py +++ b/google/cloud/datastore_v1/types/entity.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -117,11 +117,13 @@ class PathElement(proto.Message): Never equal to zero. Values less than zero are discouraged and may not be supported in the future. + This field is a member of `oneof`_ ``id_type``. name (str): The name of the entity. A name matching regex ``__.*__`` is reserved/read-only. A name must not be more than 1500 bytes when UTF-8 encoded. Cannot be ``""``. + This field is a member of `oneof`_ ``id_type``. """ @@ -160,49 +162,60 @@ class Value(proto.Message): Attributes: null_value (google.protobuf.struct_pb2.NullValue): A null value. + This field is a member of `oneof`_ ``value_type``. boolean_value (bool): A boolean value. + This field is a member of `oneof`_ ``value_type``. integer_value (int): An integer value. + This field is a member of `oneof`_ ``value_type``. double_value (float): A double value. + This field is a member of `oneof`_ ``value_type``. timestamp_value (google.protobuf.timestamp_pb2.Timestamp): A timestamp value. When stored in the Datastore, precise only to microseconds; any additional precision is rounded down. + This field is a member of `oneof`_ ``value_type``. key_value (google.cloud.datastore_v1.types.Key): A key value. + This field is a member of `oneof`_ ``value_type``. string_value (str): A UTF-8 encoded string value. When ``exclude_from_indexes`` is false (it is indexed), may have at most 1500 bytes. Otherwise, may be set to at most 1,000,000 bytes. + This field is a member of `oneof`_ ``value_type``. blob_value (bytes): A blob value. 
May have at most 1,000,000 bytes. When ``exclude_from_indexes`` is false, may have at most 1500 bytes. In JSON requests, must be base64-encoded. + This field is a member of `oneof`_ ``value_type``. geo_point_value (google.type.latlng_pb2.LatLng): A geo point value representing a point on the surface of Earth. + This field is a member of `oneof`_ ``value_type``. entity_value (google.cloud.datastore_v1.types.Entity): An entity value. - May have no key. - May have a key with an incomplete key path. - May have a reserved/read-only key. + This field is a member of `oneof`_ ``value_type``. array_value (google.cloud.datastore_v1.types.ArrayValue): An array value. Cannot contain another array value. A ``Value`` instance that sets field ``array_value`` must not set fields ``meaning`` or ``exclude_from_indexes``. + This field is a member of `oneof`_ ``value_type``. meaning (int): The ``meaning`` field should only be populated for backwards diff --git a/google/cloud/datastore_v1/types/query.py b/google/cloud/datastore_v1/types/query.py index 1c69e89f..46147f05 100644 --- a/google/cloud/datastore_v1/types/query.py +++ b/google/cloud/datastore_v1/types/query.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -203,9 +203,11 @@ class Filter(proto.Message): Attributes: composite_filter (google.cloud.datastore_v1.types.CompositeFilter): A composite filter. + This field is a member of `oneof`_ ``filter_type``. property_filter (google.cloud.datastore_v1.types.PropertyFilter): A filter on a property. + This field is a member of `oneof`_ ``filter_type``. """ @@ -318,10 +320,12 @@ class GqlQueryParameter(proto.Message): Attributes: value (google.cloud.datastore_v1.types.Value): A value parameter. + This field is a member of `oneof`_ ``parameter_type``. cursor (bytes): A query cursor. 
Query cursors are returned in query result batches. + This field is a member of `oneof`_ ``parameter_type``. """ diff --git a/mypy.ini b/mypy.ini index 5663b40d..17f8a8f6 100644 --- a/mypy.ini +++ b/mypy.ini @@ -3,5 +3,5 @@ python_version = 3.6 namespace_packages = True ignore_missing_imports = True -[mypy-google.protobuf] -ignore_missing_imports = True +[mypy-google.cloud.datastore._app_engine_key_pb2] +ignore_errors = True diff --git a/noxfile.py b/noxfile.py index 2510a58b..c8cc8070 100644 --- a/noxfile.py +++ b/noxfile.py @@ -77,9 +77,10 @@ def blacken(session): def mypy(session): """Verify type hints are mypy compatible.""" session.install("-e", ".") - session.install("mypy") - # TODO: also verify types on tests, all of google package - session.run("mypy", "-p", "google.cloud.datastore", "--no-incremental") + session.install( + "mypy", "types-setuptools", "types-mock", "types-protobuf", "types-requests" + ) + session.run("mypy", "google/", "tests/") @nox.session(python=DEFAULT_PYTHON_VERSION) diff --git a/owlbot.py b/owlbot.py index e5b43cca..fbf8c131 100644 --- a/owlbot.py +++ b/owlbot.py @@ -22,6 +22,7 @@ common = gcp.CommonTemplates() + # This is a customized version of the s.get_staging_dirs() function from synthtool to # cater for copying 2 different folders from googleapis-gen # which are datastore and datastore/admin @@ -60,6 +61,7 @@ def get_staging_dirs( else: return [] + # This library ships clients for two different APIs, # Datastore and Datastore Admin datastore_default_version = "v1" @@ -197,6 +199,17 @@ def docfx(session): """, ) +# Work around: https://github.com/googleapis/gapic-generator-python/issues/689 +s.replace( + [ + "google/**/datastore_admin/async_client.py", + "google/**/datastore_admin/client.py", + "google/**/types/datastore_admin.py", + ], + r"Sequence\[.*\.LabelsEntry\]", + r"Dict[str, str]", +) + # Add documentation about creating indexes and populating data for system # tests. 
assert 1 == s.replace( @@ -240,7 +253,8 @@ def docfx(session): ) # add type checker nox session -s.replace("noxfile.py", +s.replace( + "noxfile.py", """nox.options.sessions = \[ "unit", "system",""", @@ -262,9 +276,10 @@ def lint_setup_py\(session\): def mypy(session): """Verify type hints are mypy compatible.""" session.install("-e", ".") - session.install("mypy") - # TODO: also verify types on tests, all of google package - session.run("mypy", "-p", "google.cloud.datastore", "--no-incremental") + session.install( + "mypy", "types-setuptools", "types-mock", "types-protobuf", "types-requests" + ) + session.run("mypy", "google/", "tests/") @nox.session(python=DEFAULT_PYTHON_VERSION) diff --git a/scripts/fixup_datastore_admin_v1_keywords.py b/scripts/fixup_datastore_admin_v1_keywords.py index 12e217de..49b96026 100644 --- a/scripts/fixup_datastore_admin_v1_keywords.py +++ b/scripts/fixup_datastore_admin_v1_keywords.py @@ -1,6 +1,6 @@ #! /usr/bin/env python3 # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/scripts/fixup_datastore_v1_keywords.py b/scripts/fixup_datastore_v1_keywords.py index e0358795..4f5265b6 100644 --- a/scripts/fixup_datastore_v1_keywords.py +++ b/scripts/fixup_datastore_v1_keywords.py @@ -1,6 +1,6 @@ #! /usr/bin/env python3 # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/setup.py b/setup.py index 286653d5..6d24d2ee 100644 --- a/setup.py +++ b/setup.py @@ -38,9 +38,8 @@ # https://github.com/googleapis/google-cloud-python/issues/10566 "google-cloud-core >= 1.4.0, <3.0.0dev", "proto-plus >= 1.4.0", - "libcst >= 0.2.5", ] -extras = {} +extras = {"libcst": "libcst >= 0.2.5"} # Setup boilerplate below this line. diff --git a/tests/__init__.py b/tests/__init__.py index 4de65971..e8e1c384 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/tests/unit/__init__.py b/tests/unit/__init__.py index 4de65971..e8e1c384 100644 --- a/tests/unit/__init__.py +++ b/tests/unit/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/tests/unit/gapic/__init__.py b/tests/unit/gapic/__init__.py index 4de65971..e8e1c384 100644 --- a/tests/unit/gapic/__init__.py +++ b/tests/unit/gapic/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/tests/unit/gapic/datastore_admin_v1/__init__.py b/tests/unit/gapic/datastore_admin_v1/__init__.py index 4de65971..e8e1c384 100644 --- a/tests/unit/gapic/datastore_admin_v1/__init__.py +++ b/tests/unit/gapic/datastore_admin_v1/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py b/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py index a8f4a7b6..e6ed5508 100644 --- a/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py +++ b/tests/unit/gapic/datastore_admin_v1/test_datastore_admin.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -29,6 +29,7 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async +from google.api_core import operation from google.api_core import operation_async # type: ignore from google.api_core import operations_v1 from google.api_core import path_template @@ -255,20 +256,20 @@ def test_datastore_admin_client_client_options( # unsupported value. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): with pytest.raises(MutualTLSChannelError): - client = client_class() + client = client_class(transport=transport_name) # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
with mock.patch.dict( os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} ): with pytest.raises(ValueError): - client = client_class() + client = client_class(transport=transport_name) # Check the case quota_project_id is provided options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -327,7 +328,7 @@ def test_datastore_admin_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) if use_client_cert_env == "false": expected_client_cert_source = None @@ -404,6 +405,87 @@ def test_datastore_admin_client_mtls_env_auto( ) +@pytest.mark.parametrize( + "client_class", [DatastoreAdminClient, DatastoreAdminAsyncClient] +) +@mock.patch.object( + DatastoreAdminClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(DatastoreAdminClient), +) +@mock.patch.object( + DatastoreAdminAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(DatastoreAdminAsyncClient), +) +def test_datastore_admin_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ @@ -422,7 +504,7 @@ def test_datastore_admin_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -436,24 +518,31 @@ def test_datastore_admin_client_client_options_scopes( @pytest.mark.parametrize( - "client_class,transport_class,transport_name", + "client_class,transport_class,transport_name,grpc_helpers", [ - (DatastoreAdminClient, transports.DatastoreAdminGrpcTransport, "grpc"), + ( + DatastoreAdminClient, + transports.DatastoreAdminGrpcTransport, + "grpc", + grpc_helpers, + ), ( DatastoreAdminAsyncClient, transports.DatastoreAdminGrpcAsyncIOTransport, "grpc_asyncio", + grpc_helpers_async, ), ], ) def 
test_datastore_admin_client_client_options_credentials_file( - client_class, transport_class, transport_name + client_class, transport_class, transport_name, grpc_helpers ): # Check the case credentials file is provided. options = client_options.ClientOptions(credentials_file="credentials.json") + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -486,9 +575,77 @@ def test_datastore_admin_client_client_options_from_dict(): ) -def test_export_entities( - transport: str = "grpc", request_type=datastore_admin.ExportEntitiesRequest +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + DatastoreAdminClient, + transports.DatastoreAdminGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + DatastoreAdminAsyncClient, + transports.DatastoreAdminGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_datastore_admin_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers ): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # test that the credentials from file are saved and used as the credentials. 
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "datastore.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/datastore", + ), + scopes=None, + default_host="datastore.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("request_type", [datastore_admin.ExportEntitiesRequest, dict,]) +def test_export_entities(request_type, transport: str = "grpc"): client = DatastoreAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -512,10 +669,6 @@ def test_export_entities( assert isinstance(response, future.Future) -def test_export_entities_from_dict(): - test_export_entities(request_type=dict) - - def test_export_entities_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -585,12 +738,18 @@ def test_export_entities_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].labels == {"key_value": "value_value"} - assert args[0].entity_filter == datastore_admin.EntityFilter( - kinds=["kinds_value"] - ) - assert args[0].output_url_prefix == "output_url_prefix_value" + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].labels + mock_val = {"key_value": "value_value"} + assert arg == mock_val + arg = args[0].entity_filter + mock_val = datastore_admin.EntityFilter(kinds=["kinds_value"]) + assert arg == mock_val + arg = args[0].output_url_prefix + mock_val = "output_url_prefix_value" + assert arg == mock_val def test_export_entities_flattened_error(): @@ -635,12 +794,18 @@ async def test_export_entities_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].labels == {"key_value": "value_value"} - assert args[0].entity_filter == datastore_admin.EntityFilter( - kinds=["kinds_value"] - ) - assert args[0].output_url_prefix == "output_url_prefix_value" + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].labels + mock_val = {"key_value": "value_value"} + assert arg == mock_val + arg = args[0].entity_filter + mock_val = datastore_admin.EntityFilter(kinds=["kinds_value"]) + assert arg == mock_val + arg = args[0].output_url_prefix + mock_val = "output_url_prefix_value" + assert arg == mock_val @pytest.mark.asyncio @@ -661,9 +826,8 @@ async def test_export_entities_flattened_error_async(): ) -def test_import_entities( - transport: str = "grpc", request_type=datastore_admin.ImportEntitiesRequest -): +@pytest.mark.parametrize("request_type", [datastore_admin.ImportEntitiesRequest, dict,]) +def test_import_entities(request_type, transport: str = "grpc"): client = DatastoreAdminClient( 
credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -687,10 +851,6 @@ def test_import_entities( assert isinstance(response, future.Future) -def test_import_entities_from_dict(): - test_import_entities(request_type=dict) - - def test_import_entities_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -760,12 +920,18 @@ def test_import_entities_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].labels == {"key_value": "value_value"} - assert args[0].input_url == "input_url_value" - assert args[0].entity_filter == datastore_admin.EntityFilter( - kinds=["kinds_value"] - ) + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].labels + mock_val = {"key_value": "value_value"} + assert arg == mock_val + arg = args[0].input_url + mock_val = "input_url_value" + assert arg == mock_val + arg = args[0].entity_filter + mock_val = datastore_admin.EntityFilter(kinds=["kinds_value"]) + assert arg == mock_val def test_import_entities_flattened_error(): @@ -810,12 +976,18 @@ async def test_import_entities_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].labels == {"key_value": "value_value"} - assert args[0].input_url == "input_url_value" - assert args[0].entity_filter == datastore_admin.EntityFilter( - kinds=["kinds_value"] - ) + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].labels + mock_val = {"key_value": "value_value"} + assert arg == mock_val + arg = args[0].input_url + mock_val = "input_url_value" + assert arg == mock_val + arg = args[0].entity_filter + mock_val = datastore_admin.EntityFilter(kinds=["kinds_value"]) + assert arg == mock_val @pytest.mark.asyncio @@ -836,9 +1008,8 @@ async def test_import_entities_flattened_error_async(): ) -def test_create_index( - transport: str = "grpc", request_type=datastore_admin.CreateIndexRequest -): +@pytest.mark.parametrize("request_type", [datastore_admin.CreateIndexRequest, dict,]) +def test_create_index(request_type, transport: str = "grpc"): client = DatastoreAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -862,10 +1033,6 @@ def test_create_index( assert isinstance(response, future.Future) -def test_create_index_from_dict(): - test_create_index(request_type=dict) - - def test_create_index_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
@@ -915,9 +1082,8 @@ async def test_create_index_async_from_dict(): await test_create_index_async(request_type=dict) -def test_delete_index( - transport: str = "grpc", request_type=datastore_admin.DeleteIndexRequest -): +@pytest.mark.parametrize("request_type", [datastore_admin.DeleteIndexRequest, dict,]) +def test_delete_index(request_type, transport: str = "grpc"): client = DatastoreAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -941,10 +1107,6 @@ def test_delete_index( assert isinstance(response, future.Future) -def test_delete_index_from_dict(): - test_delete_index(request_type=dict) - - def test_delete_index_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -994,9 +1156,8 @@ async def test_delete_index_async_from_dict(): await test_delete_index_async(request_type=dict) -def test_get_index( - transport: str = "grpc", request_type=datastore_admin.GetIndexRequest -): +@pytest.mark.parametrize("request_type", [datastore_admin.GetIndexRequest, dict,]) +def test_get_index(request_type, transport: str = "grpc"): client = DatastoreAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1031,10 +1192,6 @@ def test_get_index( assert response.state == index.Index.State.CREATING -def test_get_index_from_dict(): - test_get_index(request_type=dict) - - def test_get_index_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
@@ -1095,9 +1252,8 @@ async def test_get_index_async_from_dict(): await test_get_index_async(request_type=dict) -def test_list_indexes( - transport: str = "grpc", request_type=datastore_admin.ListIndexesRequest -): +@pytest.mark.parametrize("request_type", [datastore_admin.ListIndexesRequest, dict,]) +def test_list_indexes(request_type, transport: str = "grpc"): client = DatastoreAdminClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1124,10 +1280,6 @@ def test_list_indexes( assert response.next_page_token == "next_page_token_value" -def test_list_indexes_from_dict(): - test_list_indexes(request_type=dict) - - def test_list_indexes_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -1180,8 +1332,10 @@ async def test_list_indexes_async_from_dict(): await test_list_indexes_async(request_type=dict) -def test_list_indexes_pager(): - client = DatastoreAdminClient(credentials=ga_credentials.AnonymousCredentials,) +def test_list_indexes_pager(transport_name: str = "grpc"): + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_indexes), "__call__") as call: @@ -1211,8 +1365,10 @@ def test_list_indexes_pager(): assert all(isinstance(i, index.Index) for i in results) -def test_list_indexes_pages(): - client = DatastoreAdminClient(credentials=ga_credentials.AnonymousCredentials,) +def test_list_indexes_pages(transport_name: str = "grpc"): + client = DatastoreAdminClient( + credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.list_indexes), "__call__") as call: @@ -1319,6 +1475,23 @@ def test_credentials_transport_error(): transport=transport, ) + # It is an error to provide an api_key and a transport instance. + transport = transports.DatastoreAdminGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DatastoreAdminClient(client_options=options, transport=transport,) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DatastoreAdminClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + # It is an error to provide scopes and a transport instance. transport = transports.DatastoreAdminGrpcTransport( credentials=ga_credentials.AnonymousCredentials(), @@ -1837,7 +2010,7 @@ def test_parse_common_location_path(): assert expected == actual -def test_client_withDEFAULT_CLIENT_INFO(): +def test_client_with_default_client_info(): client_info = gapic_v1.client_info.ClientInfo() with mock.patch.object( @@ -1902,3 +2075,33 @@ def test_client_ctx(): with client: pass close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (DatastoreAdminClient, transports.DatastoreAdminGrpcTransport), + (DatastoreAdminAsyncClient, transports.DatastoreAdminGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + 
patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) diff --git a/tests/unit/gapic/datastore_v1/__init__.py b/tests/unit/gapic/datastore_v1/__init__.py index 4de65971..e8e1c384 100644 --- a/tests/unit/gapic/datastore_v1/__init__.py +++ b/tests/unit/gapic/datastore_v1/__init__.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/tests/unit/gapic/datastore_v1/test_datastore.py b/tests/unit/gapic/datastore_v1/test_datastore.py index 04ced96f..fee5a408 100644 --- a/tests/unit/gapic/datastore_v1/test_datastore.py +++ b/tests/unit/gapic/datastore_v1/test_datastore.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright 2020 Google LLC +# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -238,20 +238,20 @@ def test_datastore_client_client_options(client_class, transport_class, transpor # unsupported value. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): with pytest.raises(MutualTLSChannelError): - client = client_class() + client = client_class(transport=transport_name) # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
with mock.patch.dict( os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} ): with pytest.raises(ValueError): - client = client_class() + client = client_class(transport=transport_name) # Check the case quota_project_id is provided options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -308,7 +308,7 @@ def test_datastore_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) if use_client_cert_env == "false": expected_client_cert_source = None @@ -385,6 +385,83 @@ def test_datastore_client_mtls_env_auto( ) +@pytest.mark.parametrize("client_class", [DatastoreClient, DatastoreAsyncClient]) +@mock.patch.object( + DatastoreClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DatastoreClient) +) +@mock.patch.object( + DatastoreAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(DatastoreAsyncClient), +) +def test_datastore_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ @@ -403,7 +480,7 @@ def test_datastore_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -417,24 +494,26 @@ def test_datastore_client_client_options_scopes( @pytest.mark.parametrize( - "client_class,transport_class,transport_name", + "client_class,transport_class,transport_name,grpc_helpers", [ - (DatastoreClient, transports.DatastoreGrpcTransport, "grpc"), + (DatastoreClient, transports.DatastoreGrpcTransport, "grpc", grpc_helpers), ( DatastoreAsyncClient, transports.DatastoreGrpcAsyncIOTransport, "grpc_asyncio", + grpc_helpers_async, ), ], ) def test_datastore_client_client_options_credentials_file( - 
client_class, transport_class, transport_name + client_class, transport_class, transport_name, grpc_helpers ): # Check the case credentials file is provided. options = client_options.ClientOptions(credentials_file="credentials.json") + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -465,7 +544,72 @@ def test_datastore_client_client_options_from_dict(): ) -def test_lookup(transport: str = "grpc", request_type=datastore.LookupRequest): +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + (DatastoreClient, transports.DatastoreGrpcTransport, "grpc", grpc_helpers), + ( + DatastoreAsyncClient, + transports.DatastoreGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_datastore_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # test that the credentials from file are saved and used as the credentials. 
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "datastore.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/datastore", + ), + scopes=None, + default_host="datastore.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("request_type", [datastore.LookupRequest, dict,]) +def test_lookup(request_type, transport: str = "grpc"): client = DatastoreClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -489,10 +633,6 @@ def test_lookup(transport: str = "grpc", request_type=datastore.LookupRequest): assert isinstance(response, datastore.LookupResponse) -def test_lookup_from_dict(): - test_lookup(request_type=dict) - - def test_lookup_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -567,13 +707,19 @@ def test_lookup_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].read_options == datastore.ReadOptions( + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].read_options + mock_val = datastore.ReadOptions( read_consistency=datastore.ReadOptions.ReadConsistency.STRONG ) - assert args[0].keys == [ + assert arg == mock_val + arg = args[0].keys + mock_val = [ entity.Key(partition_id=entity.PartitionId(project_id="project_id_value")) ] + assert arg == mock_val def test_lookup_flattened_error(): @@ -626,13 +772,19 @@ async def test_lookup_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].read_options == datastore.ReadOptions( + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].read_options + mock_val = datastore.ReadOptions( read_consistency=datastore.ReadOptions.ReadConsistency.STRONG ) - assert args[0].keys == [ + assert arg == mock_val + arg = args[0].keys + mock_val = [ entity.Key(partition_id=entity.PartitionId(project_id="project_id_value")) ] + assert arg == mock_val @pytest.mark.asyncio @@ -656,7 +808,8 @@ async def test_lookup_flattened_error_async(): ) -def test_run_query(transport: str = "grpc", request_type=datastore.RunQueryRequest): +@pytest.mark.parametrize("request_type", [datastore.RunQueryRequest, dict,]) +def test_run_query(request_type, transport: str = "grpc"): client = DatastoreClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -680,10 +833,6 @@ def test_run_query(transport: str = "grpc", request_type=datastore.RunQueryReque assert isinstance(response, datastore.RunQueryResponse) -def test_run_query_from_dict(): - test_run_query(request_type=dict) - - def test_run_query_empty_call(): # This test is a coverage failsafe to make sure that 
totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -733,9 +882,8 @@ async def test_run_query_async_from_dict(): await test_run_query_async(request_type=dict) -def test_begin_transaction( - transport: str = "grpc", request_type=datastore.BeginTransactionRequest -): +@pytest.mark.parametrize("request_type", [datastore.BeginTransactionRequest, dict,]) +def test_begin_transaction(request_type, transport: str = "grpc"): client = DatastoreClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -764,10 +912,6 @@ def test_begin_transaction( assert response.transaction == b"transaction_blob" -def test_begin_transaction_from_dict(): - test_begin_transaction(request_type=dict) - - def test_begin_transaction_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -839,7 +983,9 @@ def test_begin_transaction_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val def test_begin_transaction_flattened_error(): @@ -875,7 +1021,9 @@ async def test_begin_transaction_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val @pytest.mark.asyncio @@ -890,7 +1038,8 @@ async def test_begin_transaction_flattened_error_async(): ) -def test_commit(transport: str = "grpc", request_type=datastore.CommitRequest): +@pytest.mark.parametrize("request_type", [datastore.CommitRequest, dict,]) +def test_commit(request_type, transport: str = "grpc"): client = DatastoreClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -915,10 +1064,6 @@ def test_commit(transport: str = "grpc", request_type=datastore.CommitRequest): assert response.index_updates == 1389 -def test_commit_from_dict(): - test_commit(request_type=dict) - - def test_commit_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -999,9 +1144,14 @@ def test_commit_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].mode == datastore.CommitRequest.Mode.TRANSACTIONAL - assert args[0].mutations == [ + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].mode + mock_val = datastore.CommitRequest.Mode.TRANSACTIONAL + assert arg == mock_val + arg = args[0].mutations + mock_val = [ datastore.Mutation( insert=entity.Entity( key=entity.Key( @@ -1010,6 +1160,7 @@ def test_commit_flattened(): ) ) ] + assert arg == mock_val assert args[0].transaction == b"transaction_blob" @@ -1073,9 +1224,14 @@ async def test_commit_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].mode == datastore.CommitRequest.Mode.TRANSACTIONAL - assert args[0].mutations == [ + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].mode + mock_val = datastore.CommitRequest.Mode.TRANSACTIONAL + assert arg == mock_val + arg = args[0].mutations + mock_val = [ datastore.Mutation( insert=entity.Entity( key=entity.Key( @@ -1084,6 +1240,7 @@ async def test_commit_flattened_async(): ) ) ] + assert arg == mock_val assert args[0].transaction == b"transaction_blob" @@ -1113,7 +1270,8 @@ async def test_commit_flattened_error_async(): ) -def test_rollback(transport: str = "grpc", request_type=datastore.RollbackRequest): +@pytest.mark.parametrize("request_type", [datastore.RollbackRequest, dict,]) +def test_rollback(request_type, transport: str = "grpc"): client = DatastoreClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1137,10 +1295,6 @@ def test_rollback(transport: str = "grpc", request_type=datastore.RollbackReques assert isinstance(response, datastore.RollbackResponse) -def test_rollback_from_dict(): - test_rollback(request_type=dict) - - def test_rollback_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -1207,8 +1361,12 @@ def test_rollback_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].transaction == b"transaction_blob" + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].transaction + mock_val = b"transaction_blob" + assert arg == mock_val def test_rollback_flattened_error(): @@ -1246,8 +1404,12 @@ async def test_rollback_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].transaction == b"transaction_blob" + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].transaction + mock_val = b"transaction_blob" + assert arg == mock_val @pytest.mark.asyncio @@ -1264,9 +1426,8 @@ async def test_rollback_flattened_error_async(): ) -def test_allocate_ids( - transport: str = "grpc", request_type=datastore.AllocateIdsRequest -): +@pytest.mark.parametrize("request_type", [datastore.AllocateIdsRequest, dict,]) +def test_allocate_ids(request_type, transport: str = "grpc"): client = DatastoreClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1290,10 +1451,6 @@ def test_allocate_ids( assert isinstance(response, datastore.AllocateIdsResponse) -def test_allocate_ids_from_dict(): - test_allocate_ids(request_type=dict) - - def test_allocate_ids_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -1365,10 +1522,14 @@ def test_allocate_ids_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].keys == [ + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].keys + mock_val = [ entity.Key(partition_id=entity.PartitionId(project_id="project_id_value")) ] + assert arg == mock_val def test_allocate_ids_flattened_error(): @@ -1415,10 +1576,14 @@ async def test_allocate_ids_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].keys == [ + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].keys + mock_val = [ entity.Key(partition_id=entity.PartitionId(project_id="project_id_value")) ] + assert arg == mock_val @pytest.mark.asyncio @@ -1439,7 +1604,8 @@ async def test_allocate_ids_flattened_error_async(): ) -def test_reserve_ids(transport: str = "grpc", request_type=datastore.ReserveIdsRequest): +@pytest.mark.parametrize("request_type", [datastore.ReserveIdsRequest, dict,]) +def test_reserve_ids(request_type, transport: str = "grpc"): client = DatastoreClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1463,10 +1629,6 @@ def test_reserve_ids(transport: str = "grpc", request_type=datastore.ReserveIdsR assert isinstance(response, datastore.ReserveIdsResponse) -def test_reserve_ids_from_dict(): - test_reserve_ids(request_type=dict) - - def test_reserve_ids_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -1538,10 +1700,14 @@ def test_reserve_ids_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].keys == [ + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].keys + mock_val = [ entity.Key(partition_id=entity.PartitionId(project_id="project_id_value")) ] + assert arg == mock_val def test_reserve_ids_flattened_error(): @@ -1588,10 +1754,14 @@ async def test_reserve_ids_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].project_id == "project_id_value" - assert args[0].keys == [ + arg = args[0].project_id + mock_val = "project_id_value" + assert arg == mock_val + arg = args[0].keys + mock_val = [ entity.Key(partition_id=entity.PartitionId(project_id="project_id_value")) ] + assert arg == mock_val @pytest.mark.asyncio @@ -1632,6 +1802,23 @@ def test_credentials_transport_error(): transport=transport, ) + # It is an error to provide an api_key and a transport instance. + transport = transports.DatastoreGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DatastoreClient(client_options=options, transport=transport,) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DatastoreClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + # It is an error to provide scopes and a transport instance. 
transport = transports.DatastoreGrpcTransport( credentials=ga_credentials.AnonymousCredentials(), @@ -2103,7 +2290,7 @@ def test_parse_common_location_path(): assert expected == actual -def test_client_withDEFAULT_CLIENT_INFO(): +def test_client_with_default_client_info(): client_info = gapic_v1.client_info.ClientInfo() with mock.patch.object( @@ -2168,3 +2355,33 @@ def test_client_ctx(): with client: pass close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (DatastoreClient, transports.DatastoreGrpcTransport), + (DatastoreAsyncClient, transports.DatastoreGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) diff --git a/tests/unit/test_batch.py b/tests/unit/test_batch.py index fffbefa2..0e45ed97 100644 --- a/tests/unit/test_batch.py +++ b/tests/unit/test_batch.py @@ -12,6 +12,9 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+from typing import Dict +from typing import Any + import mock import pytest @@ -498,7 +501,7 @@ def test__parse_commit_response(): class _Entity(dict): key = None exclude_from_indexes = () - _meanings = {} + _meanings: Dict[str, Any] = {} class _Key(object): diff --git a/tests/unit/test_client.py b/tests/unit/test_client.py index 7f38a5ad..da253deb 100644 --- a/tests/unit/test_client.py +++ b/tests/unit/test_client.py @@ -12,6 +12,9 @@ # See the License for the specific language governing permissions and # limitations under the License. +from typing import Dict +from typing import Any + import mock import pytest @@ -1522,7 +1525,7 @@ def __exit__(self, *args): class _Entity(dict): key = None exclude_from_indexes = () - _meanings = {} + _meanings: Dict[str, Any] = {} class _Key(object):